Decouple XContentBuilder from BytesReference (#28972)

* Decouple XContentBuilder from BytesReference

This commit removes all mentions of `BytesReference` from `XContentBuilder`.
This is needed so that we can completely decouple the XContent code and move it
into its own dependency.

While this change appears large, it is due to two main changes, moving
`.bytes()` and `.string()` out of XContentBuilder itself into static methods
`BytesReference.bytes` and `Strings.toString` respectively. The rest of the
change is code reacting to these changes (the majority of it in tests).

Relates to #28504
This commit is contained in:
Lee Hinman 2018-03-14 13:47:57 -06:00 committed by GitHub
parent ef6fc1e9fd
commit 8e8fdc4f0e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
301 changed files with 3563 additions and 3369 deletions

View File

@ -328,7 +328,7 @@ public final class Request {
}
metadata.endObject();
BytesRef metadataSource = metadata.bytes().toBytesRef();
BytesRef metadataSource = BytesReference.bytes(metadata).toBytesRef();
content.write(metadataSource.bytes, metadataSource.offset, metadataSource.length);
content.write(separator);
}
@ -343,7 +343,7 @@ public final class Request {
LoggingDeprecationHandler.INSTANCE, indexSource, indexXContentType)) {
try (XContentBuilder builder = XContentBuilder.builder(bulkContentType.xContent())) {
builder.copyCurrentStructure(parser);
source = builder.bytes().toBytesRef();
source = BytesReference.bytes(builder).toBytesRef();
}
}
} else if (opType == DocWriteRequest.OpType.UPDATE) {

View File

@ -617,7 +617,8 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
bulkRequest.add(deleteRequest);
} else {
BytesReference source = XContentBuilder.builder(xContentType.xContent()).startObject().field("id", i).endObject().bytes();
BytesReference source = BytesReference.bytes(XContentBuilder.builder(xContentType.xContent())
.startObject().field("id", i).endObject());
if (opType == DocWriteRequest.OpType.INDEX) {
IndexRequest indexRequest = new IndexRequest("index", "test", id).source(source, xContentType);
if (erroneous) {

View File

@ -55,6 +55,7 @@ import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -272,7 +273,7 @@ public class RestHighLevelClientTests extends ESTestCase {
builder.startObject();
builder.field("field", "value");
builder.endObject();
return new ByteArrayEntity(builder.bytes().toBytesRef().bytes, contentType);
return new ByteArrayEntity(BytesReference.bytes(builder).toBytesRef().bytes, contentType);
}
}

View File

@ -34,6 +34,7 @@ import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.MatchQueryBuilder;
@ -478,7 +479,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
for (int i = 0; i < 100; i++) {
XContentBuilder builder = jsonBuilder().startObject().field("field", i).endObject();
HttpEntity entity = new NStringEntity(builder.string(), ContentType.APPLICATION_JSON);
HttpEntity entity = new NStringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON);
client().performRequest(HttpPut.METHOD_NAME, "test/type1/" + Integer.toString(i), Collections.emptyMap(), entity);
}
client().performRequest(HttpPost.METHOD_NAME, "/test/_refresh");

View File

@ -266,13 +266,13 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
assertSame(indexResponse.status(), RestStatus.CREATED);
XContentType xContentType = XContentType.JSON;
String script = XContentBuilder.builder(xContentType.xContent())
String script = Strings.toString(XContentBuilder.builder(xContentType.xContent())
.startObject()
.startObject("script")
.field("lang", "painless")
.field("code", "ctx._source.field += params.count")
.endObject()
.endObject().string();
.endObject());
HttpEntity body = new NStringEntity(script, ContentType.create(xContentType.mediaType()));
Response response = client().performRequest(HttpPost.METHOD_NAME, "/_scripts/increment-field", emptyMap(), body);
assertEquals(response.getStatusLine().getStatusCode(), RestStatus.OK.getStatus());

View File

@ -33,6 +33,7 @@ import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
@ -75,7 +76,7 @@ public class MigrationDocumentationIT extends ESRestHighLevelClientTestCase {
.put(SETTING_NUMBER_OF_REPLICAS, 0)
.build();
String payload = XContentFactory.jsonBuilder() // <2>
String payload = Strings.toString(XContentFactory.jsonBuilder() // <2>
.startObject()
.startObject("settings") // <3>
.value(indexSettings)
@ -89,7 +90,7 @@ public class MigrationDocumentationIT extends ESRestHighLevelClientTestCase {
.endObject()
.endObject()
.endObject()
.endObject().string();
.endObject());
HttpEntity entity = new NStringEntity(payload, ContentType.APPLICATION_JSON); // <5>

View File

@ -21,13 +21,13 @@ package org.elasticsearch.ingest.common;
import com.fasterxml.jackson.core.JsonFactory;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.common.xcontent.json.JsonXContentParser;
import org.elasticsearch.ingest.AbstractProcessor;
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.Processor;
@ -99,7 +99,7 @@ public final class ScriptProcessor extends AbstractProcessor {
public ScriptProcessor create(Map<String, Processor.Factory> registry, String processorTag,
Map<String, Object> config) throws Exception {
try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent).map(config);
InputStream stream = builder.bytes().streamInput();
InputStream stream = BytesReference.bytes(builder).streamInput();
XContentParser parser = XContentType.JSON.xContent().createParser(NamedXContentRegistry.EMPTY,
LoggingDeprecationHandler.INSTANCE, stream)) {
Script script = Script.parse(parser);

View File

@ -19,6 +19,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.ToXContent;
@ -63,7 +64,7 @@ public class GrokProcessorGetActionTests extends ESTestCase {
GrokProcessorGetAction.Response response = new GrokProcessorGetAction.Response(TEST_PATTERNS);
try (XContentBuilder builder = JsonXContent.contentBuilder()) {
response.toXContent(builder, ToXContent.EMPTY_PARAMS);
Map<String, Object> converted = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2();
Map<String, Object> converted = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2();
Map<String, String> patterns = (Map<String, String>) converted.get("patterns");
assertThat(patterns.size(), equalTo(1));
assertThat(patterns.get("PATTERN"), equalTo("foo"));

View File

@ -19,6 +19,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
@ -48,7 +49,7 @@ public class JsonProcessorTests extends ESTestCase {
Map<String, Object> randomJsonMap = RandomDocumentPicks.randomSource(random());
XContentBuilder builder = JsonXContent.contentBuilder().map(randomJsonMap);
String randomJson = XContentHelper.convertToJson(builder.bytes(), false, XContentType.JSON);
String randomJson = XContentHelper.convertToJson(BytesReference.bytes(builder), false, XContentType.JSON);
document.put(randomField, randomJson);
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);

View File

@ -30,6 +30,7 @@ import com.github.mustachejava.TemplateContext;
import com.github.mustachejava.codes.DefaultMustache;
import com.github.mustachejava.codes.IterableCode;
import com.github.mustachejava.codes.WriteCode;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
@ -215,7 +216,7 @@ public class CustomMustacheFactory extends DefaultMustacheFactory {
// Do not handle as JSON
return oh.stringify(resolved);
}
return builder.string();
return Strings.toString(builder);
} catch (IOException e) {
throw new MustacheException("Failed to convert object to JSON", e);
}

View File

@ -23,6 +23,7 @@ import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -63,7 +64,7 @@ public class RestSearchTemplateAction extends BaseRestHandler {
if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
//convert the template to json which is the only supported XContentType (see CustomMustacheFactory#createEncoder)
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
request.setScript(builder.copyCurrentStructure(parser).string());
request.setScript(Strings.toString(builder.copyCurrentStructure(parser)));
} catch (IOException e) {
throw new ParsingException(parser.getTokenLocation(), "Could not parse inline template", e);
}

View File

@ -30,6 +30,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
import java.io.InputStream;
public class SearchTemplateResponse extends ActionResponse implements StatusToXContentObject {
@ -83,7 +84,9 @@ public class SearchTemplateResponse extends ActionResponse implements StatusToX
} else {
builder.startObject();
//we can assume the template is always json as we convert it before compiling it
builder.rawField("template_output", source, XContentType.JSON);
try (InputStream stream = source.streamInput()) {
builder.rawField("template_output", stream, XContentType.JSON);
}
builder.endObject();
}
return builder;

View File

@ -21,6 +21,7 @@ package org.elasticsearch.script.mustache;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptType;
@ -61,13 +62,13 @@ public class MultiSearchTemplateIT extends ESIntegTestCase {
}
indexRandom(true, indexRequestBuilders);
final String template = jsonBuilder().startObject()
final String template = Strings.toString(jsonBuilder().startObject()
.startObject("query")
.startObject("{{query_type}}")
.field("{{field_name}}", "{{field_value}}")
.endObject()
.endObject()
.endObject().string();
.endObject());
MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest();

View File

@ -30,6 +30,7 @@ import java.util.Map;
import java.util.Set;
import com.github.mustachejava.MustacheException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.script.ScriptEngine;
@ -248,7 +249,7 @@ public class MustacheTests extends ESTestCase {
.endObject();
Map<String, Object> ctx =
singletonMap("ctx", XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2());
singletonMap("ctx", XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2());
assertScript("{{#ctx.bulks}}{{#toJson}}.{{/toJson}}{{/ctx.bulks}}", ctx,
equalTo("{\"index\":\"index-1\",\"id\":1,\"type\":\"type-1\"}{\"index\":\"index-2\",\"id\":2,\"type\":\"type-2\"}"));
@ -290,7 +291,7 @@ public class MustacheTests extends ESTestCase {
.endObject();
Map<String, Object> ctx =
singletonMap("ctx", XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2());
singletonMap("ctx", XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2());
assertScript("{{#join}}ctx.people.0.emails{{/join}}", ctx,
equalTo("john@smith.com,john.smith@email.com,jsmith@email.com"));

View File

@ -23,6 +23,7 @@ import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRespo
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.plugins.Plugin;
@ -317,7 +318,8 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
assertAcked(
client().admin().cluster().preparePutStoredScript()
.setId("4")
.setContent(jsonBuilder().startObject().field("template", multiQuery).endObject().bytes(), XContentType.JSON)
.setContent(BytesReference.bytes(jsonBuilder().startObject().field("template", multiQuery).endObject()),
XContentType.JSON)
);
BulkRequestBuilder bulkRequestBuilder = client().prepareBulk();
bulkRequestBuilder.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}", XContentType.JSON));

View File

@ -21,6 +21,8 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
@ -54,20 +56,20 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
}
public void testDefaults() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", 123)
.endObject()
.bytes(),
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 123)
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
@ -83,9 +85,9 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
}
public void testMissingScalingFactor() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float").endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping)));
@ -93,10 +95,10 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
}
public void testIllegalScalingFactor() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", -1).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping)));
@ -104,20 +106,20 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
}
public void testNotIndexed() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("index", false).field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", 123)
.endObject()
.bytes(),
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 123)
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
@ -128,20 +130,20 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
}
public void testNoDocValues() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("doc_values", false).field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", 123)
.endObject()
.bytes(),
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 123)
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
@ -152,20 +154,20 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
}
public void testStore() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("store", true).field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", 123)
.endObject()
.bytes(),
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 123)
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
@ -181,20 +183,20 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
}
public void testCoerce() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "123")
.endObject()
.bytes(),
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "123")
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
@ -205,20 +207,20 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
IndexableField dvField = fields[1];
assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType());
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", 10.0).field("coerce", false).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper2.mappingSource().toString());
ThrowingRunnable runnable = () -> mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "123")
.endObject()
.bytes(),
ThrowingRunnable runnable = () -> mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "123")
.endObject()),
XContentType.JSON));
MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
assertThat(e.getCause().getMessage(), containsString("passed as String"));
@ -234,36 +236,36 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
}
private void doTestIgnoreMalformed(String value, String exceptionMessageContains) throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", value)
.endObject()
.bytes(),
ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", value)
.endObject()),
XContentType.JSON));
MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
assertThat(e.getCause().getMessage(), containsString(exceptionMessageContains));
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", 10.0).field("ignore_malformed", true).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", value)
.endObject()
.bytes(),
ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", value)
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
@ -271,7 +273,7 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
}
public void testNullValue() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject()
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("type")
.startObject("properties")
.startObject("field")
@ -279,20 +281,20 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
.field("scaling_factor", 10.0)
.endObject()
.endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.nullField("field")
.endObject()
.bytes(),
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.nullField("field")
.endObject()),
XContentType.JSON));
assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field"));
mapping = XContentFactory.jsonBuilder().startObject()
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("type")
.startObject("properties")
.startObject("field")
@ -301,16 +303,16 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
.field("null_value", 2.5)
.endObject()
.endObject()
.endObject().endObject().string();
.endObject().endObject());
mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.nullField("field")
.endObject()
.bytes(),
doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.nullField("field")
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
@ -325,11 +327,11 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
public void testEmptyName() throws IOException {
// after 5.x
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("")
.field("type", "scaled_float")
.field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping))
@ -341,13 +343,13 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
* `index_options` was deprecated and is rejected as of 7.0
*/
public void testRejectIndexOptions() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("foo")
.field("type", "scaled_float")
.field("index_options", randomFrom(new String[] { "docs", "freqs", "positions", "offsets" }))
.endObject()
.endObject().endObject().endObject().string();
.endObject().endObject().endObject());
MapperParsingException e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping)));
assertThat(e.getMessage(), containsString("index_options not allowed in field [foo] of type [scaled_float]"));
}

View File

@ -24,6 +24,7 @@ import org.apache.lucene.analysis.CannedTokenStream;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -52,7 +53,7 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
}
public void testMerge() throws IOException {
String stage1Mapping = XContentFactory.jsonBuilder().startObject()
String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("person")
.startObject("properties")
.startObject("tc")
@ -60,12 +61,12 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
.field("analyzer", "keyword")
.endObject()
.endObject()
.endObject().endObject().string();
.endObject().endObject());
MapperService mapperService = createIndex("test").mapperService();
DocumentMapper stage1 = mapperService.merge("person",
new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE);
String stage2Mapping = XContentFactory.jsonBuilder().startObject()
String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("person")
.startObject("properties")
.startObject("tc")
@ -73,7 +74,7 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
.field("analyzer", "standard")
.endObject()
.endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper stage2 = mapperService.merge("person",
new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE);
@ -131,7 +132,7 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
public void testEmptyName() throws IOException {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject()
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("type")
.startObject("properties")
.startObject("")
@ -139,7 +140,7 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
.field("analyzer", "standard")
.endObject()
.endObject()
.endObject().endObject().string();
.endObject().endObject());
// Empty name not allowed in index created after 5.0
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
@ -167,7 +168,7 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
}
private DocumentMapper createIndexWithTokenCountField() throws IOException {
final String content = XContentFactory.jsonBuilder().startObject()
final String content = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("person")
.startObject("properties")
.startObject("test")
@ -180,16 +181,16 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject()
.endObject().endObject().string();
.endObject().endObject());
return createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(content));
}
private SourceToParse createDocument(String fieldValue) throws Exception {
BytesReference request = XContentFactory.jsonBuilder()
BytesReference request = BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("test", fieldValue)
.endObject().bytes();
.endObject());
return SourceToParse.source("test", "person", "1", request, XContentType.JSON);
}

View File

@ -19,6 +19,8 @@
package org.elasticsearch.join.mapper;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
@ -45,7 +47,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
}
public void testSingleLevel() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject()
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("join_field")
.field("type", "join")
@ -54,7 +56,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject()
.endObject().string();
.endObject());
IndexService service = createIndex("test");
DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping),
MapperService.MergeReason.MAPPING_UPDATE);
@ -62,39 +64,39 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
// Doc without join
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "0",
XContentFactory.jsonBuilder().startObject().endObject().bytes(), XContentType.JSON));
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON));
assertNull(doc.rootDoc().getBinaryValue("join_field"));
// Doc parent
doc = docMapper.parse(SourceToParse.source("test", "type", "1",
XContentFactory.jsonBuilder().startObject()
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field("join_field", "parent")
.endObject().bytes(), XContentType.JSON));
.endObject()), XContentType.JSON));
assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
assertEquals("parent", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
// Doc child
doc = docMapper.parse(SourceToParse.source("test", "type", "2",
XContentFactory.jsonBuilder().startObject()
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("join_field")
.field("name", "child")
.field("parent", "1")
.endObject()
.endObject().bytes(), XContentType.JSON).routing("1"));
.endObject()), XContentType.JSON).routing("1"));
assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
// Unkwnown join name
MapperException exc = expectThrows(MapperParsingException.class,
() -> docMapper.parse(SourceToParse.source("test", "type", "1",
XContentFactory.jsonBuilder().startObject()
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field("join_field", "unknown")
.endObject().bytes(), XContentType.JSON)));
.endObject()), XContentType.JSON)));
assertThat(exc.getRootCause().getMessage(), containsString("unknown join name [unknown] for field [join_field]"));
}
public void testParentIdSpecifiedAsNumber() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject()
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("join_field")
.field("type", "join")
@ -103,32 +105,32 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject()
.endObject().string();
.endObject());
IndexService service = createIndex("test");
DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping),
MapperService.MergeReason.MAPPING_UPDATE);
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "2",
XContentFactory.jsonBuilder().startObject()
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("join_field")
.field("name", "child")
.field("parent", 1)
.endObject()
.endObject().bytes(), XContentType.JSON).routing("1"));
.endObject()), XContentType.JSON).routing("1"));
assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
doc = docMapper.parse(SourceToParse.source("test", "type", "2",
XContentFactory.jsonBuilder().startObject()
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("join_field")
.field("name", "child")
.field("parent", 1.0)
.endObject()
.endObject().bytes(), XContentType.JSON).routing("1"));
.endObject()), XContentType.JSON).routing("1"));
assertEquals("1.0", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
}
public void testMultipleLevels() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject()
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("join_field")
.field("type", "join")
@ -138,7 +140,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject()
.endObject().string();
.endObject());
IndexService service = createIndex("test");
DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping),
MapperService.MergeReason.MAPPING_UPDATE);
@ -146,26 +148,26 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
// Doc without join
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "0",
XContentFactory.jsonBuilder().startObject().endObject().bytes(), XContentType.JSON));
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON));
assertNull(doc.rootDoc().getBinaryValue("join_field"));
// Doc parent
doc = docMapper.parse(SourceToParse.source("test", "type", "1",
XContentFactory.jsonBuilder()
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("join_field", "parent")
.endObject().bytes(), XContentType.JSON));
.endObject()), XContentType.JSON));
assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
assertEquals("parent", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
// Doc child
doc = docMapper.parse(SourceToParse.source("test", "type", "2",
XContentFactory.jsonBuilder().startObject()
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("join_field")
.field("name", "child")
.field("parent", "1")
.endObject()
.endObject().bytes(), XContentType.JSON).routing("1"));
.endObject()), XContentType.JSON).routing("1"));
assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
assertEquals("2", doc.rootDoc().getBinaryValue("join_field#child").utf8ToString());
assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
@ -173,44 +175,44 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
// Doc child missing parent
MapperException exc = expectThrows(MapperParsingException.class,
() -> docMapper.parse(SourceToParse.source("test", "type", "2",
XContentFactory.jsonBuilder().startObject()
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field("join_field", "child")
.endObject().bytes(), XContentType.JSON).routing("1")));
.endObject()), XContentType.JSON).routing("1")));
assertThat(exc.getRootCause().getMessage(), containsString("[parent] is missing for join field [join_field]"));
// Doc child missing routing
exc = expectThrows(MapperParsingException.class,
() -> docMapper.parse(SourceToParse.source("test", "type", "2",
XContentFactory.jsonBuilder().startObject()
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("join_field")
.field("name", "child")
.field("parent", "1")
.endObject()
.endObject().bytes(), XContentType.JSON)));
.endObject()), XContentType.JSON)));
assertThat(exc.getRootCause().getMessage(), containsString("[routing] is missing for join field [join_field]"));
// Doc grand_child
doc = docMapper.parse(SourceToParse.source("test", "type", "3",
XContentFactory.jsonBuilder().startObject()
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("join_field")
.field("name", "grand_child")
.field("parent", "2")
.endObject()
.endObject().bytes(), XContentType.JSON).routing("1"));
.endObject()), XContentType.JSON).routing("1"));
assertEquals("2", doc.rootDoc().getBinaryValue("join_field#child").utf8ToString());
assertEquals("grand_child", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
// Unkwnown join name
exc = expectThrows(MapperParsingException.class,
() -> docMapper.parse(SourceToParse.source("test", "type", "1",
XContentFactory.jsonBuilder().startObject()
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field("join_field", "unknown")
.endObject().bytes(), XContentType.JSON)));
.endObject()), XContentType.JSON)));
assertThat(exc.getRootCause().getMessage(), containsString("unknown join name [unknown] for field [join_field]"));
}
public void testUpdateRelations() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field")
.field("type", "join")
.startObject("relations")
@ -218,21 +220,21 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.array("child", "grand_child1", "grand_child2")
.endObject()
.endObject()
.endObject().endObject().string();
.endObject().endObject());
IndexService indexService = createIndex("test");
DocumentMapper docMapper = indexService.mapperService().merge("type", new CompressedXContent(mapping),
MapperService.MergeReason.MAPPING_UPDATE);
assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService()));
{
final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field")
.field("type", "join")
.startObject("relations")
.array("child", "grand_child1", "grand_child2")
.endObject()
.endObject()
.endObject().endObject().string();
.endObject().endObject());
IllegalStateException exc = expectThrows(IllegalStateException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE));
@ -240,7 +242,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
}
{
final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field")
.field("type", "join")
.startObject("relations")
@ -248,7 +250,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.field("child", "grand_child1")
.endObject()
.endObject()
.endObject().endObject().string();
.endObject().endObject());
IllegalStateException exc = expectThrows(IllegalStateException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE));
@ -256,7 +258,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
}
{
final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field")
.field("type", "join")
.startObject("relations")
@ -265,7 +267,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.array("child", "grand_child1", "grand_child2")
.endObject()
.endObject()
.endObject().endObject().string();
.endObject().endObject());
IllegalStateException exc = expectThrows(IllegalStateException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE));
@ -273,7 +275,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
}
{
final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field")
.field("type", "join")
.startObject("relations")
@ -282,7 +284,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.field("grand_child2", "grand_grand_child")
.endObject()
.endObject()
.endObject().endObject().string();
.endObject().endObject());
IllegalStateException exc = expectThrows(IllegalStateException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE));
@ -290,7 +292,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
}
{
final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field")
.field("type", "join")
.startObject("relations")
@ -298,7 +300,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.array("child", "grand_child1", "grand_child2")
.endObject()
.endObject()
.endObject().endObject().string();
.endObject().endObject());
docMapper = indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE);
assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService()));
@ -310,7 +312,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
}
{
final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field")
.field("type", "join")
.startObject("relations")
@ -319,7 +321,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.array("other", "child_other1", "child_other2")
.endObject()
.endObject()
.endObject().endObject().string();
.endObject().endObject());
docMapper = indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE);
assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService()));
@ -334,7 +336,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
}
public void testInvalidJoinFieldInsideObject() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("object")
.startObject("properties")
.startObject("join_field")
@ -345,7 +347,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject()
.endObject().endObject().string();
.endObject().endObject());
IndexService indexService = createIndex("test");
MapperParsingException exc = expectThrows(MapperParsingException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(mapping),
@ -355,7 +357,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
}
public void testInvalidJoinFieldInsideMultiFields() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("number")
.field("type", "integer")
.startObject("fields")
@ -367,7 +369,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject()
.endObject().endObject().string();
.endObject().endObject());
IndexService indexService = createIndex("test");
MapperParsingException exc = expectThrows(MapperParsingException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(mapping),
@ -379,7 +381,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
public void testMultipleJoinFields() throws Exception {
IndexService indexService = createIndex("test");
{
String mapping = XContentFactory.jsonBuilder().startObject()
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("join_field")
.field("type", "join")
@ -395,14 +397,14 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject()
.endObject().string();
.endObject());
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type",
new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE));
assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice in [type]"));
}
{
String mapping = XContentFactory.jsonBuilder().startObject()
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("join_field")
.field("type", "join")
@ -412,16 +414,16 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject()
.endObject().string();
.endObject());
indexService.mapperService().merge("type",
new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
String updateMapping = XContentFactory.jsonBuilder().startObject()
String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("another_join_field")
.field("type", "join")
.endObject()
.endObject()
.endObject().string();
.endObject());
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type",
new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE));
assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice in [type]"));
@ -429,7 +431,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
}
public void testEagerGlobalOrdinals() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject()
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("join_field")
.field("type", "join")
@ -439,7 +441,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject()
.endObject().string();
.endObject());
IndexService service = createIndex("test");
DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping),
MapperService.MergeReason.MAPPING_UPDATE);
@ -450,7 +452,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
assertNotNull(service.mapperService().fullName("join_field#child"));
assertTrue(service.mapperService().fullName("join_field#child").eagerGlobalOrdinals());
mapping = XContentFactory.jsonBuilder().startObject()
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("join_field")
.field("type", "join")
@ -461,7 +463,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject()
.endObject().string();
.endObject());
service.mapperService().merge("type", new CompressedXContent(mapping),
MapperService.MergeReason.MAPPING_UPDATE);
assertFalse(service.mapperService().fullName("join_field").eagerGlobalOrdinals());

View File

@ -33,6 +33,7 @@ import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
import org.apache.lucene.search.similarities.Similarity;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -132,7 +133,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
.endObject().endObject().endObject();
mapperService.merge(TYPE,
new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE);
new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
}
/**

View File

@ -24,6 +24,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -112,7 +113,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
.endObject().endObject().endObject();
mapperService.merge(TYPE,
new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE);
new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
}
/**

View File

@ -34,6 +34,7 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.MapperService;
@ -89,7 +90,7 @@ public class LegacyHasChildQueryBuilderTests extends AbstractQueryTestCase<HasCh
similarity = randomFrom("classic", "BM25");
// TODO: use a single type when inner hits have been changed to work with join field,
// this test randomly generates queries with inner hits
mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
mapperService.merge(PARENT_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
STRING_FIELD_NAME, "type=text",
STRING_FIELD_NAME_2, "type=keyword",
INT_FIELD_NAME, "type=integer",
@ -97,8 +98,8 @@ public class LegacyHasChildQueryBuilderTests extends AbstractQueryTestCase<HasCh
BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object"
).string()), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
))), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge(CHILD_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
"_parent", "type=" + PARENT_TYPE,
STRING_FIELD_NAME, "type=text",
"custom_string", "type=text,similarity=" + similarity,
@ -107,7 +108,7 @@ public class LegacyHasChildQueryBuilderTests extends AbstractQueryTestCase<HasCh
BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object"
).string()), MapperService.MergeReason.MAPPING_UPDATE);
))), MapperService.MergeReason.MAPPING_UPDATE);
}
@Override

View File

@ -24,6 +24,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.MapperService;
@ -80,7 +81,7 @@ public class LegacyHasParentQueryBuilderTests extends AbstractQueryTestCase<HasP
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
// TODO: use a single type when inner hits have been changed to work with join field,
// this test randomly generates queries with inner hits
mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
mapperService.merge(PARENT_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
STRING_FIELD_NAME, "type=text",
STRING_FIELD_NAME_2, "type=keyword",
INT_FIELD_NAME, "type=integer",
@ -88,8 +89,8 @@ public class LegacyHasParentQueryBuilderTests extends AbstractQueryTestCase<HasP
BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object"
).string()), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
))), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge(CHILD_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
"_parent", "type=" + PARENT_TYPE,
STRING_FIELD_NAME, "type=text",
STRING_FIELD_NAME_2, "type=keyword",
@ -98,9 +99,9 @@ public class LegacyHasParentQueryBuilderTests extends AbstractQueryTestCase<HasP
BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object"
).string()), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge("just_a_type", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("just_a_type"
).string()), MapperService.MergeReason.MAPPING_UPDATE);
))), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge("just_a_type", new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef("just_a_type"
))), MapperService.MergeReason.MAPPING_UPDATE);
}
/**

View File

@ -26,6 +26,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.MapperService;
@ -65,15 +66,15 @@ public class LegacyParentIdQueryBuilderTests extends AbstractQueryTestCase<Paren
@Override
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
mapperService.merge(PARENT_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
STRING_FIELD_NAME, "type=text",
INT_FIELD_NAME, "type=integer",
DOUBLE_FIELD_NAME, "type=double",
BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object"
).string()), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
))), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge(CHILD_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
"_parent", "type=" + PARENT_TYPE,
STRING_FIELD_NAME, "type=text",
INT_FIELD_NAME, "type=integer",
@ -81,7 +82,7 @@ public class LegacyParentIdQueryBuilderTests extends AbstractQueryTestCase<Paren
BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object"
).string()), MapperService.MergeReason.MAPPING_UPDATE);
))), MapperService.MergeReason.MAPPING_UPDATE);
}
@Override

View File

@ -20,6 +20,7 @@ package org.elasticsearch.join.query;
import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
@ -83,7 +84,7 @@ public abstract class ParentChildTestCase extends ESIntegTestCase {
protected IndexRequestBuilder createIndexRequest(String index, String type, String id, String parentId,
XContentBuilder builder) throws IOException {
Map<String, Object> source = XContentHelper.convertToMap(JsonXContent.jsonXContent, builder.string(), false);
Map<String, Object> source = XContentHelper.convertToMap(JsonXContent.jsonXContent, Strings.toString(builder), false);
return createIndexRequest(index, type, id, parentId, source);
}

View File

@ -22,18 +22,16 @@ package org.elasticsearch.join.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DocValuesTermsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.join.ParentJoinPlugin;
import org.elasticsearch.plugins.Plugin;
@ -104,7 +102,7 @@ public class ParentIdQueryBuilderTests extends AbstractQueryTestCase<ParentIdQue
.endObject().endObject().endObject();
mapperService.merge(TYPE,
new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE);
new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
}
@Override

View File

@ -416,7 +416,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
builder.copyCurrentStructure(parser);
builder.flush();
documents.add(builder.bytes());
documents.add(BytesReference.bytes(builder));
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + PercolateQueryBuilder.NAME +
@ -437,7 +437,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
builder.copyCurrentStructure(parser);
builder.flush();
documents.add(builder.bytes());
documents.add(BytesReference.bytes(builder));
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + PercolateQueryBuilder.NAME +

View File

@ -422,7 +422,7 @@ public class PercolatorFieldMapper extends FieldMapper {
try (XContentBuilder builder = XContentFactory.contentBuilder(QUERY_BUILDER_CONTENT_TYPE)) {
queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap()));
builder.flush();
byte[] queryBuilderAsBytes = BytesReference.toBytes(builder.bytes());
byte[] queryBuilderAsBytes = BytesReference.toBytes(BytesReference.bytes(builder));
context.doc().add(new Field(qbField.name(), queryBuilderAsBytes, qbField.fieldType()));
}
}

View File

@ -75,6 +75,7 @@ import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
@ -140,7 +141,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
IndexService indexService = createIndex(indexName, Settings.EMPTY);
mapperService = indexService.mapperService();
String mapper = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("int_field").field("type", "integer").endObject()
.startObject("long_field").field("type", "long").endObject()
@ -149,13 +150,13 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
.startObject("double_field").field("type", "double").endObject()
.startObject("ip_field").field("type", "ip").endObject()
.startObject("field").field("type", "keyword").endObject()
.endObject().endObject().endObject().string();
.endObject().endObject().endObject());
documentMapper = mapperService.merge("type", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE);
String queryField = "query_field";
String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject("type")
String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject(queryField).field("type", "percolator").endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
mapperService.merge("type", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
fieldMapper = (PercolatorFieldMapper) mapperService.documentMapper("type").mappers().getMapper(queryField);
fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType();

View File

@ -31,6 +31,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
@ -96,12 +97,12 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQ
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
queryField = randomAlphaOfLength(4);
String docType = "_doc";
mapperService.merge(docType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(docType,
mapperService.merge(docType, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType,
queryField, "type=percolator"
).string()), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge(docType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(docType,
))), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge(docType, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType,
STRING_FIELD_NAME, "type=text"
).string()), MapperService.MergeReason.MAPPING_UPDATE);
))), MapperService.MergeReason.MAPPING_UPDATE);
if (mapperService.getIndexSettings().isSingleType() == false) {
PercolateQueryBuilderTests.docType = docType;
}
@ -339,7 +340,7 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQ
XContentBuilder xContent = XContentFactory.jsonBuilder();
xContent.map(source);
return xContent.bytes();
return BytesReference.bytes(xContent);
} catch (IOException e) {
throw new UncheckedIOException(e);
}

View File

@ -42,7 +42,9 @@ import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.hash.MurmurHash3;
@ -141,7 +143,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
indexService = createIndex("test");
mapperService = indexService.mapperService();
String mapper = XContentFactory.jsonBuilder().startObject().startObject("doc")
String mapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("doc")
.startObject("_field_names").field("enabled", false).endObject() // makes testing easier
.startObject("properties")
.startObject("field").field("type", "text").endObject()
@ -157,15 +159,15 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
.startObject("number_field6").field("type", "double").endObject()
.startObject("number_field7").field("type", "ip").endObject()
.startObject("date_field").field("type", "date").endObject()
.endObject().endObject().endObject().string();
.endObject().endObject().endObject());
mapperService.merge("doc", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE);
}
private void addQueryFieldMappings() throws Exception {
fieldName = randomAlphaOfLength(4);
String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject("doc")
String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("doc")
.startObject("properties").startObject(fieldName).field("type", "percolator").endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
fieldType = (PercolatorFieldMapper.FieldType) mapperService.fullName(fieldName);
}
@ -459,12 +461,13 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
public void testPercolatorFieldMapper() throws Exception {
addQueryFieldMappings();
QueryBuilder queryBuilder = termQuery("field", "value");
ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory
.jsonBuilder()
.startObject()
.field(fieldName, queryBuilder)
.endObject().bytes(),
XContentType.JSON));
ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
BytesReference.bytes(XContentFactory
.jsonBuilder()
.startObject()
.field(fieldName, queryBuilder)
.endObject()),
XContentType.JSON));
assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name()).length, equalTo(1));
assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name())[0].binaryValue().utf8ToString(), equalTo("field\0value"));
@ -477,11 +480,11 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
// add an query for which we don't extract terms from
queryBuilder = rangeQuery("field").from("a").to("z");
doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory
doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference.bytes(XContentFactory
.jsonBuilder()
.startObject()
.field(fieldName, queryBuilder)
.endObject().bytes(),
.endObject()),
XContentType.JSON));
assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name()).length, equalTo(1));
assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name())[0].stringValue(),
@ -504,9 +507,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
for (QueryBuilder query : queries) {
ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
XContentFactory.jsonBuilder().startObject()
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field(fieldName, query)
.endObject().bytes(),
.endObject()),
XContentType.JSON));
BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
assertQueryBuilder(qbSource, query);
@ -517,12 +520,13 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
addQueryFieldMappings();
client().prepareIndex("remote", "doc", "1").setSource("field", "value").get();
QueryBuilder queryBuilder = termsLookupQuery("field", new TermsLookup("remote", "doc", "1", "field"));
ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory
.jsonBuilder()
.startObject()
.field(fieldName, queryBuilder)
.endObject().bytes(),
XContentType.JSON));
ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
BytesReference.bytes(XContentFactory
.jsonBuilder()
.startObject()
.field(fieldName, queryBuilder)
.endObject()),
XContentType.JSON));
BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
QueryShardContext shardContext = indexService.newQueryShardContext(
randomInt(20), null, () -> {
@ -537,11 +541,11 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
public void testPercolatorFieldMapperUnMappedField() throws Exception {
addQueryFieldMappings();
MapperParsingException exception = expectThrows(MapperParsingException.class, () -> {
mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory
mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference.bytes(XContentFactory
.jsonBuilder()
.startObject()
.field(fieldName, termQuery("unmapped_field", "value"))
.endObject().bytes(),
.endObject()),
XContentType.JSON));
});
assertThat(exception.getCause(), instanceOf(QueryShardException.class));
@ -551,20 +555,20 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
public void testPercolatorFieldMapper_noQuery() throws Exception {
addQueryFieldMappings();
ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory
.jsonBuilder()
.startObject()
.endObject()
.bytes(),
ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference
.bytes(XContentFactory
.jsonBuilder()
.startObject()
.endObject()),
XContentType.JSON));
assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(0));
try {
mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory
mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference.bytes(XContentFactory
.jsonBuilder()
.startObject()
.nullField(fieldName)
.endObject().bytes(),
.endObject()),
XContentType.JSON));
} catch (MapperParsingException e) {
assertThat(e.getDetailedMessage(), containsString("query malformed, must start with start_object"));
@ -576,9 +580,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
IndexService indexService = createIndex("test1", Settings.EMPTY);
MapperService mapperService = indexService.mapperService();
String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject("doc")
String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("doc")
.startObject("properties").startObject(fieldName).field("type", "percolator").field("index", "no").endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
MapperParsingException e = expectThrows(MapperParsingException.class, () ->
mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE));
assertThat(e.getMessage(), containsString("Mapping definition for [" + fieldName + "] has unsupported parameters: [index : no]"));
@ -587,21 +591,21 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
// multiple percolator fields are allowed in the mapping, but only one field can be used at index time.
public void testMultiplePercolatorFields() throws Exception {
String typeName = "doc";
String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(typeName)
String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(typeName)
.startObject("_field_names").field("enabled", false).endObject() // makes testing easier
.startObject("properties")
.startObject("query_field1").field("type", "percolator").endObject()
.startObject("query_field2").field("type", "percolator").endObject()
.endObject()
.endObject().endObject().string();
.endObject().endObject());
mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
QueryBuilder queryBuilder = matchQuery("field", "value");
ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
jsonBuilder().startObject()
BytesReference.bytes(jsonBuilder().startObject()
.field("query_field1", queryBuilder)
.field("query_field2", queryBuilder)
.endObject().bytes(),
.endObject()),
XContentType.JSON));
assertThat(doc.rootDoc().getFields().size(), equalTo(14)); // also includes all other meta fields
BytesRef queryBuilderAsBytes = doc.rootDoc().getField("query_field1.query_builder_field").binaryValue();
@ -614,7 +618,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
// percolator field can be nested under an object field, but only one query can be specified per document
public void testNestedPercolatorField() throws Exception {
String typeName = "doc";
String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(typeName)
String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(typeName)
.startObject("_field_names").field("enabled", false).endObject() // makes testing easier
.startObject("properties")
.startObject("object_field")
@ -624,25 +628,25 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject()
.endObject()
.endObject().endObject().string();
.endObject().endObject());
mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
QueryBuilder queryBuilder = matchQuery("field", "value");
ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
jsonBuilder().startObject().startObject("object_field")
BytesReference.bytes(jsonBuilder().startObject().startObject("object_field")
.field("query_field", queryBuilder)
.endObject().endObject().bytes(),
.endObject().endObject()),
XContentType.JSON));
assertThat(doc.rootDoc().getFields().size(), equalTo(10)); // also includes all other meta fields
BytesRef queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue();
assertQueryBuilder(queryBuilderAsBytes, queryBuilder);
doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
jsonBuilder().startObject()
BytesReference.bytes(jsonBuilder().startObject()
.startArray("object_field")
.startObject().field("query_field", queryBuilder).endObject()
.endArray()
.endObject().bytes(),
.endObject()),
XContentType.JSON));
assertThat(doc.rootDoc().getFields().size(), equalTo(10)); // also includes all other meta fields
queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue();
@ -650,12 +654,12 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
MapperParsingException e = expectThrows(MapperParsingException.class, () -> {
mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
jsonBuilder().startObject()
BytesReference.bytes(jsonBuilder().startObject()
.startArray("object_field")
.startObject().field("query_field", queryBuilder).endObject()
.startObject().field("query_field", queryBuilder).endObject()
.endArray()
.endObject().bytes(),
.endObject()),
XContentType.JSON));
}
);
@ -708,9 +712,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
}
public void testEmptyName() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("").field("type", "percolator").endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapperParser parser = mapperService.documentMapperParser();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
@ -735,9 +739,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
query.endObject();
ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
XContentFactory.jsonBuilder().startObject()
.rawField(fieldName, new BytesArray(query.string()), query.contentType())
.endObject().bytes(),
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.rawField(fieldName, new BytesArray(Strings.toString(query)).streamInput(), query.contentType())
.endObject()),
XContentType.JSON));
BytesRef querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
try (InputStream in = new ByteArrayInputStream(querySource.bytes, querySource.offset, querySource.length)) {
@ -773,9 +777,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
query.endObject();
doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
XContentFactory.jsonBuilder().startObject()
.rawField(fieldName, new BytesArray(query.string()), query.contentType())
.endObject().bytes(),
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.rawField(fieldName, new BytesArray(Strings.toString(query)).streamInput(), query.contentType())
.endObject()),
XContentType.JSON));
querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
try (InputStream in = new ByteArrayInputStream(querySource.bytes, querySource.offset, querySource.length)) {
@ -859,9 +863,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
.must(boolQuery().must(termQuery("field", "value1")).must(termQuery("field", "value2")))
.must(boolQuery().must(termQuery("field", "value2")).must(termQuery("field", "value3")));
ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
XContentFactory.jsonBuilder().startObject()
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field(fieldName, qb)
.endObject().bytes(),
.endObject()),
XContentType.JSON));
List<String> values = Arrays.stream(doc.rootDoc().getFields(fieldType.queryTermsField.name()))
@ -881,9 +885,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
.must(boolQuery().must(termQuery("field", "value3")).must(termQuery("field", "value4")))
.must(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5")));
doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
XContentFactory.jsonBuilder().startObject()
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field(fieldName, qb)
.endObject().bytes(),
.endObject()),
XContentType.JSON));
values = Arrays.stream(doc.rootDoc().getFields(fieldType.queryTermsField.name()))
@ -906,9 +910,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
.should(boolQuery().should(termQuery("field", "value3")).should(termQuery("field", "value4")))
.should(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5")));
doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
XContentFactory.jsonBuilder().startObject()
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field(fieldName, qb)
.endObject().bytes(),
.endObject()),
XContentType.JSON));
values = Arrays.stream(doc.rootDoc().getFields(fieldType.queryTermsField.name()))

View File

@ -85,7 +85,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
).endObject()).get();
client().admin().indices().prepareRefresh().get();
BytesReference source = jsonBuilder().startObject().endObject().bytes();
BytesReference source = BytesReference.bytes(jsonBuilder().startObject().endObject());
logger.info("percolating empty doc");
SearchResponse response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
@ -93,7 +93,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
source = jsonBuilder().startObject().field("field1", "value").endObject().bytes();
source = BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject());
logger.info("percolating doc with 1 field");
response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
@ -105,7 +105,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(1).getId(), equalTo("2"));
assertThat(response.getHits().getAt(1).getFields().get("_percolator_document_slot").getValue(), equalTo(0));
source = jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject().bytes();
source = BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject());
logger.info("percolating doc with 2 fields");
response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
@ -122,8 +122,8 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
logger.info("percolating doc with 2 fields");
response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", Arrays.asList(
jsonBuilder().startObject().field("field1", "value").endObject().bytes(),
jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject().bytes()
BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()),
BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject())
), XContentType.JSON))
.addSort("_uid", SortOrder.ASC)
.get();
@ -189,7 +189,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
client().admin().indices().prepareRefresh().get();
// Test long range:
BytesReference source = jsonBuilder().startObject().field("field1", 12).endObject().bytes();
BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field1", 12).endObject());
SearchResponse response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.get();
@ -198,7 +198,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getId(), equalTo("3"));
assertThat(response.getHits().getAt(1).getId(), equalTo("1"));
source = jsonBuilder().startObject().field("field1", 11).endObject().bytes();
source = BytesReference.bytes(jsonBuilder().startObject().field("field1", 11).endObject());
response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.get();
@ -206,7 +206,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
// Test double range:
source = jsonBuilder().startObject().field("field2", 12).endObject().bytes();
source = BytesReference.bytes(jsonBuilder().startObject().field("field2", 12).endObject());
response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.get();
@ -214,7 +214,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getId(), equalTo("6"));
assertThat(response.getHits().getAt(1).getId(), equalTo("4"));
source = jsonBuilder().startObject().field("field2", 11).endObject().bytes();
source = BytesReference.bytes(jsonBuilder().startObject().field("field2", 11).endObject());
response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.get();
@ -222,7 +222,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getId(), equalTo("4"));
// Test IP range:
source = jsonBuilder().startObject().field("field3", "192.168.1.5").endObject().bytes();
source = BytesReference.bytes(jsonBuilder().startObject().field("field3", "192.168.1.5").endObject());
response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.get();
@ -230,7 +230,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getId(), equalTo("9"));
assertThat(response.getHits().getAt(1).getId(), equalTo("7"));
source = jsonBuilder().startObject().field("field3", "192.168.1.4").endObject().bytes();
source = BytesReference.bytes(jsonBuilder().startObject().field("field3", "192.168.1.4").endObject());
response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.get();
@ -238,7 +238,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getId(), equalTo("7"));
// Test date range:
source = jsonBuilder().startObject().field("field4", "2016-05-15").endObject().bytes();
source = BytesReference.bytes(jsonBuilder().startObject().field("field4", "2016-05-15").endObject());
response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.get();
@ -267,9 +267,9 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
.endObject()).get();
refresh();
BytesReference source = jsonBuilder().startObject()
BytesReference source = BytesReference.bytes(jsonBuilder().startObject()
.startObject("field1").field("lat", 52.20).field("lon", 4.51).endObject()
.endObject().bytes();
.endObject());
SearchResponse response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.addSort("_id", SortOrder.ASC)
@ -402,10 +402,10 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
.get();
client().admin().indices().prepareRefresh().get();
BytesReference source = jsonBuilder().startObject()
BytesReference source = BytesReference.bytes(jsonBuilder().startObject()
.field("field1", "the quick brown fox jumps over the lazy dog")
.field("field2", "the quick brown fox falls down into the well")
.endObject().bytes();
.endObject());
SearchResponse response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.addSort("_uid", SortOrder.ASC)
@ -449,9 +449,9 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
.execute().actionGet();
client().admin().indices().prepareRefresh().get();
BytesReference document = jsonBuilder().startObject()
BytesReference document = BytesReference.bytes(jsonBuilder().startObject()
.field("field1", "The quick brown fox jumps over the lazy dog")
.endObject().bytes();
.endObject());
SearchResponse searchResponse = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", document, XContentType.JSON))
.highlighter(new HighlightBuilder().field("field1"))
@ -470,12 +470,12 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("field1").fragments()[0].string(),
equalTo("The quick brown <em>fox</em> jumps over the lazy dog"));
BytesReference document1 = jsonBuilder().startObject()
BytesReference document1 = BytesReference.bytes(jsonBuilder().startObject()
.field("field1", "The quick brown fox jumps")
.endObject().bytes();
BytesReference document2 = jsonBuilder().startObject()
.endObject());
BytesReference document2 = BytesReference.bytes(jsonBuilder().startObject()
.field("field1", "over the lazy dog")
.endObject().bytes();
.endObject());
searchResponse = client().prepareSearch()
.setQuery(boolQuery()
.should(new PercolateQueryBuilder("query", document1, XContentType.JSON).setName("query1"))
@ -500,10 +500,10 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
searchResponse = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", Arrays.asList(
jsonBuilder().startObject().field("field1", "dog").endObject().bytes(),
jsonBuilder().startObject().field("field1", "fox").endObject().bytes(),
jsonBuilder().startObject().field("field1", "jumps").endObject().bytes(),
jsonBuilder().startObject().field("field1", "brown fox").endObject().bytes()
BytesReference.bytes(jsonBuilder().startObject().field("field1", "dog").endObject()),
BytesReference.bytes(jsonBuilder().startObject().field("field1", "fox").endObject()),
BytesReference.bytes(jsonBuilder().startObject().field("field1", "jumps").endObject()),
BytesReference.bytes(jsonBuilder().startObject().field("field1", "brown fox").endObject())
), XContentType.JSON))
.highlighter(new HighlightBuilder().field("field1"))
.addSort("_uid", SortOrder.ASC)
@ -537,12 +537,12 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
searchResponse = client().prepareSearch()
.setQuery(boolQuery()
.should(new PercolateQueryBuilder("query", Arrays.asList(
jsonBuilder().startObject().field("field1", "dog").endObject().bytes(),
jsonBuilder().startObject().field("field1", "fox").endObject().bytes()
BytesReference.bytes(jsonBuilder().startObject().field("field1", "dog").endObject()),
BytesReference.bytes(jsonBuilder().startObject().field("field1", "fox").endObject())
), XContentType.JSON).setName("query1"))
.should(new PercolateQueryBuilder("query", Arrays.asList(
jsonBuilder().startObject().field("field1", "jumps").endObject().bytes(),
jsonBuilder().startObject().field("field1", "brown fox").endObject().bytes()
BytesReference.bytes(jsonBuilder().startObject().field("field1", "jumps").endObject()),
BytesReference.bytes(jsonBuilder().startObject().field("field1", "brown fox").endObject())
), XContentType.JSON).setName("query2"))
)
.highlighter(new HighlightBuilder().field("field1"))
@ -664,7 +664,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
.get();
client().admin().indices().prepareRefresh().get();
BytesReference source = jsonBuilder().startObject().field("field", "value").endObject().bytes();
BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field", "value").endObject());
SearchResponse response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder(queryFieldName, source, XContentType.JSON))
.setIndices("test1")
@ -718,13 +718,13 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
SearchResponse response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query",
XContentFactory.jsonBuilder()
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("companyname", "stark")
.startArray("employee")
.startObject().field("name", "virginia potts").endObject()
.startObject().field("name", "tony stark").endObject()
.endArray()
.endObject().bytes(), XContentType.JSON))
.endObject()), XContentType.JSON))
.addSort("_doc", SortOrder.ASC)
.get();
assertHitCount(response, 1);
@ -732,20 +732,20 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query",
XContentFactory.jsonBuilder()
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("companyname", "notstark")
.startArray("employee")
.startObject().field("name", "virginia stark").endObject()
.startObject().field("name", "tony stark").endObject()
.endArray()
.endObject().bytes(), XContentType.JSON))
.endObject()), XContentType.JSON))
.addSort("_doc", SortOrder.ASC)
.get();
assertHitCount(response, 0);
response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query",
XContentFactory.jsonBuilder().startObject().field("companyname", "notstark").endObject().bytes(),
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("companyname", "notstark").endObject()),
XContentType.JSON))
.addSort("_doc", SortOrder.ASC)
.get();
@ -753,20 +753,20 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", Arrays.asList(
XContentFactory.jsonBuilder()
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("companyname", "stark")
.startArray("employee")
.startObject().field("name", "virginia potts").endObject()
.startObject().field("name", "tony stark").endObject()
.endArray()
.endObject().bytes(),
XContentFactory.jsonBuilder()
.endObject()),
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("companyname", "stark")
.startArray("employee")
.startObject().field("name", "peter parker").endObject()
.startObject().field("name", "virginia potts").endObject()
.endArray()
.endObject().bytes()
.endObject())
), XContentType.JSON))
.addSort("_doc", SortOrder.ASC)
.get();
@ -803,16 +803,16 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
MultiSearchResponse response = client().prepareMultiSearch()
.add(client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query",
jsonBuilder().startObject().field("field1", "b").endObject().bytes(), XContentType.JSON)))
BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()), XContentType.JSON)))
.add(client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query",
yamlBuilder().startObject().field("field1", "c").endObject().bytes(), XContentType.YAML)))
BytesReference.bytes(yamlBuilder().startObject().field("field1", "c").endObject()), XContentType.YAML)))
.add(client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query",
smileBuilder().startObject().field("field1", "b c").endObject().bytes(), XContentType.SMILE)))
BytesReference.bytes(smileBuilder().startObject().field("field1", "b c").endObject()), XContentType.SMILE)))
.add(client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query",
jsonBuilder().startObject().field("field1", "d").endObject().bytes(), XContentType.JSON)))
BytesReference.bytes(jsonBuilder().startObject().field("field1", "d").endObject()), XContentType.JSON)))
.add(client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query", "test", "type", "5", null, null, null)))
.add(client().prepareSearch("test") // non existing doc, so error element

View File

@ -21,6 +21,7 @@ package org.elasticsearch.percolator;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -79,7 +80,7 @@ public class PercolatorQuerySearchTests extends ESSingleNodeTestCase {
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.execute().actionGet();
SearchResponse response = client().prepareSearch("index")
.setQuery(new PercolateQueryBuilder("query", jsonBuilder().startObject().field("field1", "b").endObject().bytes(),
.setQuery(new PercolateQueryBuilder("query", BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()),
XContentType.JSON))
.get();
assertHitCount(response, 1);
@ -108,13 +109,13 @@ public class PercolatorQuerySearchTests extends ESSingleNodeTestCase {
for (int i = 0; i < 32; i++) {
SearchResponse response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query",
XContentFactory.jsonBuilder()
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("companyname", "stark")
.startArray("employee")
.startObject().field("name", "virginia potts").endObject()
.startObject().field("name", "tony stark").endObject()
.endArray()
.endObject().bytes(), XContentType.JSON))
.endObject()), XContentType.JSON))
.addSort("_doc", SortOrder.ASC)
// size 0, because other wise load bitsets for normal document in FetchPhase#findRootDocumentIfNested(...)
.setSize(0)
@ -192,7 +193,7 @@ public class PercolatorQuerySearchTests extends ESSingleNodeTestCase {
doc.endObject();
for (int i = 0; i < 32; i++) {
SearchResponse response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", doc.bytes(), XContentType.JSON))
.setQuery(new PercolateQueryBuilder("query", BytesReference.bytes(doc), XContentType.JSON))
.addSort("_doc", SortOrder.ASC)
.get();
assertHitCount(response, 1);
@ -212,8 +213,9 @@ public class PercolatorQuerySearchTests extends ESSingleNodeTestCase {
client().admin().indices().prepareRefresh().get();
SearchResponse response = client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query", jsonBuilder().startObject().field("field1", "value").endObject().bytes(),
XContentType.JSON))
.setQuery(new PercolateQueryBuilder("query",
BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()),
XContentType.JSON))
.get();
assertHitCount(response, 1);
assertSearchHits(response, "1");

View File

@ -152,7 +152,7 @@ public class RankEvalResponseTests extends ESTestCase {
RankEvalResponse response = new RankEvalResponse(0.123, Collections.singletonMap("coffee_query", coffeeQueryQuality),
Collections.singletonMap("beer_query", new ParsingException(new XContentLocation(0, 0), "someMsg")));
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
String xContent = response.toXContent(builder, ToXContent.EMPTY_PARAMS).bytes().utf8ToString();
String xContent = BytesReference.bytes(response.toXContent(builder, ToXContent.EMPTY_PARAMS)).utf8ToString();
assertEquals(("{" +
" \"quality_level\": 0.123," +
" \"details\": {" +

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.rankeval;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@ -85,7 +86,7 @@ public class RankEvalSpecTests extends ESTestCase {
builder.startObject();
builder.field("field", randomAlphaOfLengthBetween(1, 5));
builder.endObject();
script = builder.string();
script = Strings.toString(builder);
}
templates = new HashSet<>();
@ -115,7 +116,7 @@ public class RankEvalSpecTests extends ESTestCase {
public void testXContentRoundtrip() throws IOException {
RankEvalSpec testItem = createTestItem();
XContentBuilder shuffled = shuffleXContent(testItem.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS));
try (XContentParser parser = createParser(JsonXContent.jsonXContent, shuffled.bytes())) {
try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(shuffled))) {
RankEvalSpec parsedItem = RankEvalSpec.parse(parser);
// indices, come from URL parameters, so they don't survive xContent roundtrip

View File

@ -21,8 +21,8 @@ package org.elasticsearch.index.reindex;
import org.elasticsearch.action.GenericAction;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
@ -91,7 +91,7 @@ public abstract class AbstractBulkByQueryRestHandler<
}
}
return parser.contentType().xContent().createParser(parser.getXContentRegistry(),
parser.getDeprecationHandler(), builder.map(body).bytes().streamInput());
parser.getDeprecationHandler(), BytesReference.bytes(builder.map(body)).streamInput());
}
}
}

View File

@ -27,7 +27,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContent;
@ -75,7 +74,7 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexReq
request.setRemoteInfo(buildRemoteInfo(source));
XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
builder.map(source);
try (InputStream stream = builder.bytes().streamInput();
try (InputStream stream = BytesReference.bytes(builder).streamInput();
XContentParser innerParser = parser.contentType().xContent()
.createParser(parser.getXContentRegistry(), parser.getDeprecationHandler(), stream)) {
request.getSearchRequest().source().parseXContent(innerParser);
@ -214,13 +213,13 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexReq
XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
Object query = source.remove("query");
if (query == null) {
return matchAllQuery().toXContent(builder, ToXContent.EMPTY_PARAMS).bytes();
return BytesReference.bytes(matchAllQuery().toXContent(builder, ToXContent.EMPTY_PARAMS));
}
if (!(query instanceof Map)) {
throw new IllegalArgumentException("Expected [query] to be an object but was [" + query + "]");
}
@SuppressWarnings("unchecked")
Map<String, Object> map = (Map<String, Object>) query;
return builder.map(map).bytes();
return BytesReference.bytes(builder.map(map));
}
}

View File

@ -37,8 +37,8 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.action.bulk.BulkItemResponse.Failure;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
@ -345,7 +345,7 @@ public class TransportReindexAction extends HandledTransportAction<ReindexReques
XContentBuilder builder = XContentBuilder.builder(mainRequestXContentType.xContent())) {
parser.nextToken();
builder.copyCurrentStructure(parser);
index.source(builder.bytes(), builder.contentType());
index.source(BytesReference.bytes(builder), builder.contentType());
} catch (IOException e) {
throw new UncheckedIOException("failed to convert hit from " + sourceXContentType + " to "
+ mainRequestXContentType, e);

View File

@ -155,7 +155,7 @@ final class RemoteRequestBuilders {
}
entity.endObject();
BytesRef bytes = entity.bytes().toBytesRef();
BytesRef bytes = BytesReference.bytes(entity).toBytesRef();
return new ByteArrayEntity(bytes.bytes, bytes.offset, bytes.length, ContentType.APPLICATION_JSON);
} catch (IOException e) {
throw new ElasticsearchException("unexpected error building entity", e);
@ -209,9 +209,9 @@ final class RemoteRequestBuilders {
return new StringEntity(scroll, ContentType.TEXT_PLAIN);
}
try (XContentBuilder entity = JsonXContent.contentBuilder()) {
return new StringEntity(entity.startObject()
return new StringEntity(Strings.toString(entity.startObject()
.field("scroll_id", scroll)
.endObject().string(), ContentType.APPLICATION_JSON);
.endObject()), ContentType.APPLICATION_JSON);
} catch (IOException e) {
throw new ElasticsearchException("failed to build scroll entity", e);
}
@ -223,9 +223,9 @@ final class RemoteRequestBuilders {
return new StringEntity(scroll, ContentType.TEXT_PLAIN);
}
try (XContentBuilder entity = JsonXContent.contentBuilder()) {
return new StringEntity(entity.startObject()
return new StringEntity(Strings.toString(entity.startObject()
.array("scroll_id", scroll)
.endObject().string(), ContentType.APPLICATION_JSON);
.endObject()), ContentType.APPLICATION_JSON);
} catch (IOException e) {
throw new ElasticsearchException("failed to build clear scroll entity", e);
}

View File

@ -20,6 +20,7 @@
package org.elasticsearch.index.reindex.remote;
import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.index.reindex.ScrollableHitSource.BasicHit;
import org.elasticsearch.index.reindex.ScrollableHitSource.Hit;
import org.elasticsearch.index.reindex.ScrollableHitSource.Response;
@ -78,7 +79,7 @@ final class RemoteResponseParsers {
try (XContentBuilder b = XContentBuilder.builder(s.xContent())) {
b.copyCurrentStructure(p);
// a hack but this lets us get the right xcontent type to go with the source
return new Tuple<>(b.bytes(), s);
return new Tuple<>(BytesReference.bytes(b), s);
}
} catch (IOException e) {
throw new ParsingException(p.getTokenLocation(), "[hit] failed to parse [_source]", e);

View File

@ -118,7 +118,7 @@ public class RestReindexActionTests extends ESTestCase {
b.endObject();
}
b.endObject();
request = b.bytes();
request = BytesReference.bytes(b);
}
try (XContentParser p = createParser(JsonXContent.jsonXContent, request)) {
ReindexRequest r = new ReindexRequest(new SearchRequest(), new IndexRequest());
@ -144,7 +144,7 @@ public class RestReindexActionTests extends ESTestCase {
body.endObject();
}
body.endObject();
request.withContent(body.bytes(), body.contentType());
request.withContent(BytesReference.bytes(body), body.contentType());
}
request.withParams(singletonMap("pipeline", "doesn't matter"));
Exception e = expectThrows(IllegalArgumentException.class, () -> action.buildRequest(request.build()));

View File

@ -22,6 +22,7 @@ package org.elasticsearch.rest;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.elasticsearch.client.Response;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.hamcrest.Matcher;
@ -57,7 +58,7 @@ public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase {
}
builder.endObject();
client().performRequest("PUT", "/" + indexName + "/" + typeName + "/" + "1", emptyMap(),
new StringEntity(builder.string(), ContentType.APPLICATION_JSON));
new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON));
}
}
@ -108,7 +109,8 @@ public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase {
}
builder.endObject();
client().performRequest("POST", "_aliases", emptyMap(), new StringEntity(builder.string(), ContentType.APPLICATION_JSON));
client().performRequest("POST", "_aliases", emptyMap(), new StringEntity(Strings.toString(builder),
ContentType.APPLICATION_JSON));
headTestCase("/_alias/test_alias", emptyMap(), greaterThan(0));
headTestCase("/test/_alias/test_alias", emptyMap(), greaterThan(0));
}
@ -134,7 +136,7 @@ public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase {
builder.endObject();
client().performRequest("PUT", "/_template/template", emptyMap(),
new StringEntity(builder.string(), ContentType.APPLICATION_JSON));
new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON));
headTestCase("/_template/template", emptyMap(), greaterThan(0));
}
}
@ -162,7 +164,8 @@ public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase {
builder.endObject();
}
builder.endObject();
client().performRequest("PUT", "/test-no-source", emptyMap(), new StringEntity(builder.string(), ContentType.APPLICATION_JSON));
client().performRequest("PUT", "/test-no-source", emptyMap(), new StringEntity(Strings.toString(builder),
ContentType.APPLICATION_JSON));
createTestDoc("test-no-source", "test-no-source");
headTestCase("/test-no-source/test-no-source/1/_source", emptyMap(), NOT_FOUND.getStatus(), equalTo(0));
}

View File

@ -29,6 +29,8 @@ import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -64,19 +66,19 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
}
public void testDefaults() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()
.bytes(),
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
@ -108,19 +110,19 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
indexService = createIndex("oldindex", Settings.builder().put("index.version.created", Version.V_5_5_0).build());
parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("oldindex", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()
.bytes(),
ParsedDocument doc = mapper.parse(SourceToParse.source("oldindex", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
@ -149,44 +151,44 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
}
public void testNullValue() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.nullField("field")
.endObject()
.bytes(),
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.nullField("field")
.endObject()),
XContentType.JSON));
assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field"));
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("null_value", "1234").endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.endObject()
.bytes(),
doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(0, fields.length);
doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.nullField("field")
.endObject()
.bytes(),
doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.nullField("field")
.endObject()),
XContentType.JSON));
Collator collator = Collator.getInstance(ULocale.ROOT);
@ -199,20 +201,20 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
}
public void testEnableStore() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("store", true).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()
.bytes(),
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
@ -221,20 +223,20 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
}
public void testDisableIndex() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("index", false).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()
.bytes(),
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
@ -244,20 +246,20 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
}
public void testDisableDocValues() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("doc_values", false).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()
.bytes(),
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
@ -266,19 +268,19 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
}
public void testMultipleValues() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", Arrays.asList("1234", "5678"))
.endObject()
.bytes(),
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", Arrays.asList("1234", "5678"))
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
@ -328,20 +330,20 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
}
public void testIndexOptions() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("index_options", "freqs").endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()
.bytes(),
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
@ -349,10 +351,10 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
assertEquals(IndexOptions.DOCS_AND_FREQS, fields[0].fieldType().indexOptions());
for (String indexOptions : Arrays.asList("positions", "offsets")) {
final String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type")
final String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("index_options", indexOptions).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping2)));
assertEquals("The [" + FIELD_TYPE + "] field does not support positions, got [index_options]=" + indexOptions,
@ -361,20 +363,20 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
}
public void testEnableNorms() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("norms", true).endObject().endObject()
.endObject().endObject().string();
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()
.bytes(),
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
@ -383,22 +385,22 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
}
public void testCollator() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", FIELD_TYPE)
.field("language", "tr")
.field("strength", "primary")
.endObject().endObject().endObject().endObject().string();
.endObject().endObject().endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "I WİLL USE TURKİSH CASING")
.endObject()
.bytes(),
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "I WİLL USE TURKİSH CASING")
.endObject()),
XContentType.JSON));
Collator collator = Collator.getInstance(new ULocale("tr"));
@ -428,19 +430,19 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
}
public void testUpdateCollator() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", FIELD_TYPE)
.field("language", "tr")
.field("strength", "primary")
.endObject().endObject().endObject().endObject().string();
.endObject().endObject().endObject().endObject());
indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", FIELD_TYPE)
.field("language", "en")
.endObject().endObject().endObject().endObject().string();
.endObject().endObject().endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> indexService.mapperService().merge("type",
new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE));

View File

@ -22,6 +22,8 @@ package org.elasticsearch.index.mapper.murmur3;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
@ -71,15 +73,15 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
}
public void testDefaults() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "murmur3")
.endObject().endObject().endObject().endObject().string();
.endObject().endObject().endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument parsedDoc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
ParsedDocument parsedDoc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.endObject().bytes(),
.endObject()),
XContentType.JSON));
IndexableField[] fields = parsedDoc.rootDoc().getFields("field");
assertNotNull(fields);
@ -90,11 +92,11 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
}
public void testDocValuesSettingNotAllowed() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "murmur3")
.field("doc_values", false)
.endObject().endObject().endObject().endObject().string();
.endObject().endObject().endObject().endObject());
try {
parser.parse("type", new CompressedXContent(mapping));
fail("expected a mapper parsing exception");
@ -103,11 +105,11 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
}
// even setting to the default is not allowed, the setting is invalid
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "murmur3")
.field("doc_values", true)
.endObject().endObject().endObject().endObject().string();
.endObject().endObject().endObject().endObject());
try {
parser.parse("type", new CompressedXContent(mapping));
fail("expected a mapper parsing exception");
@ -117,11 +119,11 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
}
public void testIndexSettingNotAllowed() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "murmur3")
.field("index", "not_analyzed")
.endObject().endObject().endObject().endObject().string();
.endObject().endObject().endObject().endObject());
try {
parser.parse("type", new CompressedXContent(mapping));
fail("expected a mapper parsing exception");
@ -130,11 +132,11 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
}
// even setting to the default is not allowed, the setting is invalid
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "murmur3")
.field("index", "no")
.endObject().endObject().endObject().endObject().string();
.endObject().endObject().endObject().endObject());
try {
parser.parse("type", new CompressedXContent(mapping));
fail("expected a mapper parsing exception");
@ -144,10 +146,10 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
}
public void testEmptyName() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("")
.field("type", "murmur3")
.endObject().endObject().endObject().endObject().string();
.endObject().endObject().endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping))

View File

@ -21,8 +21,7 @@ package org.elasticsearch.index.mapper.size;
import java.util.Collection;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
@ -30,11 +29,9 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.plugin.mapper.MapperSizePlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
@ -42,7 +39,6 @@ import org.elasticsearch.test.InternalSettingsPlugin;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.instanceOf;
import org.apache.lucene.index.IndexableField;
@ -56,11 +52,11 @@ public class SizeMappingTests extends ESSingleNodeTestCase {
IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=true");
DocumentMapper docMapper = service.mapperService().documentMapper("type");
BytesReference source = XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.endObject()
.bytes();
BytesReference source = BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.endObject());
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON));
boolean stored = false;
@ -77,11 +73,11 @@ public class SizeMappingTests extends ESSingleNodeTestCase {
IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=false");
DocumentMapper docMapper = service.mapperService().documentMapper("type");
BytesReference source = XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.endObject()
.bytes();
BytesReference source = BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.endObject());
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON));
assertThat(doc.rootDoc().getField("_size"), nullValue());
@ -91,11 +87,11 @@ public class SizeMappingTests extends ESSingleNodeTestCase {
IndexService service = createIndex("test", Settings.EMPTY, "type");
DocumentMapper docMapper = service.mapperService().documentMapper("type");
BytesReference source = XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.endObject()
.bytes();
BytesReference source = BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.endObject());
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON));
assertThat(doc.rootDoc().getField("_size"), nullValue());
@ -106,9 +102,9 @@ public class SizeMappingTests extends ESSingleNodeTestCase {
DocumentMapper docMapper = service.mapperService().documentMapper("type");
assertThat(docMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(true));
String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String disabledMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_size").field("enabled", false).endObject()
.endObject().endObject().string();
.endObject().endObject());
docMapper = service.mapperService().merge("type", new CompressedXContent(disabledMapping),
MapperService.MergeReason.MAPPING_UPDATE);

View File

@ -19,6 +19,7 @@
package org.elasticsearch.repositories.gcs;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.path.PathTrie;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -522,7 +523,7 @@ public class GoogleCloudStorageTestServer {
*/
private static Response newResponse(final RestStatus status, final Map<String, String> headers, final XContentBuilder xContentBuilder) {
try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
xContentBuilder.bytes().writeTo(out);
BytesReference.bytes(xContentBuilder).writeTo(out);
return new Response(status, headers, XContentType.JSON.mediaType(), out.toByteArray());
} catch (IOException e) {
return newError(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage());
@ -548,7 +549,7 @@ public class GoogleCloudStorageTestServer {
.endArray()
.endObject()
.endObject();
builder.bytes().writeTo(out);
BytesReference.bytes(builder).writeTo(out);
}
return new Response(status, emptyMap(), XContentType.JSON.mediaType(), out.toByteArray());
} catch (IOException e) {

View File

@ -43,6 +43,7 @@ import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
@ -306,7 +307,7 @@ public class CrossClusterSearchUnavailableClusterIT extends ESRestTestCase {
builder.endObject();
}
builder.endObject();
requestBody = builder.string();
requestBody = Strings.toString(builder);
}
return new NStringEntity(requestBody, ContentType.APPLICATION_JSON);
}

View File

@ -29,6 +29,7 @@ import org.elasticsearch.client.RestClient;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
@ -140,7 +141,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON));
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
count = randomIntBetween(2000, 3000);
byte[] randomByteArray = new byte[16];
@ -203,7 +204,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON));
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
int numDocs = randomIntBetween(2000, 3000);
indexRandomDocuments(numDocs, true, false, i -> {
@ -280,7 +281,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON));
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
String aliasName = "%23" + index; // %23 == #
client().performRequest("PUT", "/" + index + "/_alias/" + aliasName);
@ -328,7 +329,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/_template/template_1", Collections.emptyMap(),
new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON));
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
client().performRequest("PUT", "/" + index);
}
@ -379,7 +380,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON));
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
numDocs = randomIntBetween(512, 1024);
indexRandomDocuments(numDocs, true, true, i -> {
@ -446,7 +447,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON));
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
numDocs = randomIntBetween(512, 1024);
indexRandomDocuments(numDocs, true, true, i -> {
@ -836,7 +837,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
}
templateBuilder.endObject().endObject();
client().performRequest("PUT", "/_template/test_template", emptyMap(),
new StringEntity(templateBuilder.string(), ContentType.APPLICATION_JSON));
new StringEntity(Strings.toString(templateBuilder), ContentType.APPLICATION_JSON));
if (runningAgainstOldCluster) {
// Create the repo
@ -850,7 +851,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
}
repoConfig.endObject();
client().performRequest("PUT", "/_snapshot/repo", emptyMap(),
new StringEntity(repoConfig.string(), ContentType.APPLICATION_JSON));
new StringEntity(Strings.toString(repoConfig), ContentType.APPLICATION_JSON));
}
client().performRequest("PUT", "/_snapshot/repo/" + (runningAgainstOldCluster ? "old_snap" : "new_snap"),
@ -875,7 +876,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON));
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
} else {
Response response = client().performRequest("GET", index + "/_stats", singletonMap("level", "shards"));
List<Object> shardStats = ObjectPath.createFromResponse(response).evaluate("indices." + index + ".shards.0");
@ -919,7 +920,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
restoreCommand.field("rename_replacement", "restored_" + index);
restoreCommand.endObject();
client().performRequest("POST", "/_snapshot/repo/" + snapshotName + "/_restore", singletonMap("wait_for_completion", "true"),
new StringEntity(restoreCommand.string(), ContentType.APPLICATION_JSON));
new StringEntity(Strings.toString(restoreCommand), ContentType.APPLICATION_JSON));
// Make sure search finds all documents
String countResponse = toStr(client().performRequest("GET", "/restored_" + index + "/_search", singletonMap("size", "0")));
@ -997,7 +998,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
for (int i = 0; i < count; i++) {
logger.debug("Indexing document [{}]", i);
client().performRequest("POST", "/" + index + "/doc/" + i, emptyMap(),
new StringEntity(docSupplier.apply(i).string(), ContentType.APPLICATION_JSON));
new StringEntity(Strings.toString(docSupplier.apply(i)), ContentType.APPLICATION_JSON));
if (rarely()) {
refresh();
}
@ -1022,7 +1023,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
// Only create the first version so we know how many documents are created when the index is first created
Map<String, String> params = singletonMap("op_type", "create");
client().performRequest("PUT", "/info/doc/" + index + "_" + type, params,
new StringEntity(infoDoc.string(), ContentType.APPLICATION_JSON));
new StringEntity(Strings.toString(infoDoc), ContentType.APPLICATION_JSON));
}
private String loadInfoDocument(String type) throws IOException {

View File

@ -25,6 +25,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.seqno.SeqNoStats;
@ -42,7 +43,6 @@ import java.util.stream.Collectors;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiOfLength;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
@ -237,15 +237,15 @@ public class IndexingIT extends ESRestTestCase {
logger.info("cluster discovered: {}", nodes.toString());
// Create the repository before taking the snapshot.
String repoConfig = JsonXContent.contentBuilder()
.startObject()
.field("type", "fs")
.startObject("settings")
.field("compress", randomBoolean())
.field("location", System.getProperty("tests.path.repo"))
.endObject()
.endObject()
.string();
String repoConfig = Strings
.toString(JsonXContent.contentBuilder()
.startObject()
.field("type", "fs")
.startObject("settings")
.field("compress", randomBoolean())
.field("location", System.getProperty("tests.path.repo"))
.endObject()
.endObject());
assertOK(
client().performRequest("PUT", "/_snapshot/repo", emptyMap(),

View File

@ -25,6 +25,7 @@ import org.apache.http.util.EntityUtils;
import org.elasticsearch.Version;
import org.elasticsearch.client.Response;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
@ -189,7 +190,7 @@ public class QueryBuilderBWCIT extends ESRestTestCase {
}
mappingsAndSettings.endObject();
Response rsp = client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON));
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
assertEquals(200, rsp.getStatusLine().getStatusCode());
for (int i = 0; i < CANDIDATES.size(); i++) {

View File

@ -33,6 +33,7 @@ import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Response;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
@ -103,12 +104,12 @@ public class ContextAndHeaderTransportIT extends HttpSmokeTestCase {
@Before
public void createIndices() throws Exception {
String mapping = jsonBuilder().startObject().startObject("type")
String mapping = Strings.toString(jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("location").field("type", "geo_shape").endObject()
.startObject("name").field("type", "text").endObject()
.endObject()
.endObject().endObject().string();
.endObject().endObject());
Settings settings = Settings.builder()
.put(indexSettings())

View File

@ -23,6 +23,7 @@ import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.elasticsearch.client.Response;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.settings.Setting;
@ -210,7 +211,7 @@ public class DeprecationHttpIT extends HttpSmokeTestCase {
builder.endArray().endObject();
return new StringEntity(builder.string(), ContentType.APPLICATION_JSON);
return new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON);
}
}

View File

@ -31,6 +31,7 @@ import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.Build;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterModule;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -76,7 +77,7 @@ public class WildflyIT extends LuceneTestCase {
builder.endArray();
}
builder.endObject();
body = builder.string();
body = Strings.toString(builder);
}
put.setEntity(new StringEntity(body, ContentType.APPLICATION_JSON));
try (CloseableHttpResponse response = client.execute(put)) {

View File

@ -23,10 +23,10 @@ import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent.Params;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -130,7 +130,7 @@ public class NodesInfoResponse extends BaseNodesResponse<NodeInfo> implements To
builder.startObject();
toXContent(builder, EMPTY_PARAMS);
builder.endObject();
return builder.string();
return Strings.toString(builder);
} catch (IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}";
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.cluster.node.stats;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContentFragment;
@ -72,7 +73,7 @@ public class NodesStatsResponse extends BaseNodesResponse<NodeStats> implements
builder.startObject();
toXContent(builder, EMPTY_PARAMS);
builder.endObject();
return builder.string();
return Strings.toString(builder);
} catch (IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}";
}

View File

@ -22,9 +22,9 @@ package org.elasticsearch.action.admin.cluster.node.usage;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent.Params;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -77,10 +77,10 @@ public class NodesUsageResponse extends BaseNodesResponse<NodeUsage> implements
builder.startObject();
toXContent(builder, EMPTY_PARAMS);
builder.endObject();
return builder.string();
return Strings.toString(builder);
} catch (IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}";
}
}
}
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.cluster.repositories.put;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
@ -161,7 +162,7 @@ public class PutRepositoryRequest extends AcknowledgedRequest<PutRepositoryReque
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source);
settings(builder.string(), builder.contentType());
settings(Strings.toString(builder), builder.contentType());
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
}

View File

@ -23,6 +23,7 @@ import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
@ -76,7 +77,7 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpd
/**
* Sets the value of "flat_settings".
* Used only by the high-level REST client.
*
*
* @param flatSettings
* value of "flat_settings" flag to be set
* @return this request
@ -89,7 +90,7 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpd
/**
* Return settings in flat format.
* Used only by the high-level REST client.
*
*
* @return <code>true</code> if settings need to be returned in flat format; <code>false</code> otherwise.
*/
public boolean flatSettings() {
@ -136,7 +137,7 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpd
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source);
transientSettings(builder.string(), builder.contentType());
transientSettings(Strings.toString(builder), builder.contentType());
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
}
@ -175,7 +176,7 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpd
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source);
persistentSettings(builder.string(), builder.contentType());
persistentSettings(Strings.toString(builder), builder.contentType());
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
}

View File

@ -338,7 +338,7 @@ public class CreateSnapshotRequest extends MasterNodeRequest<CreateSnapshotReque
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source);
settings(builder.string(), builder.contentType());
settings(Strings.toString(builder), builder.contentType());
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
}

View File

@ -373,7 +373,7 @@ public class RestoreSnapshotRequest extends MasterNodeRequest<RestoreSnapshotReq
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source);
settings(builder.string(), builder.contentType());
settings(Strings.toString(builder), builder.contentType());
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
}
@ -485,7 +485,7 @@ public class RestoreSnapshotRequest extends MasterNodeRequest<RestoreSnapshotReq
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source);
indexSettings(builder.string(), builder.contentType());
indexSettings(Strings.toString(builder), builder.contentType());
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
}

View File

@ -23,6 +23,7 @@ import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContentFragment;
@ -130,7 +131,7 @@ public class ClusterStatsResponse extends BaseNodesResponse<ClusterStatsNodeResp
builder.startObject();
toXContent(builder, EMPTY_PARAMS);
builder.endObject();
return builder.string();
return Strings.toString(builder);
} catch (IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}";
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.indices.alias;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -35,8 +36,8 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilder;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
import java.util.Objects;
/**
* Represents an alias, to be associated with an index
@ -100,7 +101,7 @@ public class Alias implements Streamable, ToXContentObject {
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(filter);
this.filter = builder.string();
this.filter = Strings.toString(builder);
return this;
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + filter + "]", e);
@ -119,7 +120,7 @@ public class Alias implements Streamable, ToXContentObject {
XContentBuilder builder = XContentFactory.jsonBuilder();
filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.close();
this.filter = builder.string();
this.filter = Strings.toString(builder);
return this;
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to build json for alias request", e);
@ -227,7 +228,9 @@ public class Alias implements Streamable, ToXContentObject {
builder.startObject(name);
if (filter != null) {
builder.rawField(FILTER.getPreferredName(), new BytesArray(filter), XContentType.JSON);
try (InputStream stream = new BytesArray(filter).streamInput()) {
builder.rawField(FILTER.getPreferredName(), stream, XContentType.JSON);
}
}
if (indexRouting != null && indexRouting.equals(searchRouting)) {

View File

@ -44,6 +44,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilder;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@ -377,7 +378,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(filter);
this.filter = builder.string();
this.filter = Strings.toString(builder);
return this;
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + filter + "]", e);
@ -393,7 +394,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
XContentBuilder builder = XContentFactory.jsonBuilder();
filter.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.close();
this.filter = builder.string();
this.filter = Strings.toString(builder);
return this;
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to build json for alias request", e);
@ -432,7 +433,9 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
builder.array(ALIASES.getPreferredName(), aliases);
}
if (false == Strings.isEmpty(filter)) {
builder.rawField(FILTER.getPreferredName(), new BytesArray(filter), XContentType.JSON);
try (InputStream stream = new BytesArray(filter).streamInput()) {
builder.rawField(FILTER.getPreferredName(), stream, XContentType.JSON);
}
}
if (false == Strings.isEmpty(routing)) {
builder.field(ROUTING.getPreferredName(), routing);

View File

@ -78,7 +78,7 @@ public class AnalyzeRequest extends SingleShardRequest<AnalyzeRequest> {
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(definition);
this.definition = Settings.builder().loadFromSource(builder.string(), builder.contentType()).build();
this.definition = Settings.builder().loadFromSource(Strings.toString(builder), builder.contentType()).build();
} catch (IOException e) {
throw new IllegalArgumentException("Failed to parse [" + definition + "]", e);
}

View File

@ -31,6 +31,7 @@ import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
@ -48,6 +49,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.util.HashMap;
import java.util.HashSet;
@ -180,11 +182,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
* Allows to set the settings using a json builder.
*/
public CreateIndexRequest settings(XContentBuilder builder) {
try {
settings(builder.string(), builder.contentType());
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate json settings from builder", e);
}
settings(Strings.toString(builder), builder.contentType());
return this;
}
@ -196,7 +194,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source);
settings(builder.string(), XContentType.JSON);
settings(Strings.toString(builder), XContentType.JSON);
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
}
@ -249,7 +247,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
* @param source The mapping source
*/
public CreateIndexRequest mapping(String type, XContentBuilder source) {
return mapping(type, source.bytes(), source.contentType());
return mapping(type, BytesReference.bytes(source), source.contentType());
}
/**
@ -293,7 +291,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
try {
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.map(source);
return aliases(builder.bytes());
return aliases(BytesReference.bytes(builder));
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
}
@ -303,7 +301,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
* Sets the aliases that will be associated with the index when it gets created
*/
public CreateIndexRequest aliases(XContentBuilder source) {
return aliases(source.bytes());
return aliases(BytesReference.bytes(source));
}
/**
@ -350,7 +348,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
* Sets the settings and mappings as a single source.
*/
public CreateIndexRequest source(XContentBuilder source) {
return source(source.bytes(), source.contentType());
return source(BytesReference.bytes(source), source.contentType());
}
/**
@ -536,7 +534,9 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
builder.startObject(MAPPINGS.getPreferredName());
for (Map.Entry<String, String> entry : mappings.entrySet()) {
builder.rawField(entry.getKey(), new BytesArray(entry.getValue()), XContentType.JSON);
try (InputStream stream = new BytesArray(entry.getValue()).streamInput()) {
builder.rawField(entry.getKey(), stream, XContentType.JSON);
}
}
builder.endObject();

View File

@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.Mapper;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
@ -127,7 +128,9 @@ public class GetFieldMappingsResponse extends ActionResponse implements ToXConte
if (params.paramAsBoolean("pretty", false)) {
builder.field("mapping", sourceAsMap());
} else {
builder.rawField("mapping", source, XContentType.JSON);
try (InputStream stream = source.streamInput()) {
builder.rawField("mapping", stream, XContentType.JSON);
}
}
return builder;
}

View File

@ -250,11 +250,7 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
* The mapping source definition.
*/
public PutMappingRequest source(XContentBuilder mappingBuilder) {
try {
return source(mappingBuilder.string(), mappingBuilder.contentType());
} catch (IOException e) {
throw new IllegalArgumentException("Failed to build json for mapping request", e);
}
return source(Strings.toString(mappingBuilder), mappingBuilder.contentType());
}
/**
@ -265,7 +261,7 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(mappingSource);
return source(builder.string(), XContentType.JSON);
return source(Strings.toString(builder), XContentType.JSON);
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + mappingSource + "]", e);
}

View File

@ -24,6 +24,7 @@ import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
@ -153,7 +154,7 @@ public class UpdateSettingsRequest extends AcknowledgedRequest<UpdateSettingsReq
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source);
settings(builder.string(), builder.contentType());
settings(Strings.toString(builder), builder.contentType());
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.indices.stats;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastResponse;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContentFragment;
@ -213,7 +214,7 @@ public class IndicesStatsResponse extends BroadcastResponse implements ToXConten
builder.startObject();
toXContent(builder, EMPTY_PARAMS);
builder.endObject();
return builder.string();
return Strings.toString(builder);
} catch (IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}";
}

View File

@ -28,6 +28,7 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.MasterNodeRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
@ -196,7 +197,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source);
settings(builder.string(), XContentType.JSON);
settings(Strings.toString(builder), XContentType.JSON);
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
}
@ -237,7 +238,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
* @param source The mapping source
*/
public PutIndexTemplateRequest mapping(String type, XContentBuilder source) {
return mapping(type, source.bytes(), source.contentType());
return mapping(type, BytesReference.bytes(source), source.contentType());
}
/**
@ -295,7 +296,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
*/
public PutIndexTemplateRequest source(XContentBuilder templateBuilder) {
try {
return source(templateBuilder.bytes(), templateBuilder.contentType());
return source(BytesReference.bytes(templateBuilder), templateBuilder.contentType());
} catch (Exception e) {
throw new IllegalArgumentException("Failed to build json for template request", e);
}
@ -412,7 +413,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
try {
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.map(source);
return aliases(builder.bytes());
return aliases(BytesReference.bytes(builder));
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
}
@ -422,7 +423,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
* Sets the aliases that will be associated with the index when it gets created
*/
public PutIndexTemplateRequest aliases(XContentBuilder source) {
return aliases(source.bytes());
return aliases(BytesReference.bytes(source));
}
/**

View File

@ -43,7 +43,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.shard.ShardId;
@ -75,7 +74,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implements DocWriteRequest<IndexRequest>, CompositeIndicesRequest {
/**
* Max length of the source document to include into toString()
* Max length of the source document to include into string()
*
* @see ReplicationRequest#createTask
*/
@ -332,7 +331,7 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
* Sets the content source to index.
*/
public IndexRequest source(XContentBuilder sourceBuilder) {
return source(sourceBuilder.bytes(), sourceBuilder.contentType());
return source(BytesReference.bytes(sourceBuilder), sourceBuilder.contentType());
}
/**

View File

@ -306,7 +306,7 @@ public class MultiSearchRequest extends ActionRequest implements CompositeIndice
xContentBuilder.field("allow_partial_search_results", request.allowPartialSearchResults());
}
xContentBuilder.endObject();
xContentBuilder.bytes().writeTo(output);
BytesReference.bytes(xContentBuilder).writeTo(output);
}
output.write(xContent.streamSeparator());
try (XContentBuilder xContentBuilder = XContentBuilder.builder(xContent)) {
@ -316,7 +316,7 @@ public class MultiSearchRequest extends ActionRequest implements CompositeIndice
xContentBuilder.startObject();
xContentBuilder.endObject();
}
xContentBuilder.bytes().writeTo(output);
BytesReference.bytes(xContentBuilder).writeTo(output);
}
output.write(xContent.streamSeparator());
}

View File

@ -256,7 +256,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
* Sets an artificial document from which term vectors are requested for.
*/
public TermVectorsRequest doc(XContentBuilder documentBuilder) {
return this.doc(documentBuilder.bytes(), true, documentBuilder.contentType());
return this.doc(BytesReference.bytes(documentBuilder), true, documentBuilder.contentType());
}
/**

View File

@ -259,7 +259,8 @@ public class TermVectorsResponse extends ActionResponse implements ToXContentObj
builder.field(FieldStrings.END_OFFSET, currentEndOffset[i]);
}
if (curTerms.hasPayloads() && (currentPayloads[i].length() > 0)) {
builder.field(FieldStrings.PAYLOAD, currentPayloads[i]);
BytesRef bytesRef = currentPayloads[i].toBytesRef();
builder.field(FieldStrings.PAYLOAD, bytesRef.bytes, bytesRef.offset, bytesRef.length);
}
builder.endObject();
}

View File

@ -356,7 +356,7 @@ public class UpdateHelper extends AbstractComponent {
BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity);
try (XContentBuilder builder = new XContentBuilder(sourceContentType.xContent(), streamOutput)) {
builder.value(value);
sourceFilteredAsBytes = builder.bytes();
sourceFilteredAsBytes = BytesReference.bytes(builder);
}
} catch (IOException e) {
throw new ElasticsearchException("Error filtering source", e);

View File

@ -24,6 +24,7 @@ import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -244,7 +245,7 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
}
try {
XContentBuilder builder = XContentFactory.jsonBuilder().map(filter);
this.filter = new CompressedXContent(builder.bytes());
this.filter = new CompressedXContent(BytesReference.bytes(builder));
return this;
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to build json for alias request", e);
@ -252,11 +253,7 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
}
public Builder filter(XContentBuilder filterBuilder) {
try {
return filter(filterBuilder.string());
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to build json for alias request", e);
}
return filter(Strings.toString(filterBuilder));
}
public Builder routing(String routing) {

View File

@ -25,6 +25,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.MapBuilder;
@ -459,7 +460,7 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
String mappingType = currentFieldName;
Map<String, Object> mappingSource =
MapBuilder.<String, Object>newMapBuilder().put(mappingType, parser.mapOrdered()).map();
builder.putMapping(mappingType, XContentFactory.jsonBuilder().map(mappingSource).string());
builder.putMapping(mappingType, Strings.toString(XContentFactory.jsonBuilder().map(mappingSource)));
}
}
} else if ("aliases".equals(currentFieldName)) {
@ -483,7 +484,7 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
Map<String, Object> mapping = parser.mapOrdered();
if (mapping.size() == 1) {
String mappingType = mapping.keySet().iterator().next();
String mappingSource = XContentFactory.jsonBuilder().map(mapping).string();
String mappingSource = Strings.toString(XContentFactory.jsonBuilder().map(mapping));
if (mappingSource == null) {
// crap, no mapping source, warn?

View File

@ -23,6 +23,7 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -99,7 +100,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
public MappingMetaData(String type, Map<String, Object> mapping) throws IOException {
this.type = type;
XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().map(mapping);
this.source = new CompressedXContent(mappingBuilder.bytes());
this.source = new CompressedXContent(BytesReference.bytes(mappingBuilder));
Map<String, Object> withoutType = mapping;
if (mapping.size() == 1 && mapping.containsKey(type)) {
withoutType = (Map<String, Object>) mapping.get(type);

View File

@ -1081,7 +1081,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, To
builder.startObject();
toXContent(metaData, builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
return builder.string();
return Strings.toString(builder);
}
public static void toXContent(MetaData metaData, XContentBuilder builder, ToXContent.Params params) throws IOException {

View File

@ -755,6 +755,14 @@ public class Strings {
return toString(toXContent, false, false);
}
/**
* Returns a string representation of the builder (only applicable for text based xcontent).
* @param xContentBuilder
*/
public static String toString(XContentBuilder xContentBuilder) {
return BytesReference.bytes(xContentBuilder).utf8ToString();
}
/**
* Return a {@link String} that is the json representation of the provided {@link ToXContent}.
* Wraps the output into an anonymous object if needed. Allows to control whether the outputted
@ -771,7 +779,7 @@ public class Strings {
if (toXContent.isFragment()) {
builder.endObject();
}
return builder.string();
return toString(builder);
} catch (IOException e) {
try {
XContentBuilder builder = createBuilder(pretty, human);
@ -779,7 +787,7 @@ public class Strings {
builder.field("error", "error building toString out of XContent: " + e.getMessage());
builder.field("stack_trace", ExceptionsHelper.stackTrace(e));
builder.endObject();
return builder.string();
return toString(builder);
} catch (IOException e2) {
throw new ElasticsearchException("cannot generate error message for deserialization", e);
}
@ -845,5 +853,4 @@ public class Strings {
return sb.toString();
}
}
}

View File

@ -21,8 +21,11 @@ package org.elasticsearch.common.bytes;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator;
import org.elasticsearch.common.io.stream.BytesStream;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.ByteArrayOutputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
@ -38,6 +41,20 @@ public abstract class BytesReference implements Accountable, Comparable<BytesRef
private Integer hash = null; // we cache the hash of this reference since it can be quite costly to re-calculated it
/**
* Convert an {@link XContentBuilder} into a BytesReference. This method closes the builder,
* so no further fields may be added.
*/
public static BytesReference bytes(XContentBuilder xContentBuilder) {
xContentBuilder.close();
OutputStream stream = xContentBuilder.getOutputStream();
if (stream instanceof ByteArrayOutputStream) {
return new BytesArray(((ByteArrayOutputStream) stream).toByteArray());
} else {
return ((BytesStream) stream).bytes();
}
}
/**
* Returns the byte at the specified index. Need to be between 0 and length.
*/

View File

@ -19,6 +19,7 @@
package org.elasticsearch.common.document;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
@ -124,9 +125,13 @@ public class DocumentField implements Streamable, ToXContentFragment, Iterable<O
for (Object value : values) {
// this call doesn't really need to support writing any kind of object.
// Stored fields values are converted using MappedFieldType#valueForDisplay.
// As a result they can either be Strings, Numbers, Booleans, or BytesReference, that's
// As a result they can either be Strings, Numbers, or Booleans, that's
// all.
builder.value(value);
if (value instanceof BytesReference) {
builder.binaryValue(((BytesReference) value).toBytesRef());
} else {
builder.value(value);
}
}
builder.endArray();
return builder;
@ -168,4 +173,4 @@ public class DocumentField implements Streamable, ToXContentFragment, Iterable<O
", values=" + values +
'}';
}
}
}

View File

@ -790,7 +790,7 @@ public class Setting<T> implements ToXContentObject {
builder.startObject();
subSettings.toXContent(builder, EMPTY_PARAMS);
builder.endObject();
return builder.string();
return Strings.toString(builder);
} catch (IOException e) {
throw new RuntimeException(e);
}
@ -1172,7 +1172,7 @@ public class Setting<T> implements ToXContentObject {
builder.value(element);
}
builder.endArray();
return builder.string();
return Strings.toString(builder);
} catch (IOException ex) {
throw new ElasticsearchException(ex);
}

View File

@ -1442,7 +1442,7 @@ public final class Settings implements ToXContentFragment {
builder.startObject();
toXContent(builder, new MapParams(Collections.singletonMap("flat_settings", "true")));
builder.endObject();
return builder.string();
return Strings.toString(builder);
} catch (IOException e) {
throw new UncheckedIOException(e);
}

View File

@ -20,6 +20,7 @@
package org.elasticsearch.common.settings;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Binder;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.logging.Loggers;
@ -117,7 +118,7 @@ public class SettingsModule implements Module {
xContentBuilder.startObject();
indexSettings.toXContent(xContentBuilder, new ToXContent.MapParams(Collections.singletonMap("flat_settings", "true")));
xContentBuilder.endObject();
builder.append(xContentBuilder.string());
builder.append(Strings.toString(xContentBuilder));
}
builder.append("'");
builder.append(System.lineSeparator());

View File

@ -219,7 +219,7 @@ public abstract class AbstractObjectParser<Value, Context>
try (XContentBuilder builder = JsonXContent.contentBuilder()) {
builder.prettyPrint();
builder.copyCurrentStructure(p);
return builder.bytes();
return BytesReference.bytes(builder);
}
};
declareField(consumer, bytesParser, field, ValueType.OBJECT);

View File

@ -20,10 +20,7 @@
package org.elasticsearch.common.xcontent;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.BytesStream;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.ByteSizeValue;
@ -173,6 +170,13 @@ public final class XContentBuilder implements Releasable, Flushable {
return generator.contentType();
}
/**
* @return the output stream to which the built object is being written. Note that is dangerous to modify the stream.
*/
public OutputStream getOutputStream() {
return bos;
}
public XContentBuilder prettyPrint() {
generator.usePrettyPrint();
return this;
@ -626,24 +630,6 @@ public final class XContentBuilder implements Releasable, Flushable {
return this;
}
/**
* Writes the binary content of the given {@link BytesReference}.
*
* Use {@link org.elasticsearch.common.xcontent.XContentParser#binaryValue()} to read the value back
*/
public XContentBuilder field(String name, BytesReference value) throws IOException {
return field(name).value(value);
}
/**
* Writes the binary content of the given {@link BytesReference}.
*
* Use {@link org.elasticsearch.common.xcontent.XContentParser#binaryValue()} to read the value back
*/
public XContentBuilder value(BytesReference value) throws IOException {
return (value == null) ? nullValue() : binaryValue(value.toBytesRef());
}
////////////////////////////////////////////////////////////////////////////
// Text
//////////////////////////////////
@ -810,8 +796,6 @@ public final class XContentBuilder implements Releasable, Flushable {
value((Calendar) value);
} else if (value instanceof ReadableInstant) {
value((ReadableInstant) value);
} else if (value instanceof BytesReference) {
value((BytesReference) value);
} else if (value instanceof ToXContent) {
value((ToXContent) value);
} else {
@ -982,28 +966,6 @@ public final class XContentBuilder implements Releasable, Flushable {
return this;
}
/**
* Writes a raw field with the given bytes as the value
* @deprecated use {@link #rawField(String name, BytesReference, XContentType)} to avoid content type auto-detection
*/
@Deprecated
public XContentBuilder rawField(String name, BytesReference value) throws IOException {
try (InputStream stream = value.streamInput()) {
generator.writeRawField(name, stream);
}
return this;
}
/**
* Writes a raw field with the given bytes as the value
*/
public XContentBuilder rawField(String name, BytesReference value, XContentType contentType) throws IOException {
try (InputStream stream = value.streamInput()) {
generator.writeRawField(name, stream, contentType);
}
return this;
}
/**
* Writes a value with the source coming directly from the bytes in the stream
*/
@ -1035,22 +997,6 @@ public final class XContentBuilder implements Releasable, Flushable {
return this.generator;
}
public BytesReference bytes() {
close();
if (bos instanceof ByteArrayOutputStream) {
return new BytesArray(((ByteArrayOutputStream) bos).toByteArray());
} else {
return ((BytesStream) bos).bytes();
}
}
/**
* Returns a string representation of the builder (only applicable for text based xcontent).
*/
public String string() throws IOException {
return bytes().utf8ToString();
}
static void ensureNameNotNull(String name) {
ensureNotNull(name, "Field name cannot be null");
}

View File

@ -20,6 +20,7 @@
package org.elasticsearch.common.xcontent;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.Compressor;
@ -174,7 +175,7 @@ public class XContentHelper {
builder.prettyPrint();
}
builder.copyCurrentStructure(parser);
return builder.string();
return Strings.toString(builder);
}
}
@ -371,7 +372,7 @@ public class XContentHelper {
/**
* Writes a "raw" (bytes) field, handling cases where the bytes are compressed, and tries to optimize writing using
* {@link XContentBuilder#rawField(String, org.elasticsearch.common.bytes.BytesReference)}.
* {@link XContentBuilder#rawField(String, InputStream)}.
* @deprecated use {@link #writeRawField(String, BytesReference, XContentType, XContentBuilder, Params)} to avoid content type
* auto-detection
*/
@ -383,13 +384,15 @@ public class XContentHelper {
builder.rawField(field, compressedStreamInput);
}
} else {
builder.rawField(field, source);
try (InputStream stream = source.streamInput()) {
builder.rawField(field, stream);
}
}
}
/**
* Writes a "raw" (bytes) field, handling cases where the bytes are compressed, and tries to optimize writing using
* {@link XContentBuilder#rawField(String, org.elasticsearch.common.bytes.BytesReference, XContentType)}.
* {@link XContentBuilder#rawField(String, InputStream, XContentType)}.
*/
public static void writeRawField(String field, BytesReference source, XContentType xContentType, XContentBuilder builder,
ToXContent.Params params) throws IOException {
@ -400,7 +403,9 @@ public class XContentHelper {
builder.rawField(field, compressedStreamInput, xContentType);
}
} else {
builder.rawField(field, source, xContentType);
try (InputStream stream = source.streamInput()) {
builder.rawField(field, stream, xContentType);
}
}
}
@ -428,7 +433,7 @@ public class XContentHelper {
if (toXContent.isFragment()) {
builder.endObject();
}
return builder.bytes();
return BytesReference.bytes(builder);
}
}
}

View File

@ -229,7 +229,6 @@ public interface XContentParser extends Closeable {
*
* <ul>
* <li>{@link XContentBuilder#field(String, org.apache.lucene.util.BytesRef)}</li>
* <li>{@link XContentBuilder#field(String, org.elasticsearch.common.bytes.BytesReference)}</li>
* <li>{@link XContentBuilder#field(String, byte[], int, int)}}</li>
* <li>{@link XContentBuilder#field(String, byte[])}}</li>
* </ul>

View File

@ -304,7 +304,7 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
//the original document gets slightly modified: whitespaces or pretty printing are not preserved,
//it all depends on the current builder settings
builder.copyCurrentStructure(parser);
source = builder.bytes();
source = BytesReference.bytes(builder);
}
} else if (FIELDS.equals(currentFieldName)) {
while(parser.nextToken() != XContentParser.Token.END_OBJECT) {

View File

@ -227,7 +227,7 @@ public final class ShardGetService extends AbstractIndexShardComponent {
sourceAsMap = typeMapTuple.v2();
sourceAsMap = XContentMapValues.filter(sourceAsMap, fetchSourceContext.includes(), fetchSourceContext.excludes());
try {
source = XContentFactory.contentBuilder(sourceContentType).map(sourceAsMap).bytes();
source = BytesReference.bytes(XContentFactory.contentBuilder(sourceContentType).map(sourceAsMap));
} catch (IOException e) {
throw new ElasticsearchException("Failed to get type [" + type + "] and id [" + id + "] with includes/excludes set", e);
}

View File

@ -31,6 +31,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
@ -256,7 +257,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
Map<String, CompressedXContent> mappingSourcesCompressed = new LinkedHashMap<>(mappings.size());
for (Map.Entry<String, Map<String, Object>> entry : mappings.entrySet()) {
try {
mappingSourcesCompressed.put(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string()));
mappingSourcesCompressed.put(entry.getKey(), new CompressedXContent(Strings.toString(XContentFactory.jsonBuilder().map(entry.getValue()))));
} catch (Exception e) {
throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, entry.getKey(), e.getMessage());
}

View File

@ -20,6 +20,7 @@
package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -146,7 +147,7 @@ public final class Mapping implements ToXContentFragment {
try {
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
toXContent(builder, new ToXContent.MapParams(emptyMap()));
return builder.endObject().string();
return Strings.toString(builder.endObject());
} catch (IOException bogus) {
throw new UncheckedIOException(bogus);
}

View File

@ -57,6 +57,7 @@ import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType;
import org.elasticsearch.index.mapper.UidFieldMapper;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
@ -208,7 +209,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
}
this.index = index;
this.type = type;
this.doc = doc.bytes();
this.doc = BytesReference.bytes(doc);
this.xContentType = doc.contentType();
}
@ -373,7 +374,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
} else if (ID.match(currentFieldName, parser.getDeprecationHandler())) {
item.id = parser.text();
} else if (DOC.match(currentFieldName, parser.getDeprecationHandler())) {
item.doc = jsonBuilder().copyCurrentStructure(parser).bytes();
item.doc = BytesReference.bytes(jsonBuilder().copyCurrentStructure(parser));
item.xContentType = XContentType.JSON;
} else if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) {
if (token == XContentParser.Token.START_ARRAY) {
@ -424,7 +425,9 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
builder.field(ID.getPreferredName(), this.id);
}
if (this.doc != null) {
builder.rawField(DOC.getPreferredName(), this.doc, xContentType);
try (InputStream stream = this.doc.streamInput()) {
builder.rawField(DOC.getPreferredName(), stream, xContentType);
}
}
if (this.fields != null) {
builder.array(FIELDS.getPreferredName(), this.fields);
@ -450,7 +453,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.prettyPrint();
toXContent(builder, EMPTY_PARAMS);
return builder.string();
return Strings.toString(builder);
} catch (Exception e) {
return "{ \"error\" : \"" + ExceptionsHelper.detailedMessage(e) + "\"}";
}

View File

@ -104,7 +104,7 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder<DFB>
}
builder.field(DECAY, decay);
builder.endObject();
this.functionBytes = builder.bytes();
this.functionBytes = BytesReference.bytes(builder);
} catch (IOException e) {
throw new IllegalArgumentException("unable to build inner function object",e);
}
@ -149,7 +149,9 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder<DFB>
@Override
public void doXContent(XContentBuilder builder, Params params) throws IOException {
    builder.startObject(getName());
    // Stream the pre-serialized inner function object into the builder. The
    // stream is opened with try-with-resources so it is always closed. The
    // stale duplicate `builder.rawField(fieldName, functionBytes)` call was
    // removed: it used the BytesReference-based overload deleted in the
    // XContent decoupling and would have emitted the same field twice.
    try (InputStream stream = functionBytes.streamInput()) {
        builder.rawField(fieldName, stream);
    }
    builder.field(DecayFunctionParser.MULTI_VALUE_MODE.getPreferredName(), multiValueMode.name());
    builder.endObject();
}

View File

@ -109,7 +109,7 @@ public final class DecayFunctionParser<DFB extends DecayFunctionBuilder<DFB>> im
fieldName = currentFieldName;
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.copyCurrentStructure(parser);
functionBytes = builder.bytes();
functionBytes = BytesReference.bytes(builder);
} else if (MULTI_VALUE_MODE.match(currentFieldName, parser.getDeprecationHandler())) {
multiValueMode = MultiValueMode.fromString(parser.text());
} else {

View File

@ -23,6 +23,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RecoverySource;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
@ -936,7 +937,7 @@ public class RecoveryState implements ToXContentFragment, Streamable {
builder.startObject();
toXContent(builder, EMPTY_PARAMS);
builder.endObject();
return builder.string();
return Strings.toString(builder);
} catch (IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}";
}

View File

@ -28,7 +28,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ContextParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent.Params;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -50,7 +49,7 @@ public final class PipelineConfiguration extends AbstractDiffable<PipelineConfig
PARSER.declareField((parser, builder, aVoid) -> {
XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent());
XContentHelper.copyCurrentStructure(contentBuilder.generator(), parser);
builder.setConfig(contentBuilder.bytes(), contentBuilder.contentType());
builder.setConfig(BytesReference.bytes(contentBuilder), contentBuilder.contentType());
}, new ParseField("config"), ObjectParser.ValueType.OBJECT);
}

View File

@ -54,7 +54,7 @@ public class BytesRestResponse extends RestResponse {
/**
 * Creates a new response from a {@link RestStatus} and an {@link XContentBuilder};
 * the builder supplies both the response media type and the serialized body bytes.
 */
public BytesRestResponse(RestStatus status, XContentBuilder builder) {
    // Only one this(...) delegation may appear in a constructor (JLS 8.8.7.1);
    // the stale pre-refactor call using the removed XContentBuilder#bytes()
    // was dropped in favor of the BytesReference.bytes(builder) replacement.
    this(status, builder.contentType().mediaType(), BytesReference.bytes(builder));
}
/**
@ -94,7 +94,7 @@ public class BytesRestResponse extends RestResponse {
public BytesRestResponse(RestChannel channel, RestStatus status, Exception e) throws IOException {
this.status = status;
try (XContentBuilder builder = build(channel, status, e)) {
this.content = builder.bytes();
this.content = BytesReference.bytes(builder);
this.contentType = builder.contentType().mediaType();
}
if (e instanceof ElasticsearchException) {

View File

@ -21,7 +21,9 @@ package org.elasticsearch.script;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
@ -159,7 +161,7 @@ public final class Script implements ToXContentObject, Writeable {
if (parser.currentToken() == Token.START_OBJECT) {
//this is really for search templates, that need to be converted to json format
XContentBuilder builder = XContentFactory.jsonBuilder();
idOrCode = builder.copyCurrentStructure(parser).string();
idOrCode = Strings.toString(builder.copyCurrentStructure(parser));
options.put(CONTENT_TYPE_OPTION, XContentType.JSON.mediaType());
} else {
idOrCode = parser.text();
@ -283,7 +285,7 @@ public final class Script implements ToXContentObject, Writeable {
builder.startObject();
settings.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
try (InputStream stream = builder.bytes().streamInput();
try (InputStream stream = BytesReference.bytes(builder).streamInput();
XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY,
LoggingDeprecationHandler.INSTANCE, stream)) {
return parse(parser);
@ -639,7 +641,9 @@ public final class Script implements ToXContentObject, Writeable {
if (type == ScriptType.INLINE) {
if (contentType != null && builder.contentType().mediaType().equals(contentType)) {
builder.rawField(SOURCE_PARSE_FIELD.getPreferredName(), new BytesArray(idOrCode));
try (InputStream stream = new BytesArray(idOrCode).streamInput()) {
builder.rawField(SOURCE_PARSE_FIELD.getPreferredName(), stream);
}
} else {
builder.field(SOURCE_PARSE_FIELD.getPreferredName(), idOrCode);
}

View File

@ -26,6 +26,7 @@ import java.util.List;
import java.util.Objects;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
@ -49,11 +50,11 @@ public class ScriptException extends ElasticsearchException {
private final List<String> scriptStack;
private final String script;
private final String lang;
/**
* Create a new ScriptException.
* @param message A short and simple summary of what happened, such as "compile error".
* Must not be {@code null}.
* @param cause The underlying cause of the exception. Must not be {@code null}.
* @param scriptStack An implementation-specific "stacktrace" for the error in the script.
* Must not be {@code null}, but can be empty (though this should be avoided if possible).
@ -85,7 +86,7 @@ public class ScriptException extends ElasticsearchException {
out.writeString(script);
out.writeString(lang);
}
@Override
protected void metadataToXContent(XContentBuilder builder, Params params) throws IOException {
builder.field("script_stack", scriptStack);
@ -100,7 +101,7 @@ public class ScriptException extends ElasticsearchException {
/**
 * Returns the implementation-specific "stacktrace" for the error in the
 * script, as supplied at construction. Per the constructor contract it is
 * never {@code null}, though it may be empty.
 */
public List<String> getScriptStack() {
return scriptStack;
}
/**
* Returns the identifier for which script.
* @return script's name or source text that identifies the script.
@ -108,7 +109,7 @@ public class ScriptException extends ElasticsearchException {
/**
 * Returns the identifier of the failing script: its name or the source text
 * that identifies it (the {@code script} value supplied at construction).
 */
public String getScript() {
return script;
}
/**
* Returns the language of the script.
* @return the {@code lang} parameter of the scripting engine.
@ -117,7 +118,7 @@ public class ScriptException extends ElasticsearchException {
return lang;
}
/**
* Returns a JSON version of this exception for debugging.
*/
public String toJsonString() {
@ -126,7 +127,7 @@ public class ScriptException extends ElasticsearchException {
json.startObject();
toXContent(json, ToXContent.EMPTY_PARAMS);
json.endObject();
return json.string();
return Strings.toString(json);
} catch (IOException e) {
throw new RuntimeException(e);
}

Some files were not shown because too many files have changed in this diff Show More