diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java
index a844ab223aa..f9bcb31b406 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java
@@ -24,14 +24,13 @@ import org.apache.lucene.document.StoredField;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.compress.Compressor;
 import org.elasticsearch.common.compress.CompressorFactory;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -46,17 +45,13 @@ import org.elasticsearch.index.mapper.MergeResult;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
 import org.elasticsearch.index.mapper.ParseContext;
 
-import java.io.BufferedInputStream;
 import java.io.IOException;
-import java.io.InputStream;
 import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.Objects;
 
 import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringValue;
 
 /**
  *
@@ -70,8 +65,6 @@ public class SourceFieldMapper extends MetadataFieldMapper {
     public static class Defaults {
         public static final String NAME = SourceFieldMapper.NAME;
         public static final boolean ENABLED = true;
-        public static final long COMPRESS_THRESHOLD = -1;
-        public static final String FORMAT = null; // default format is to use the one provided
 
         public static final MappedFieldType FIELD_TYPE = new SourceFieldType();
 
@@ -91,8 +84,6 @@ public class SourceFieldMapper extends MetadataFieldMapper {
 
         private boolean enabled = Defaults.ENABLED;
 
-        private String format = Defaults.FORMAT;
-
         private String[] includes = null;
         private String[] excludes = null;
 
@@ -105,11 +96,6 @@ public class SourceFieldMapper extends MetadataFieldMapper {
             return this;
         }
-        public Builder format(String format) {
-            this.format = format;
-            return this;
-        }
-
         public Builder includes(String[] includes) {
             this.includes = includes;
             return this;
         }
@@ -122,7 +108,7 @@ public class SourceFieldMapper extends MetadataFieldMapper {
 
         @Override
         public SourceFieldMapper build(BuilderContext context) {
-            return new SourceFieldMapper(enabled, format, includes, excludes, context.indexSettings());
+            return new SourceFieldMapper(enabled, includes, excludes, context.indexSettings());
         }
     }
 
@@ -138,8 +124,8 @@ public class SourceFieldMapper extends MetadataFieldMapper {
                 if (fieldName.equals("enabled")) {
                     builder.enabled(nodeBooleanValue(fieldNode));
                     iterator.remove();
-                } else if ("format".equals(fieldName)) {
-                    builder.format(nodeStringValue(fieldNode, null));
+                } else if ("format".equals(fieldName) && parserContext.indexVersionCreated().before(Version.V_3_0_0)) {
+                    // ignore on old indices, reject on and after 3.0
                     iterator.remove();
                 } else if (fieldName.equals("includes")) {
                    List values = (List) fieldNode;
@@ -213,22 +199,15 @@ public class SourceFieldMapper extends MetadataFieldMapper {
     private final String[] includes;
     private final String[] excludes;
 
-    private String format;
-
-    private XContentType formatContentType;
-
     private SourceFieldMapper(Settings indexSettings) {
-        this(Defaults.ENABLED, Defaults.FORMAT, null, null, indexSettings);
+        this(Defaults.ENABLED, null, null, indexSettings);
     }
 
-    private SourceFieldMapper(boolean enabled, String format,
-            String[] includes, String[] excludes, Settings indexSettings) {
+    private SourceFieldMapper(boolean enabled, String[] includes, String[] excludes, Settings indexSettings) {
         super(NAME, Defaults.FIELD_TYPE.clone(), Defaults.FIELD_TYPE, indexSettings); // Only stored.
         this.enabled = enabled;
         this.includes = includes;
         this.excludes = excludes;
-        this.format = format;
-        this.formatContentType = format == null ? null : XContentType.fromRestContentType(format);
         this.complete = enabled && includes == null && excludes == null;
     }
 
@@ -284,50 +263,11 @@ public class SourceFieldMapper extends MetadataFieldMapper {
             Tuple<XContentType, Map<String, Object>> mapTuple = XContentHelper.convertToMap(source, true);
             Map<String, Object> filteredSource = XContentMapValues.filter(mapTuple.v2(), includes, excludes);
             BytesStreamOutput bStream = new BytesStreamOutput();
-            XContentType contentType = formatContentType;
-            if (contentType == null) {
-                contentType = mapTuple.v1();
-            }
+            XContentType contentType = mapTuple.v1();
             XContentBuilder builder = XContentFactory.contentBuilder(contentType, bStream).map(filteredSource);
             builder.close();
             source = bStream.bytes();
-        } else if (formatContentType != null) {
-            // see if we need to convert the content type
-            Compressor compressor = CompressorFactory.compressor(source);
-            if (compressor != null) {
-                InputStream compressedStreamInput = compressor.streamInput(source.streamInput());
-                if (compressedStreamInput.markSupported() == false) {
-                    compressedStreamInput = new BufferedInputStream(compressedStreamInput);
-                }
-                XContentType contentType = XContentFactory.xContentType(compressedStreamInput);
-                if (contentType != formatContentType) {
-                    // we need to reread and store back, compressed....
-                    BytesStreamOutput bStream = new BytesStreamOutput();
-                    StreamOutput streamOutput = CompressorFactory.defaultCompressor().streamOutput(bStream);
-                    XContentBuilder builder = XContentFactory.contentBuilder(formatContentType, streamOutput);
-                    builder.copyCurrentStructure(XContentFactory.xContent(contentType).createParser(compressedStreamInput));
-                    builder.close();
-                    source = bStream.bytes();
-                    // update the data in the context, so we store it in the translog in this format
-                    context.source(source);
-                } else {
-                    compressedStreamInput.close();
-                }
-            } else {
-                XContentType contentType = XContentFactory.xContentType(source);
-                if (contentType != formatContentType) {
-                    // we need to reread and store back
-                    // we need to reread and store back, compressed....
-                    BytesStreamOutput bStream = new BytesStreamOutput();
-                    XContentBuilder builder = XContentFactory.contentBuilder(formatContentType, bStream);
-                    builder.copyCurrentStructure(XContentFactory.xContent(contentType).createParser(source));
-                    builder.close();
-                    source = bStream.bytes();
-                    // update the data in the context, so we store it in the translog in this format
-                    context.source(source);
-                }
-            }
         }
         if (!source.hasArray()) {
             source = source.toBytesArray();
         }
@@ -352,9 +292,6 @@ public class SourceFieldMapper extends MetadataFieldMapper {
         if (includeDefaults || enabled != Defaults.ENABLED) {
             builder.field("enabled", enabled);
         }
-        if (includeDefaults || !Objects.equals(format, Defaults.FORMAT)) {
-            builder.field("format", format);
-        }
 
         if (includes != null) {
             builder.field("includes", includes);
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java
index bbc1847dc08..364e9f2063f 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java
@@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.test.VersionUtils;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -63,25 +64,16 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
         assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.SMILE));
     }
 
-    public void testJsonFormat() throws Exception {
+    public void testFormatBackCompat() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("_source").field("format", "json").endObject()
             .endObject().endObject().string();
+        Settings settings = Settings.builder()
+            .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_2_0))
+            .build();
 
-        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-        DocumentMapper documentMapper = parser.parse(mapping);
-        ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
-            .field("field", "value")
-            .endObject().bytes());
-
-        assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.JSON));
-
-        documentMapper = parser.parse(mapping);
-        doc = documentMapper.parse("test", "type", "1", XContentFactory.smileBuilder().startObject()
-            .field("field", "value")
-            .endObject().bytes());
-
-        assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.JSON));
+        DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser();
+        parser.parse(mapping); // no exception
     }
 
     public void testIncludes() throws Exception {
diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc
index fcee4105160..6588f22a85a 100644
--- a/docs/reference/migration/migrate_3_0.asciidoc
+++ b/docs/reference/migration/migrate_3_0.asciidoc
@@ -213,6 +213,13 @@ float by default instead of a double. The reasoning is that floats should be
 more than enough for most cases but would decrease storage requirements
 significantly.
 
+==== `_source`'s `format` option
+
+The `_source` mapping no longer supports the `format` option. It will still be
+accepted for indices created before the upgrade to 3.0 for backward compatibility,
+but it will have no effect. Indices created on or after 3.0 will reject this
+option.
+
 [[breaking_30_plugins]]
 === Plugin changes
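
For reference, a sketch of the complementary check that is not part of this patch: a test asserting that an index created on or after 3.0 rejects the removed option. It assumes the leftover "format" entry is reported through the usual unsupported-parameter path as a MapperParsingException; the method name, exception type, and message are assumptions, since the committed test only covers the back-compat path.

public void testFormatRejectedOnNewIndices() throws Exception {
    // Hypothetical sketch: same mapping as testFormatBackCompat, but parsed
    // against an index created with the current (3.0) version.
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("_source").field("format", "json").endObject()
        .endObject().endObject().string();

    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
    try {
        parser.parse(mapping);
        fail("the removed 'format' option should be rejected on a 3.0 index");
    } catch (MapperParsingException e) {
        // expected: "format" is left over after parsing and reported as unsupported
    }
}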