Merge pull request #15344 from jpountz/fix/remove_source_backcompat

Remove back compat for the `_source` compression options (`compress` and `compress_threshold`).

Committed by Adrien Grand on 2015-12-11 17:01:15 +01:00 in commit ed53edd4d0.
3 changed files with 5 additions and 203 deletions.
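For context, the removed options could previously be set in the `_source` section of a mapping. Here is a minimal sketch, reconstructed from the deleted CompressSourceMappingTests below (the `200b` threshold is the value those tests used); only indices created before 2.0.0-beta1 ever honored these settings:

// Old-style mapping using the now-removed options; reconstructed from the
// deleted tests below, not new functionality introduced by this commit.
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("_source")
            .field("compress", true)              // compress the stored _source
            .field("compress_threshold", "200b")  // compress only docs larger than this
        .endObject()
        .endObject().endObject().string();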

SourceFieldMapper.java

@@ -24,7 +24,6 @@ import org.apache.lucene.document.StoredField;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -35,7 +34,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentHelper;
@@ -93,10 +91,6 @@ public class SourceFieldMapper extends MetadataFieldMapper {
         private boolean enabled = Defaults.ENABLED;
 
-        private long compressThreshold = Defaults.COMPRESS_THRESHOLD;
-
-        private Boolean compress = null;
-
         private String format = Defaults.FORMAT;
 
         private String[] includes = null;
@@ -111,16 +105,6 @@ public class SourceFieldMapper extends MetadataFieldMapper {
             return this;
         }
 
-        public Builder compress(boolean compress) {
-            this.compress = compress;
-            return this;
-        }
-
-        public Builder compressThreshold(long compressThreshold) {
-            this.compressThreshold = compressThreshold;
-            return this;
-        }
-
         public Builder format(String format) {
             this.format = format;
             return this;
@@ -138,7 +122,7 @@ public class SourceFieldMapper extends MetadataFieldMapper {
 
         @Override
         public SourceFieldMapper build(BuilderContext context) {
-            return new SourceFieldMapper(enabled, format, compress, compressThreshold, includes, excludes, context.indexSettings());
+            return new SourceFieldMapper(enabled, format, includes, excludes, context.indexSettings());
         }
     }
@@ -154,22 +138,6 @@ public class SourceFieldMapper extends MetadataFieldMapper {
                 if (fieldName.equals("enabled")) {
                     builder.enabled(nodeBooleanValue(fieldNode));
                     iterator.remove();
-                } else if (fieldName.equals("compress") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
-                    if (fieldNode != null) {
-                        builder.compress(nodeBooleanValue(fieldNode));
-                    }
-                    iterator.remove();
-                } else if (fieldName.equals("compress_threshold") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
-                    if (fieldNode != null) {
-                        if (fieldNode instanceof Number) {
-                            builder.compressThreshold(((Number) fieldNode).longValue());
-                            builder.compress(true);
-                        } else {
-                            builder.compressThreshold(ByteSizeValue.parseBytesSizeValue(fieldNode.toString(), "compress_threshold").bytes());
-                            builder.compress(true);
-                        }
-                    }
-                    iterator.remove();
                 } else if ("format".equals(fieldName)) {
                     builder.format(nodeStringValue(fieldNode, null));
                     iterator.remove();
@@ -242,9 +210,6 @@ public class SourceFieldMapper extends MetadataFieldMapper {
     /** indicates whether the source will always exist and be complete, for use by features like the update API */
     private final boolean complete;
 
-    private Boolean compress;
-    private long compressThreshold;
-
     private final String[] includes;
     private final String[] excludes;
@@ -253,15 +218,13 @@ public class SourceFieldMapper extends MetadataFieldMapper {
     private XContentType formatContentType;
 
     private SourceFieldMapper(Settings indexSettings) {
-        this(Defaults.ENABLED, Defaults.FORMAT, null, -1, null, null, indexSettings);
+        this(Defaults.ENABLED, Defaults.FORMAT, null, null, indexSettings);
     }
 
-    private SourceFieldMapper(boolean enabled, String format, Boolean compress, long compressThreshold,
+    private SourceFieldMapper(boolean enabled, String format,
                               String[] includes, String[] excludes, Settings indexSettings) {
         super(NAME, Defaults.FIELD_TYPE.clone(), Defaults.FIELD_TYPE, indexSettings); // Only stored.
         this.enabled = enabled;
-        this.compress = compress;
-        this.compressThreshold = compressThreshold;
         this.includes = includes;
         this.excludes = excludes;
         this.format = format;
@@ -321,35 +284,14 @@ public class SourceFieldMapper extends MetadataFieldMapper {
             Tuple<XContentType, Map<String, Object>> mapTuple = XContentHelper.convertToMap(source, true);
             Map<String, Object> filteredSource = XContentMapValues.filter(mapTuple.v2(), includes, excludes);
             BytesStreamOutput bStream = new BytesStreamOutput();
-            StreamOutput streamOutput = bStream;
-            if (compress != null && compress && (compressThreshold == -1 || source.length() > compressThreshold)) {
-                streamOutput = CompressorFactory.defaultCompressor().streamOutput(bStream);
-            }
             XContentType contentType = formatContentType;
             if (contentType == null) {
                 contentType = mapTuple.v1();
             }
-            XContentBuilder builder = XContentFactory.contentBuilder(contentType, streamOutput).map(filteredSource);
+            XContentBuilder builder = XContentFactory.contentBuilder(contentType, bStream).map(filteredSource);
             builder.close();
 
             source = bStream.bytes();
-        } else if (compress != null && compress && !CompressorFactory.isCompressed(source)) {
-            if (compressThreshold == -1 || source.length() > compressThreshold) {
-                BytesStreamOutput bStream = new BytesStreamOutput();
-                XContentType contentType = XContentFactory.xContentType(source);
-                if (formatContentType != null && formatContentType != contentType) {
-                    XContentBuilder builder = XContentFactory.contentBuilder(formatContentType, CompressorFactory.defaultCompressor().streamOutput(bStream));
-                    builder.copyCurrentStructure(XContentFactory.xContent(contentType).createParser(source));
-                    builder.close();
-                } else {
-                    StreamOutput streamOutput = CompressorFactory.defaultCompressor().streamOutput(bStream);
-                    source.writeTo(streamOutput);
-                    streamOutput.close();
-                }
-                source = bStream.bytes();
-                // update the data in the context, so it can be compressed and stored compressed outside...
-                context.source(source);
-            }
         } else if (formatContentType != null) {
             // see if we need to convert the content type
             Compressor compressor = CompressorFactory.compressor(source);
@@ -403,7 +345,7 @@ public class SourceFieldMapper extends MetadataFieldMapper {
         boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
 
         // all are defaults, no need to write it at all
-        if (!includeDefaults && enabled == Defaults.ENABLED && compress == null && compressThreshold == -1 && includes == null && excludes == null) {
+        if (!includeDefaults && enabled == Defaults.ENABLED && includes == null && excludes == null) {
             return builder;
         }
         builder.startObject(contentType());
@@ -413,16 +355,6 @@ public class SourceFieldMapper extends MetadataFieldMapper {
         if (includeDefaults || !Objects.equals(format, Defaults.FORMAT)) {
             builder.field("format", format);
         }
-        if (compress != null) {
-            builder.field("compress", compress);
-        } else if (includeDefaults) {
-            builder.field("compress", false);
-        }
-        if (compressThreshold != -1) {
-            builder.field("compress_threshold", new ByteSizeValue(compressThreshold).toString());
-        } else if (includeDefaults) {
-            builder.field("compress_threshold", -1);
-        }
 
         if (includes != null) {
             builder.field("includes", includes);
@@ -453,13 +385,6 @@ public class SourceFieldMapper extends MetadataFieldMapper {
             if (Arrays.equals(excludes(), sourceMergeWith.excludes()) == false) {
                 mergeResult.addConflict("Cannot update excludes setting for [_source]");
             }
-        } else {
-            if (sourceMergeWith.compress != null) {
-                this.compress = sourceMergeWith.compress;
-            }
-            if (sourceMergeWith.compressThreshold != -1) {
-                this.compressThreshold = sourceMergeWith.compressThreshold;
-            }
         }
     }
 }
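With the two parser branches above gone, `compress` and `compress_threshold` are no longer consumed from the mapping map, so they presumably fall through to the parser's generic handling of unconsumed parameters. A hypothetical check, not part of this commit and assuming the usual rejection of leftover `_source` parameters:

// Hypothetical: a mapping that still sets "compress" on a current-version
// index should now be rejected rather than silently ignored.
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("_source").field("compress", true).endObject()
        .endObject().endObject().string();
try {
    createIndex("test").mapperService().documentMapperParser().parse(mapping);
    fail("expected [compress] to be rejected as an unsupported parameter");
} catch (MapperParsingException e) {
    // expected on indices created on or after 2.0.0-beta1
}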

CompressSourceMappingTests.java (deleted)

@@ -1,97 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.mapper.source;
-
-import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.compress.CompressorFactory;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.ParsedDocument;
-import org.elasticsearch.test.ESSingleNodeTestCase;
-
-import static org.hamcrest.Matchers.equalTo;
-
-/**
- *
- */
-public class CompressSourceMappingTests extends ESSingleNodeTestCase {
-    Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
-
-    public void testCompressDisabled() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("_source").field("compress", false).endObject()
-                .endObject().endObject().string();
-        DocumentMapper documentMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
-
-        ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
-                .field("field1", "value1")
-                .field("field2", "value2")
-                .endObject().bytes());
-
-        BytesRef bytes = doc.rootDoc().getBinaryValue("_source");
-        assertThat(CompressorFactory.isCompressed(new BytesArray(bytes)), equalTo(false));
-    }
-
-    public void testCompressEnabled() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("_source").field("compress", true).endObject()
-                .endObject().endObject().string();
-        DocumentMapper documentMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
-
-        ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
-                .field("field1", "value1")
-                .field("field2", "value2")
-                .endObject().bytes());
-
-        BytesRef bytes = doc.rootDoc().getBinaryValue("_source");
-        assertThat(CompressorFactory.isCompressed(new BytesArray(bytes)), equalTo(true));
-    }
-
-    public void testCompressThreshold() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("_source").field("compress_threshold", "200b").endObject()
-                .endObject().endObject().string();
-        DocumentMapper documentMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
-
-        ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
-                .field("field1", "value1")
-                .endObject().bytes());
-
-        BytesRef bytes = doc.rootDoc().getBinaryValue("_source");
-        assertThat(CompressorFactory.isCompressed(new BytesArray(bytes)), equalTo(false));
-
-        doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
-                .field("field1", "value1")
-                .field("field2", "value2 xxxxxxxxxxxxxx yyyyyyyyyyyyyyyyyyy zzzzzzzzzzzzzzzzz")
-                .field("field2", "value2 xxxxxxxxxxxxxx yyyyyyyyyyyyyyyyyyy zzzzzzzzzzzzzzzzz")
-                .field("field2", "value2 xxxxxxxxxxxxxx yyyyyyyyyyyyyyyyyyy zzzzzzzzzzzzzzzzz")
-                .field("field2", "value2 xxxxxxxxxxxxxx yyyyyyyyyyyyyyyyyyy zzzzzzzzzzzzzzzzz")
-                .endObject().bytes());
-
-        bytes = doc.rootDoc().getBinaryValue("_source");
-        assertThat(CompressorFactory.isCompressed(new BytesArray(bytes)), equalTo(true));
-    }
-}
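The deleted class was the only coverage for mapper-level `_source` compression, and it only ever ran against an index created as 1.4.2. A sketch of the equivalent assertion on a current-version index, in the style of the deleted tests (hypothetical, not added by this commit), would verify that the mapper now always stores `_source` uncompressed, leaving on-disk compression to Lucene's stored-fields format:

// Hypothetical sketch: the mapper layer should never compress _source on a
// current-version index; Lucene's stored-fields codec compresses on disk.
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("_source").endObject()
        .endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
        .field("field1", "value1")
        .endObject().bytes());
BytesRef bytes = doc.rootDoc().getBinaryValue("_source");
assertThat(CompressorFactory.isCompressed(new BytesArray(bytes)), equalTo(false));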

DefaultSourceMappingTests.java

@@ -84,32 +84,6 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase {
         assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.JSON));
     }
 
-    public void testJsonFormatCompressedBackcompat() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("_source").field("format", "json").field("compress", true).endObject()
-                .endObject().endObject().string();
-
-        Settings backcompatSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
-        DocumentMapperParser parser = createIndex("test", backcompatSettings).mapperService().documentMapperParser();
-        DocumentMapper documentMapper = parser.parse(mapping);
-        ParsedDocument doc = documentMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
-                .field("field", "value")
-                .endObject().bytes());
-
-        assertThat(CompressorFactory.isCompressed(doc.source()), equalTo(true));
-        byte[] uncompressed = CompressorFactory.uncompressIfNeeded(doc.source()).toBytes();
-        assertThat(XContentFactory.xContentType(uncompressed), equalTo(XContentType.JSON));
-
-        documentMapper = parser.parse(mapping);
-        doc = documentMapper.parse("test", "type", "1", XContentFactory.smileBuilder().startObject()
-                .field("field", "value")
-                .endObject().bytes());
-
-        assertThat(CompressorFactory.isCompressed(doc.source()), equalTo(true));
-        uncompressed = CompressorFactory.uncompressIfNeeded(doc.source()).toBytes();
-        assertThat(XContentFactory.xContentType(uncompressed), equalTo(XContentType.JSON));
-    }
-
     public void testIncludes() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_source").field("includes", new String[]{"path1*"}).endObject()