Simulate the entire toXContent instead of special casing
Today we try to detect in the _all field mapper whether the mapping needs to be generated at all. This is error prone, since the check silently misses any condition that is not explicitly listed. Instead, we should simulate the generation and skip the output only if nothing would be written. This commit also adds a randomized test that checks that the settings of the _all field mapper are applied correctly.

Closes #4579
Closes #4581
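The approach, restated as a minimal standalone sketch: render the body into a scratch buffer first, and emit the wrapping object only if the body produced any output. Plain JDK writers stand in here for Elasticsearch's XContentBuilder and BytesStreamOutput, and writeNonDefaults is a hypothetical stand-in for the innerToXContent method introduced in the diff below.

import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;

// Minimal sketch of the "simulate the generation" idea; all names are illustrative.
public class SimulateThenWrite {

    // Stand-in for innerToXContent(builder, includeDefaults): emits only non-default settings.
    static void writeNonDefaults(Writer out, boolean enabled, boolean defaultEnabled) throws IOException {
        if (enabled != defaultEnabled) {
            out.write("\"enabled\":" + enabled);
        }
    }

    static String render(boolean enabled, boolean defaultEnabled) throws IOException {
        // first pass: simulate the generation into a throwaway buffer
        StringWriter scratch = new StringWriter();
        writeNonDefaults(scratch, enabled, defaultEnabled);
        if (scratch.getBuffer().length() == 0) {
            return "";  // everything is at its default: write nothing at all
        }
        // second pass: something differs, so generate again, wrapped in its object
        StringWriter real = new StringWriter();
        real.write("\"_all\":{");
        writeNonDefaults(real, enabled, defaultEnabled);
        real.write("}");
        return real.toString();
    }

    public static void main(String[] args) throws IOException {
        System.out.println(render(true, true));   // all defaults: prints an empty line
        System.out.println(render(false, true));  // prints "_all":{"enabled":false}
    }
}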
commit beaa9153a6
parent 79f676e45e
@@ -28,6 +28,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.all.AllField;
 import org.elasticsearch.common.lucene.all.AllTermQuery;
@@ -256,16 +257,26 @@ public class AllFieldMapper extends AbstractFieldMapper<Void> implements Interna
 
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        // if all are defaults, no need to write it at all
         boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
-        if (!includeDefaults && enabled == Defaults.ENABLED && fieldType.stored() == Defaults.FIELD_TYPE.stored() &&
-                fieldType.storeTermVectors() == Defaults.FIELD_TYPE.storeTermVectors() &&
-                indexAnalyzer == null && searchAnalyzer == null && customFieldDataSettings == null
-                && fieldType.omitNorms() == Defaults.FIELD_TYPE.omitNorms()) {
-            return builder;
+        if (!includeDefaults) {
+            // simulate the generation to make sure we don't add unnecessary content if all is default
+            // if all are defaults, no need to write it at all - generating is twice is ok though
+            BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(0);
+            XContentBuilder b = new XContentBuilder(builder.contentType().xContent(), bytesStreamOutput);
+            long pos = bytesStreamOutput.position();
+            innerToXContent(b, false);
+            b.flush();
+            if (pos == bytesStreamOutput.position()) {
+                return builder;
+            }
         }
         builder.startObject(CONTENT_TYPE);
+        innerToXContent(builder, includeDefaults);
+        builder.endObject();
+        return builder;
+    }
 
+    private void innerToXContent(XContentBuilder builder, boolean includeDefaults) throws IOException {
         if (includeDefaults || enabled != Defaults.ENABLED) {
             builder.field("enabled", enabled);
         }
@@ -276,7 +287,7 @@ public class AllFieldMapper extends AbstractFieldMapper<Void> implements Interna
             builder.field("store", fieldType.stored());
         }
         if (includeDefaults || fieldType.storeTermVectors() != Defaults.FIELD_TYPE.storeTermVectors()) {
-            builder.field("store_term_vector", fieldType.storeTermVectors());
+            builder.field("store_term_vectors", fieldType.storeTermVectors());
         }
         if (includeDefaults || fieldType.storeTermVectorOffsets() != Defaults.FIELD_TYPE.storeTermVectorOffsets()) {
             builder.field("store_term_vector_offsets", fieldType.storeTermVectorOffsets());
@@ -332,11 +343,9 @@ public class AllFieldMapper extends AbstractFieldMapper<Void> implements Interna
         } else if (includeDefaults) {
             builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap());
         }
 
-        builder.endObject();
-        return builder;
     }
 
     @Override
     public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
         // do nothing here, no merging, but also no exception
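The new contract can be exercised directly; the randomized test in the next file ends with exactly this check. A sketch, assuming docMapper is a DocumentMapper parsed from a mapping whose _all section is left entirely at its defaults:

// Assumes docMapper was built from a mapping with a fully-default _all section.
BytesStreamOutput out = new BytesStreamOutput(0);
XContentBuilder probe = new XContentBuilder(XContentType.JSON.xContent(), out);
docMapper.allFieldMapper().toXContent(probe, ToXContent.EMPTY_PARAMS);
probe.flush();
// nothing was written, so the generated mapping source omits "_all" entirely
assertThat(out.size(), equalTo(0));

Generating the content twice for non-default mappings is accepted as cheap compared to keeping a hand-written list of default checks in sync.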
@@ -20,12 +20,18 @@
 package org.elasticsearch.index.mapper.all;
 
 import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.lucene.all.AllEntries;
 import org.elasticsearch.common.lucene.all.AllField;
 import org.elasticsearch.common.lucene.all.AllTermQuery;
 import org.elasticsearch.common.lucene.all.AllTokenStream;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MapperTestUtils;
@@ -34,9 +40,14 @@ import org.elasticsearch.test.ElasticsearchTestCase;
 import org.hamcrest.Matchers;
 import org.junit.Test;
 
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
 import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath;
 import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath;
-import static org.hamcrest.Matchers.equalTo;
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.hamcrest.Matchers.*;
 
 /**
  *
@@ -152,4 +163,121 @@ public class SimpleAllMapperTests extends ElasticsearchTestCase {
         assertThat(text, equalTo(allEntries.buildText()));
         assertThat(field.fieldType().omitNorms(), equalTo(false));
     }
+
+    @Test
+    public void testRandom() throws Exception {
+        boolean omitNorms = false;
+        boolean stored = false;
+        boolean enabled = true;
+        boolean autoBoost = false;
+        boolean tv_stored = false;
+        boolean tv_payloads = false;
+        boolean tv_offsets = false;
+        boolean tv_positions = false;
+        String similarity = null;
+        boolean fieldData = false;
+        XContentBuilder mappingBuilder = jsonBuilder();
+        mappingBuilder.startObject().startObject("test");
+        List<Tuple<String, Boolean>> booleanOptionList = new ArrayList<Tuple<String, Boolean>>();
+        boolean allDefault = true;
+        if (frequently()) {
+            allDefault = false;
+            mappingBuilder.startObject("_all");
+            if (randomBoolean()) {
+                booleanOptionList.add(new Tuple<String, Boolean>("omit_norms", omitNorms = randomBoolean()));
+            }
+            if (randomBoolean()) {
+                booleanOptionList.add(new Tuple<String, Boolean>("store", stored = randomBoolean()));
+            }
+            if (randomBoolean()) {
+                booleanOptionList.add(new Tuple<String, Boolean>("store_term_vectors", tv_stored = randomBoolean()));
+            }
+            if (randomBoolean()) {
+                booleanOptionList.add(new Tuple<String, Boolean>("enabled", enabled = randomBoolean()));
+            }
+            if (randomBoolean()) {
+                booleanOptionList.add(new Tuple<String, Boolean>("auto_boost", autoBoost = randomBoolean()));
+            }
+            if (randomBoolean()) {
+                booleanOptionList.add(new Tuple<String, Boolean>("store_term_vector_offsets", tv_offsets = randomBoolean()));
+            }
+            if (randomBoolean()) {
+                booleanOptionList.add(new Tuple<String, Boolean>("store_term_vector_positions", tv_positions = randomBoolean()));
+            }
+            if (randomBoolean()) {
+                booleanOptionList.add(new Tuple<String, Boolean>("store_term_vector_payloads", tv_payloads = randomBoolean()));
+            }
+            Collections.shuffle(booleanOptionList, getRandom());
+            for (Tuple<String, Boolean> option : booleanOptionList) {
+                mappingBuilder.field(option.v1(), option.v2().booleanValue());
+            }
+            tv_stored |= tv_positions || tv_payloads || tv_offsets;
+            if (randomBoolean()) {
+                mappingBuilder.field("similarity", similarity = randomBoolean() ? "BM25" : "TF/IDF");
+            }
+            if (randomBoolean()) {
+                fieldData = true;
+                mappingBuilder.startObject("fielddata");
+                mappingBuilder.field("foo", "bar");
+                mappingBuilder.endObject();
+            }
+            mappingBuilder.endObject();
+        }
+
+        String mapping = mappingBuilder.endObject().endObject().bytes().toUtf8();
+        logger.info(mapping);
+        DocumentMapper docMapper = MapperTestUtils.newParser().parse(mapping);
+        String builtMapping = docMapper.mappingSource().string();
+        // reparse it
+        DocumentMapper builtDocMapper = MapperTestUtils.newParser().parse(builtMapping);
+
+        byte[] json = jsonBuilder().startObject().startObject("test")
+                .field("foo", "bar")
+                .field("_id", 1)
+                .field("foobar", "foobar")
+                .endObject().endObject().bytes().array();
+        Document doc = builtDocMapper.parse(new BytesArray(json)).rootDoc();
+        AllField field = (AllField) doc.getField("_all");
+        if (enabled) {
+            assertThat(field.fieldType().omitNorms(), equalTo(omitNorms));
+            assertThat(field.fieldType().stored(), equalTo(stored));
+            assertThat(field.fieldType().storeTermVectorOffsets(), equalTo(tv_offsets));
+            assertThat(field.fieldType().storeTermVectorPayloads(), equalTo(tv_payloads));
+            assertThat(field.fieldType().storeTermVectorPositions(), equalTo(tv_positions));
+            assertThat(field.fieldType().storeTermVectors(), equalTo(tv_stored));
+            AllEntries allEntries = ((AllTokenStream) field.tokenStream(docMapper.mappers().indexAnalyzer())).allEntries();
+            assertThat(allEntries.fields().size(), equalTo(2));
+            assertThat(allEntries.fields().contains("foobar"), equalTo(true));
+            assertThat(allEntries.fields().contains("foo"), equalTo(true));
+            if (!stored) {
+                assertThat(field.stringValue(), nullValue());
+            }
+            String text = stored ? field.stringValue() : "bar foobar";
+            assertThat(text.trim(), equalTo(allEntries.buildText().trim()));
+        } else {
+            assertThat(field, nullValue());
+        }
+
+        Term term = new Term("foo", "bar");
+        Query query = builtDocMapper.allFieldMapper().queryStringTermQuery(term);
+        if (autoBoost) {
+            assertThat(query, equalTo((Query)new AllTermQuery(term)));
+        } else {
+            assertThat(query, equalTo((Query)new TermQuery(term)));
+        }
+        if (similarity == null || similarity.equals("TF/IDF")) {
+            assertThat(builtDocMapper.allFieldMapper().similarity(), nullValue());
+        } else {
+            assertThat(similarity, equalTo(builtDocMapper.allFieldMapper().similarity().name()));
+        }
+        assertThat(builtMapping.contains("fielddata"), is(fieldData));
+        if (allDefault) {
+            BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(0);
+            XContentBuilder b = new XContentBuilder(XContentType.JSON.xContent(), bytesStreamOutput);
+            XContentBuilder xContentBuilder = builtDocMapper.allFieldMapper().toXContent(b, ToXContent.EMPTY_PARAMS);
+            xContentBuilder.flush();
+            assertThat(bytesStreamOutput.size(), equalTo(0));
+        }
+
+    }
 }