Migrate some field mapper tests to ESTestCase (#61301) (#61346)

This switches a few tests for field mappers from `ESSingleNodeTestCase`
to `ESTestCase` because, in general, we prefer to avoid
`ESSingleNodeTestCase` when we can because it is slow and "big". "Big"
here means that it pulls in an entire node, making it difficult to
reason about what you are testing.
This commit is contained in:
Nik Everett 2020-08-19 15:43:49 -04:00 committed by GitHub
parent 89a7f32100
commit 9789e6d154
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 791 additions and 646 deletions

View File

@ -25,15 +25,12 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;
import java.io.IOException;
@ -43,17 +40,12 @@ import java.util.Collections;
import java.util.List;
import java.util.Set;
import static java.util.Collections.singletonList;
import static org.hamcrest.Matchers.containsString;
public class ScaledFloatFieldMapperTests extends FieldMapperTestCase<ScaledFloatFieldMapper.Builder> {
IndexService indexService;
DocumentMapperParser parser;
public class ScaledFloatFieldMapperTests extends FieldMapperTestCase2<ScaledFloatFieldMapper.Builder> {
@Before
public void setup() {
indexService = createIndex("test");
parser = indexService.mapperService().documentMapperParser();
addModifier("scaling_factor", false, (a, b) -> {
a.scalingFactor(10);
b.scalingFactor(100);
@ -66,8 +58,8 @@ public class ScaledFloatFieldMapperTests extends FieldMapperTestCase<ScaledFloat
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(InternalSettingsPlugin.class, MapperExtrasPlugin.class);
protected Collection<? extends Plugin> getPlugins() {
return singletonList(new MapperExtrasPlugin());
}
@Override
@ -75,17 +67,17 @@ public class ScaledFloatFieldMapperTests extends FieldMapperTestCase<ScaledFloat
return new ScaledFloatFieldMapper.Builder("scaled-float").scalingFactor(1);
}
@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "scaled_float").field("scaling_factor", 10.0);
}
public void testDefaults() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject());
XContentBuilder mapping = fieldMapping(b -> b.field("type", "scaled_float").field("scaling_factor", 10.0));
DocumentMapper mapper = createDocumentMapper(mapping);
assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
ParsedDocument doc = mapper.parse(new SourceToParse("test", "_doc", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 123)
@ -105,37 +97,27 @@ public class ScaledFloatFieldMapperTests extends FieldMapperTestCase<ScaledFloat
}
public void testMissingScalingFactor() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float").endObject().endObject()
.endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping)));
assertEquals("Field [field] misses required parameter [scaling_factor]", e.getMessage());
Exception e = expectThrows(
MapperParsingException.class,
() -> createMapperService(fieldMapping(b -> b.field("type", "scaled_float")))
);
assertThat(e.getMessage(), containsString("Field [field] misses required parameter [scaling_factor]"));
}
public void testIllegalScalingFactor() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", -1).endObject().endObject()
.endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping)));
assertEquals("[scaling_factor] must be a positive number, got [-1.0]", e.getMessage());
Exception e = expectThrows(
MapperParsingException.class,
() -> createMapperService(fieldMapping(b -> b.field("type", "scaled_float").field("scaling_factor", -1)))
);
assertThat(e.getMessage(), containsString("[scaling_factor] must be a positive number, got [-1.0]"));
}
public void testNotIndexed() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("index", false).field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "scaled_float").field("index", false).field("scaling_factor", 10.0))
);
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
ParsedDocument doc = mapper.parse(new SourceToParse("test", "_doc", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 123)
@ -150,16 +132,11 @@ public class ScaledFloatFieldMapperTests extends FieldMapperTestCase<ScaledFloat
}
public void testNoDocValues() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("doc_values", false).field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "scaled_float").field("doc_values", false).field("scaling_factor", 10.0))
);
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
ParsedDocument doc = mapper.parse(new SourceToParse("test", "_doc", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 123)
@ -174,16 +151,11 @@ public class ScaledFloatFieldMapperTests extends FieldMapperTestCase<ScaledFloat
}
public void testStore() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("store", true).field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "scaled_float").field("store", true).field("scaling_factor", 10.0))
);
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
ParsedDocument doc = mapper.parse(new SourceToParse("test", "_doc", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 123)
@ -203,22 +175,13 @@ public class ScaledFloatFieldMapperTests extends FieldMapperTestCase<ScaledFloat
}
public void testCoerce() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "123")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
ParsedDocument doc = mapper.parse(new SourceToParse("test", "_doc", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "123")
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
IndexableField pointField = fields[0];
@ -227,16 +190,10 @@ public class ScaledFloatFieldMapperTests extends FieldMapperTestCase<ScaledFloat
IndexableField dvField = fields[1];
assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType());
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", 10.0).field("coerce", false).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper2.mappingSource().toString());
ThrowingRunnable runnable = () -> mapper2.parse(new SourceToParse("test", "type", "1", BytesReference
DocumentMapper mapper2 = createDocumentMapper(
fieldMapping(b -> b.field("type", "scaled_float").field("scaling_factor", 10.0).field("coerce", false))
);
ThrowingRunnable runnable = () -> mapper2.parse(new SourceToParse("test", "_doc", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "123")
@ -256,16 +213,8 @@ public class ScaledFloatFieldMapperTests extends FieldMapperTestCase<ScaledFloat
}
private void doTestIgnoreMalformed(String value, String exceptionMessageContains) throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ThrowingRunnable runnable = () -> mapper.parse(new SourceToParse("test", "type", "1", BytesReference
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
ThrowingRunnable runnable = () -> mapper.parse(new SourceToParse("test", "_doc", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", value)
@ -274,14 +223,10 @@ public class ScaledFloatFieldMapperTests extends FieldMapperTestCase<ScaledFloat
MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
assertThat(e.getCause().getMessage(), containsString(exceptionMessageContains));
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", 10.0).field("ignore_malformed", true).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = mapper2.parse(new SourceToParse("test", "type", "1", BytesReference
DocumentMapper mapper2 = createDocumentMapper(
fieldMapping(b -> b.field("type", "scaled_float").field("scaling_factor", 10.0).field("ignore_malformed", true))
);
ParsedDocument doc = mapper2.parse(new SourceToParse("test", "_doc", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", value)
@ -293,20 +238,8 @@ public class ScaledFloatFieldMapperTests extends FieldMapperTestCase<ScaledFloat
}
public void testNullValue() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("type")
.startObject("properties")
.startObject("field")
.field("type", "scaled_float")
.field("scaling_factor", 10.0)
.endObject()
.endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
ParsedDocument doc = mapper.parse(new SourceToParse("test", "_doc", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.nullField("field")
@ -314,21 +247,10 @@ public class ScaledFloatFieldMapperTests extends FieldMapperTestCase<ScaledFloat
XContentType.JSON));
assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field"));
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("type")
.startObject("properties")
.startObject("field")
.field("type", "scaled_float")
.field("scaling_factor", 10.0)
.field("null_value", 2.5)
.endObject()
.endObject()
.endObject().endObject());
mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "scaled_float")
.field("scaling_factor", 10.0)
.field("null_value", 2.5)));
doc = mapper.parse(new SourceToParse("test", "_doc", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.nullField("field")
@ -345,62 +267,15 @@ public class ScaledFloatFieldMapperTests extends FieldMapperTestCase<ScaledFloat
assertFalse(dvField.fieldType().stored());
}
public void testEmptyName() throws IOException {
// after 5.x
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("")
.field("type", "scaled_float")
.field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping))
);
assertThat(e.getMessage(), containsString("name cannot be empty string"));
}
/**
* `index_options` was deprecated and is rejected as of 7.0
*/
public void testRejectIndexOptions() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("foo")
.field("type", "scaled_float")
.field("index_options", randomFrom(new String[] { "docs", "freqs", "positions", "offsets" }))
.endObject()
.endObject().endObject().endObject());
MapperParsingException e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping)));
assertThat(e.getMessage(), containsString("index_options not allowed in field [foo] of type [scaled_float]"));
}
public void testMeta() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("meta", Collections.singletonMap("foo", "bar"))
.field("scaling_factor", 10.0)
.endObject().endObject().endObject().endObject());
DocumentMapper mapper = indexService.mapperService().merge("_doc",
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
assertEquals(mapping, mapper.mappingSource().toString());
String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", 10.0)
.endObject().endObject().endObject().endObject());
mapper = indexService.mapperService().merge("_doc",
new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
assertEquals(mapping2, mapper.mappingSource().toString());
String mapping3 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", "scaled_float")
.field("meta", Collections.singletonMap("baz", "quux"))
.field("scaling_factor", 10.0)
.endObject().endObject().endObject().endObject());
mapper = indexService.mapperService().merge("_doc",
new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE);
assertEquals(mapping3, mapper.mappingSource().toString());
MapperParsingException e = expectThrows(
MapperParsingException.class,
() -> createMapperService(fieldMapping(b -> b.field("type", "scaled_float").field("index_options", randomIndexOptions())))
);
assertThat(e.getMessage(), containsString("index_options not allowed in field [field] of type [scaled_float]"));
}
public void testParseSourceValue() {

View File

@ -22,31 +22,34 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockLowerCaseFilter;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.core.LowerCaseFilter;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.CharFilterFactory;
import org.elasticsearch.index.analysis.CustomAnalyzer;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.LowercaseNormalizer;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.PreConfiguredTokenFilter;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.index.analysis.TokenizerFactory;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;
import java.io.IOException;
import java.util.Arrays;
@ -62,8 +65,7 @@ import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
public class KeywordFieldMapperTests extends MapperTestCase {
/**
* Creates a copy of the lowercase token filter which we use for testing merge errors.
*/
@ -75,47 +77,75 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
@Override
public Map<String, AnalysisModule.AnalysisProvider<TokenizerFactory>> getTokenizers() {
return singletonMap("keyword", (indexSettings, environment, name, settings) ->
TokenizerFactory.newFactory(name, () -> new MockTokenizer(MockTokenizer.KEYWORD, false)));
return singletonMap(
"keyword",
(indexSettings, environment, name, settings) -> TokenizerFactory.newFactory(
name,
() -> new MockTokenizer(MockTokenizer.KEYWORD, false)
)
);
}
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(InternalSettingsPlugin.class, MockAnalysisPlugin.class);
protected Collection<? extends Plugin> getPlugins() {
return singletonList(new MockAnalysisPlugin());
}
private static final Settings mapperSettings = Settings.builder()
.put("index.analysis.normalizer.my_lowercase.type", "custom")
.putList("index.analysis.normalizer.my_lowercase.filter", "lowercase")
.put("index.analysis.normalizer.my_other_lowercase.type", "custom")
.putList("index.analysis.normalizer.my_other_lowercase.filter", "lowercase").build();
@Override
protected IndexAnalyzers createIndexAnalyzers() {
return new IndexAnalyzers(
singletonMap("default", new NamedAnalyzer("default", AnalyzerScope.INDEX, new StandardAnalyzer())),
org.elasticsearch.common.collect.Map.of(
"lowercase", new NamedAnalyzer("lowercase", AnalyzerScope.INDEX, new LowercaseNormalizer()),
"other_lowercase", new NamedAnalyzer("other_lowercase", AnalyzerScope.INDEX, new LowercaseNormalizer())
),
singletonMap(
"lowercase",
new NamedAnalyzer(
"lowercase",
AnalyzerScope.INDEX,
new CustomAnalyzer(
TokenizerFactory.newFactory("lowercase", WhitespaceTokenizer::new),
new CharFilterFactory[0],
new TokenFilterFactory[] { new TokenFilterFactory() {
IndexService indexService;
DocumentMapperParser parser;
@Override
public String name() {
return "lowercase";
}
@Before
public void setup() {
indexService = createIndex("test", mapperSettings);
parser = indexService.mapperService().documentMapperParser();
@Override
public TokenStream create(TokenStream tokenStream) {
return new LowerCaseFilter(tokenStream);
}
} }
)
)
)
);
}
@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "keyword");
}
public void testDefaults() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "keyword").endObject().endObject()
.endObject().endObject());
XContentBuilder mapping = fieldMapping(this::minimalMapping);
DocumentMapper mapper = createDocumentMapper(mapping);
assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "1234").endObject()),
XContentType.JSON
)
);
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
@ -145,74 +175,70 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
}
public void testIgnoreAbove() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "keyword").field("ignore_above", 5).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "keyword").field("ignore_above", 5)));
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "elk")
.endObject()),
XContentType.JSON));
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "elk").endObject()),
XContentType.JSON
)
);
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "elasticsearch")
.endObject()),
XContentType.JSON));
doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "elasticsearch").endObject()),
XContentType.JSON
)
);
fields = doc.rootDoc().getFields("field");
assertEquals(0, fields.length);
}
public void testNullValue() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "keyword").endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.nullField("field")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField("field").endObject()),
XContentType.JSON
)
);
assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field"));
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "keyword").field("null_value", "uri").endObject().endObject()
.endObject().endObject());
mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.endObject()),
XContentType.JSON));
mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "keyword").field("null_value", "uri")));
doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()),
XContentType.JSON
)
);
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(0, fields.length);
doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.nullField("field")
.endObject()),
XContentType.JSON));
doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField("field").endObject()),
XContentType.JSON
)
);
fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
@ -220,20 +246,16 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
}
public void testEnableStore() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "keyword").field("store", true).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "keyword").field("store", true)));
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "1234").endObject()),
XContentType.JSON
)
);
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
@ -241,20 +263,16 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
}
public void testDisableIndex() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "keyword").field("index", false).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "keyword").field("index", false)));
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "1234").endObject()),
XContentType.JSON
)
);
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
@ -263,20 +281,16 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
}
public void testDisableDocValues() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "keyword").field("doc_values", false).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "keyword").field("doc_values", false)));
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "1234").endObject()),
XContentType.JSON
)
);
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
@ -284,68 +298,51 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
}
public void testIndexOptions() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "keyword")
.field("index_options", "freqs").endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "keyword").field("index_options", "freqs")));
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "1234").endObject()),
XContentType.JSON
)
);
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
assertEquals(IndexOptions.DOCS_AND_FREQS, fields[0].fieldType().indexOptions());
for (String indexOptions : Arrays.asList("positions", "offsets")) {
final String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "keyword")
.field("index_options", indexOptions).endObject().endObject()
.endObject().endObject());
MapperParsingException e = expectThrows(MapperParsingException.class,
() -> parser.parse("type", new CompressedXContent(mapping2)));
assertEquals("Unknown value [" + indexOptions + "] for field [index_options] - accepted values are [docs, freqs]",
e.getMessage());
MapperParsingException e = expectThrows(
MapperParsingException.class,
() -> createMapperService(fieldMapping(b -> b.field("type", "keyword").field("index_options", indexOptions)))
);
assertThat(
e.getMessage(),
containsString("Unknown value [" + indexOptions + "] for field [index_options] - accepted values are [docs, freqs]")
);
}
}
/**
 * A {@code boost} set in the mapping must be carried through to the resulting
 * {@link MappedFieldType}.
 */
public void testBoost() throws IOException {
    // Build the mapper service straight from a field mapping; no node needed.
    MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "keyword").field("boost", 2f)));
    assertThat(mapperService.fieldType("field").boost(), equalTo(2f));
}
public void testEnableNorms() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("field")
.field("type", "keyword")
.field("doc_values", false)
.field("norms", true)
.endObject()
.endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "keyword").field("doc_values", false).field("norms", true))
);
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "1234").endObject()),
XContentType.JSON
)
);
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
@ -355,30 +352,17 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
assertEquals(0, fieldNamesFields.length);
}
// Exercises a custom normalizer — presumably "my_lowercase" is registered in the
// test index's analysis settings; confirm against the test setup.
public void testCustomNormalizer() throws IOException {
    checkLowercaseNormalizer("my_lowercase");
}
// Exercises the built-in "lowercase" normalizer via the shared check helper.
public void testInBuiltNormalizer() throws IOException {
    checkLowercaseNormalizer("lowercase");
}
public void checkLowercaseNormalizer(String normalizerName) throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "keyword").field("normalizer", normalizerName).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "AbC")
.endObject()),
XContentType.JSON));
public void testNormalizer() throws IOException {
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "keyword").field("normalizer", "lowercase")));
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "AbC").endObject()),
XContentType.JSON
)
);
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
@ -402,209 +386,127 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
}
/**
 * A keyword field must reject an object value: parsing {@code {"field": {}}}
 * fails with a {@link MapperParsingException} that previews the offending value.
 */
public void testParsesKeywordNestedEmptyObjectStrict() throws IOException {
    DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
    BytesReference source = BytesReference.bytes(
        XContentFactory.jsonBuilder().startObject().startObject("field").endObject().endObject()
    );
    MapperParsingException ex = expectThrows(
        MapperParsingException.class,
        () -> defaultMapper.parse(new SourceToParse("test", "_doc", "1", source, XContentType.JSON))
    );
    assertEquals(
        "failed to parse field [field] of type [keyword] in document with id '1'. " + "Preview of field's value: '{}'",
        ex.getMessage()
    );
}
public void testParsesKeywordNestedListStrict() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("type")
.startObject("properties")
.startObject("field")
.field("type", "keyword")
.endObject()
.endObject()
.endObject()
.endObject());
DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
BytesReference source = BytesReference.bytes(
XContentFactory.jsonBuilder()
.startObject()
.startArray("field")
.startObject()
.startArray("array_name")
.value("inner_field_first")
.value("inner_field_second")
.endArray()
.endObject()
.startObject()
.startArray("array_name")
.value("inner_field_first")
.value("inner_field_second")
.endArray()
.endObject()
.endArray()
.endObject());
MapperParsingException ex = expectThrows(MapperParsingException.class,
() -> defaultMapper.parse(new SourceToParse("test", "type", "1", source, XContentType.JSON)));
() -> defaultMapper.parse(new SourceToParse("test", "_doc", "1", source, XContentType.JSON)));
assertEquals("failed to parse field [field] of type [keyword] in document with id '1'. " +
"Preview of field's value: '{array_name=[inner_field_first, inner_field_second]}'", ex.getMessage());
}
/**
 * A keyword field must reject an object value even when the object only holds
 * a null, e.g. {@code {"field": {"field_name": null}}}.
 */
public void testParsesKeywordNullStrict() throws IOException {
    DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
    BytesReference source = BytesReference.bytes(
        XContentFactory.jsonBuilder().startObject().startObject("field").nullField("field_name").endObject().endObject()
    );
    MapperParsingException ex = expectThrows(
        MapperParsingException.class,
        () -> defaultMapper.parse(new SourceToParse("test", "_doc", "1", source, XContentType.JSON))
    );
    assertEquals(
        "failed to parse field [field] of type [keyword] in document with id '1'. " + "Preview of field's value: '{field_name=null}'",
        ex.getMessage()
    );
}
/**
 * Updating the {@code normalizer} of an existing keyword field must be
 * rejected as a mapping conflict.
 */
public void testUpdateNormalizer() throws IOException {
    // Use the lightweight MapperService helpers instead of the removed
    // indexService/parser fields from ESSingleNodeTestCase.
    MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "keyword").field("normalizer", "lowercase")));
    IllegalArgumentException e = expectThrows(
        IllegalArgumentException.class,
        () -> merge(mapperService, fieldMapping(b -> b.field("type", "keyword").field("normalizer", "other_lowercase")))
    );
    assertEquals(
        "Mapper for [field] conflicts with existing mapper:\n"
            + "\tCannot update parameter [normalizer] from [lowercase] to [other_lowercase]",
        e.getMessage()
    );
}
public void testEmptyName() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("type")
.startObject("properties")
.startObject("")
.field("type", "keyword")
.endObject()
.endObject()
.endObject().endObject());
// Empty name not allowed in index created after 5.0
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping))
MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "keyword").field("normalizer", "lowercase")));
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> merge(mapperService, fieldMapping(b -> b.field("type", "keyword").field("normalizer", "other_lowercase")))
);
assertEquals(
"Mapper for [field] conflicts with existing mapper:\n"
+ "\tCannot update parameter [normalizer] from [lowercase] to [other_lowercase]",
e.getMessage()
);
assertThat(e.getMessage(), containsString("name cannot be empty string"));
}
/**
 * {@code split_queries_on_whitespace} controls the search analyzer of a
 * keyword field: when false the whole input is one token; when true it is
 * split on whitespace, composed with any configured normalizer.
 */
public void testSplitQueriesOnWhitespace() throws IOException {
    // Default (false): plain field keeps the input whole; normalized field lowercases then splits.
    MapperService mapperService = createMapperService(mapping(b -> {
        b.startObject("field").field("type", "keyword").endObject();
        b.startObject("field_with_normalizer");
        {
            b.field("type", "keyword");
            b.field("normalizer", "lowercase");
            b.field("split_queries_on_whitespace", true);
        }
        b.endObject();
    }));
    MappedFieldType fieldType = mapperService.fieldType("field");
    assertThat(fieldType, instanceOf(KeywordFieldMapper.KeywordFieldType.class));
    KeywordFieldMapper.KeywordFieldType ft = (KeywordFieldMapper.KeywordFieldType) fieldType;
    Analyzer a = ft.getTextSearchInfo().getSearchAnalyzer();
    assertTokenStreamContents(a.tokenStream("", "Hello World"), new String[] { "Hello World" });
    fieldType = mapperService.fieldType("field_with_normalizer");
    assertThat(fieldType, instanceOf(KeywordFieldMapper.KeywordFieldType.class));
    ft = (KeywordFieldMapper.KeywordFieldType) fieldType;
    assertThat(ft.getTextSearchInfo().getSearchAnalyzer().name(), equalTo("lowercase"));
    assertTokenStreamContents(
        ft.getTextSearchInfo().getSearchAnalyzer().analyzer().tokenStream("", "Hello World"),
        new String[] { "hello", "world" }
    );
    // Flip the flags: plain field now splits (case preserved); normalized field keeps the input whole.
    mapperService = createMapperService(mapping(b -> {
        b.startObject("field").field("type", "keyword").field("split_queries_on_whitespace", true).endObject();
        b.startObject("field_with_normalizer");
        {
            b.field("type", "keyword");
            b.field("normalizer", "lowercase");
            b.field("split_queries_on_whitespace", false);
        }
        b.endObject();
    }));
    fieldType = mapperService.fieldType("field");
    assertThat(fieldType, instanceOf(KeywordFieldMapper.KeywordFieldType.class));
    ft = (KeywordFieldMapper.KeywordFieldType) fieldType;
    assertTokenStreamContents(
        ft.getTextSearchInfo().getSearchAnalyzer().analyzer().tokenStream("", "Hello World"),
        new String[] { "Hello", "World" }
    );
    fieldType = mapperService.fieldType("field_with_normalizer");
    assertThat(fieldType, instanceOf(KeywordFieldMapper.KeywordFieldType.class));
    ft = (KeywordFieldMapper.KeywordFieldType) fieldType;
    assertThat(ft.getTextSearchInfo().getSearchAnalyzer().name(), equalTo("lowercase"));
    assertTokenStreamContents(
        ft.getTextSearchInfo().getSearchAnalyzer().analyzer().tokenStream("", "Hello World"),
        new String[] { "hello world" }
    );
}
public void testMeta() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", "keyword")
.field("meta", Collections.singletonMap("foo", "bar"))
.endObject().endObject().endObject().endObject());
DocumentMapper mapper = indexService.mapperService().merge("_doc",
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
assertEquals(mapping, mapper.mappingSource().toString());
String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", "keyword")
.endObject().endObject().endObject().endObject());
mapper = indexService.mapperService().merge("_doc",
new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
assertEquals(mapping2, mapper.mappingSource().toString());
String mapping3 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", "keyword")
.field("meta", Collections.singletonMap("baz", "quux"))
.endObject().endObject().endObject().endObject());
mapper = indexService.mapperService().merge("_doc",
new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE);
assertEquals(mapping3, mapper.mappingSource().toString());
assertThat(ft.getTextSearchInfo().getSearchAnalyzer().name(), equalTo("lowercase"));
assertTokenStreamContents(
ft.getTextSearchInfo().getSearchAnalyzer().analyzer().tokenStream("", "Hello World"),
new String[] { "hello world" }
);
}
public void testParseSourceValue() {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
Mapper.BuilderContext context = new Mapper.BuilderContext(getIndexSettings(), new ContentPath());
KeywordFieldMapper mapper = new KeywordFieldMapper.Builder("field").build(context);
assertEquals("value", mapper.parseSourceValue("value", null));
@ -614,23 +516,18 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> mapper.parseSourceValue(true, "format"));
assertEquals("Field [field] of type [keyword] doesn't support formats.", e.getMessage());
KeywordFieldMapper ignoreAboveMapper = new KeywordFieldMapper.Builder("field")
.ignoreAbove(4)
.build(context);
KeywordFieldMapper ignoreAboveMapper = new KeywordFieldMapper.Builder("field").ignoreAbove(4).build(context);
assertNull(ignoreAboveMapper.parseSourceValue("value", null));
assertEquals("42", ignoreAboveMapper.parseSourceValue(42L, null));
assertEquals("true", ignoreAboveMapper.parseSourceValue(true, null));
KeywordFieldMapper normalizerMapper = new KeywordFieldMapper.Builder("field", indexService.getIndexAnalyzers())
.normalizer("lowercase")
KeywordFieldMapper normalizerMapper = new KeywordFieldMapper.Builder("field", createIndexAnalyzers()).normalizer("lowercase")
.build(context);
assertEquals("value", normalizerMapper.parseSourceValue("VALUE", null));
assertEquals("42", normalizerMapper.parseSourceValue(42L, null));
assertEquals("value", normalizerMapper.parseSourceValue("value", null));
KeywordFieldMapper nullValueMapper = new KeywordFieldMapper.Builder("field")
.nullValue("NULL")
.build(context);
KeywordFieldMapper nullValueMapper = new KeywordFieldMapper.Builder("field").nullValue("NULL").build(context);
SourceLookup sourceLookup = new SourceLookup();
sourceLookup.setSource(Collections.singletonMap("field", null));
assertEquals(org.elasticsearch.common.collect.List.of("NULL"), nullValueMapper.lookupValues(sourceLookup, null));

View File

@ -45,6 +45,12 @@ import java.util.function.BiConsumer;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
/**
* Base class for testing {@link FieldMapper}s.
* @param <T> builder for the mapper to test
* @deprecated prefer {@link FieldMapperTestCase2}
*/
@Deprecated
public abstract class FieldMapperTestCase<T extends FieldMapper.Builder<?>> extends ESSingleNodeTestCase {
protected final Settings SETTINGS = Settings.builder()

View File

@ -0,0 +1,229 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.function.BiConsumer;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
/**
* Base class for testing {@link FieldMapper}s.
* @param <T> builder for the mapper to test
*/
public abstract class FieldMapperTestCase2<T extends FieldMapper.Builder<?>> extends MapperTestCase {
    /**
     * One mapping parameter plus a way to set it *differently* on two builders.
     * Drives the generated merge-conflict and serialization tests below.
     */
    private final class Modifier {
        final String property;            // mapping parameter name, e.g. "boost"
        final boolean updateable;         // true if a mapping update may legally change it
        final BiConsumer<T, T> modifier;  // applies distinct values to the two builders
        Modifier(String property, boolean updateable, BiConsumer<T, T> modifier) {
            this.property = property;
            this.updateable = updateable;
            this.modifier = modifier;
        }
        // Apply the difference: after this call `first` and `second` disagree on `property`.
        void apply(T first, T second) {
            modifier.accept(first, second);
        }
    }
    // Convenience for boolean parameters: first builder gets true, second gets false.
    private Modifier booleanModifier(String name, boolean updateable, BiConsumer<T, Boolean> method) {
        return new Modifier(name, updateable, (a, b) -> {
            method.accept(a, true);
            method.accept(b, false);
        });
    }
    /**
     * Mapping parameters the mapper under test does not support; these are
     * skipped by {@link #testMergeConflicts()} and {@link #testSerialization()}.
     */
    protected Set<String> unsupportedProperties() {
        return Collections.emptySet();
    }
    // Built-in modifiers for the parameters common to most field mappers.
    // Subclasses add type-specific ones via addModifier/addBooleanModifier.
    private final List<Modifier> modifiers = new ArrayList<>(Arrays.asList(new Modifier("analyzer", false, (a, b) -> {
        // NOTE(review): both calls mutate `a` (the second overwrites the first),
        // so `b` keeps its default analyzer. The builders still end up different,
        // which is all the tests need, but this looks like it was meant to be
        // a/b — confirm intent.
        a.indexAnalyzer(new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer()));
        a.indexAnalyzer(new NamedAnalyzer("keyword", AnalyzerScope.INDEX, new KeywordAnalyzer()));
    }), new Modifier("boost", true, (a, b) -> {
        a.boost(1.1f);
        b.boost(1.2f);
    }), new Modifier("doc_values", false, (a, b) -> {
        a.docValues(true);
        b.docValues(false);
    }),
        booleanModifier("eager_global_ordinals", true, (a, t) -> a.setEagerGlobalOrdinals(t)),
        booleanModifier("index", false, (a, t) -> a.index(t)),
        booleanModifier("norms", false, FieldMapper.Builder::omitNorms),
        new Modifier("search_analyzer", true, (a, b) -> {
            // NOTE(review): same a/a pattern as "analyzer" above — confirm intent.
            a.searchAnalyzer(new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer()));
            a.searchAnalyzer(new NamedAnalyzer("keyword", AnalyzerScope.INDEX, new KeywordAnalyzer()));
        }),
        new Modifier("search_quote_analyzer", true, (a, b) -> {
            // NOTE(review): same a/a pattern as "analyzer" above — confirm intent.
            a.searchQuoteAnalyzer(new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer()));
            a.searchQuoteAnalyzer(new NamedAnalyzer("whitespace", AnalyzerScope.INDEX, new WhitespaceAnalyzer()));
        }),
        new Modifier("store", false, (a, b) -> {
            a.store(true);
            b.store(false);
        }),
        new Modifier("term_vector", false, (a, b) -> {
            a.storeTermVectors(true);
            b.storeTermVectors(false);
        }),
        new Modifier("term_vector_positions", false, (a, b) -> {
            // Both builders store term vectors; they differ only in positions.
            a.storeTermVectors(true);
            b.storeTermVectors(true);
            a.storeTermVectorPositions(true);
            b.storeTermVectorPositions(false);
        }),
        new Modifier("term_vector_payloads", false, (a, b) -> {
            // Payloads require vectors + positions on both; they differ only in payloads.
            a.storeTermVectors(true);
            b.storeTermVectors(true);
            a.storeTermVectorPositions(true);
            b.storeTermVectorPositions(true);
            a.storeTermVectorPayloads(true);
            b.storeTermVectorPayloads(false);
        }),
        new Modifier("term_vector_offsets", false, (a, b) -> {
            // Offsets require vectors + positions on both; they differ only in offsets.
            a.storeTermVectors(true);
            b.storeTermVectors(true);
            a.storeTermVectorPositions(true);
            b.storeTermVectorPositions(true);
            a.storeTermVectorOffsets(true);
            b.storeTermVectorOffsets(false);
        })
    ));
    /**
     * Add type-specific modifiers for consistency checking.
     *
     * This should be called in a {@code @Before} method
     */
    protected void addModifier(String property, boolean updateable, BiConsumer<T, T> method) {
        modifiers.add(new Modifier(property, updateable, method));
    }
    /**
     * Add type-specific modifiers for consistency checking.
     *
     * This should be called in a {@code @Before} method
     */
    protected void addBooleanModifier(String property, boolean updateable, BiConsumer<T, Boolean> method) {
        modifiers.add(new Modifier(property, updateable, (a, b) -> {
            method.accept(a, true);
            method.accept(b, false);
        }));
    }
    /** Build a minimal builder for the mapper under test. Called repeatedly; must return a fresh instance. */
    protected abstract T newBuilder();
    /**
     * Merging two mappers must succeed when they are identical or differ only in
     * updateable parameters, and must fail with a message naming the parameter
     * (or the conflicting type) otherwise.
     */
    public void testMergeConflicts() {
        Mapper.BuilderContext context = new Mapper.BuilderContext(SETTINGS, new ContentPath(1));
        T builder1 = newBuilder();
        T builder2 = newBuilder();
        {
            FieldMapper mapper = (FieldMapper) builder1.build(context);
            FieldMapper toMerge = (FieldMapper) builder2.build(context);
            mapper.merge(toMerge); // identical mappers should merge with no issue
        }
        {
            // Merging a mapper of a different content type must always fail.
            FieldMapper mapper = (FieldMapper) newBuilder().build(context);
            FieldMapper toMerge = new MockFieldMapper("bogus") {
                @Override
                protected String contentType() {
                    return "bogustype";
                }
            };
            IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> mapper.merge(toMerge));
            assertThat(e.getMessage(), containsString("cannot be changed from type"));
            assertThat(e.getMessage(), containsString("bogustype"));
        }
        for (Modifier modifier : modifiers) {
            if (unsupportedProperties().contains(modifier.property)) {
                continue;
            }
            builder1 = newBuilder();
            builder2 = newBuilder();
            modifier.apply(builder1, builder2);
            FieldMapper mapper = (FieldMapper) builder1.build(context);
            FieldMapper toMerge = (FieldMapper) builder2.build(context);
            if (modifier.updateable) {
                // Updateable parameters merge cleanly.
                mapper.merge(toMerge);
            } else {
                // Non-updateable parameters must conflict, naming the parameter.
                IllegalArgumentException e = expectThrows(
                    IllegalArgumentException.class,
                    "Expected an error when merging property difference " + modifier.property,
                    () -> mapper.merge(toMerge)
                );
                assertThat(e.getMessage(), containsString(modifier.property));
            }
        }
    }
    /**
     * Every supported parameter must survive a serialize/parse round trip,
     * both with and without defaults included.
     */
    public final void testSerialization() throws IOException {
        for (Modifier modifier : modifiers) {
            if (unsupportedProperties().contains(modifier.property)) {
                continue;
            }
            T builder1 = newBuilder();
            T builder2 = newBuilder();
            modifier.apply(builder1, builder2);
            assertSerializes(builder1);
            assertSerializes(builder2);
        }
    }
    /**
     * Build the mapper, serialize its mapping, parse it back through a fresh
     * {@link MapperService}, and assert the re-serialized mapping is identical
     * (with and without defaults).
     */
    protected void assertSerializes(T builder) throws IOException {
        Mapper.BuilderContext context = new Mapper.BuilderContext(getIndexSettings(), new ContentPath(1));
        XContentBuilder mappings = mappingsToJson(builder.build(context), false);
        XContentBuilder mappingsWithDefault = mappingsToJson(builder.build(context), true);
        MapperService mapperService = createMapperService(mappings);
        Mapper rebuilt = mapperService.documentMapper().mappers().getMapper(builder.name);
        XContentBuilder reparsed = mappingsToJson(rebuilt, false);
        XContentBuilder reparsedWithDefault = mappingsToJson(rebuilt, true);
        assertThat(Strings.toString(reparsed), equalTo(Strings.toString(mappings)));
        assertThat(Strings.toString(reparsedWithDefault), equalTo(Strings.toString(mappingsWithDefault)));
    }
    // Render a mapper (or any ToXContent) as a full "_doc" mapping, optionally including defaults.
    private XContentBuilder mappingsToJson(ToXContent builder, boolean includeDefaults) throws IOException {
        ToXContent.Params params = includeDefaults
            ? new ToXContent.MapParams(singletonMap("include_defaults", "true"))
            : ToXContent.EMPTY_PARAMS;
        return mapping(b -> builder.toXContent(b, params));
    }
}

View File

@ -0,0 +1,179 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.mapper.MapperRegistry;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static java.util.stream.Collectors.toList;
import static org.hamcrest.Matchers.containsString;
/**
* Base class for testing {@link Mapper}s.
*/
public abstract class MapperTestCase extends ESTestCase {
protected static final Settings SETTINGS = Settings.builder().put("index.version.created", Version.CURRENT).build();
protected Collection<? extends Plugin> getPlugins() {
return emptyList();
}
protected Settings getIndexSettings() {
return Settings.EMPTY;
}
protected IndexAnalyzers createIndexAnalyzers() {
return new IndexAnalyzers(
singletonMap("default", new NamedAnalyzer("default", AnalyzerScope.INDEX, new StandardAnalyzer())),
emptyMap(),
emptyMap()
);
}
protected final String randomIndexOptions() {
return randomFrom(new String[] { "docs", "freqs", "positions", "offsets" });
}
protected final DocumentMapper createDocumentMapper(XContentBuilder mappings) throws IOException {
return createMapperService(mappings).documentMapper();
}
protected final MapperService createMapperService(XContentBuilder mappings) throws IOException {
return createMapperService(getIndexSettings(), mappings);
}
/**
* Create a {@link MapperService} like we would for an index.
*/
protected final MapperService createMapperService(Settings settings, XContentBuilder mapping) throws IOException {
IndexMetadata meta = IndexMetadata.builder("index")
.settings(Settings.builder().put("index.version.created", Version.CURRENT))
.numberOfReplicas(0)
.numberOfShards(1)
.build();
IndexSettings indexSettings = new IndexSettings(meta, Settings.EMPTY);
MapperRegistry mapperRegistry = new IndicesModule(
getPlugins().stream().filter(p -> p instanceof MapperPlugin).map(p -> (MapperPlugin) p).collect(toList())
).getMapperRegistry();
ScriptService scriptService = new ScriptService(Settings.EMPTY, emptyMap(), emptyMap());
SimilarityService similarityService = new SimilarityService(indexSettings, scriptService, emptyMap());
MapperService mapperService = new MapperService(
indexSettings,
createIndexAnalyzers(),
xContentRegistry(),
similarityService,
mapperRegistry,
() -> { throw new UnsupportedOperationException(); },
() -> true
);
merge(mapperService, mapping);
return mapperService;
}
/**
* Merge a new mapping into the one in the provided {@link MapperService}.
*/
protected final void merge(MapperService mapperService, XContentBuilder mapping) throws IOException {
mapperService.merge("_doc", new CompressedXContent(BytesReference.bytes(mapping)), MergeReason.MAPPING_UPDATE);
}
protected final XContentBuilder mapping(CheckedConsumer<XContentBuilder, IOException> buildFields) throws IOException {
XContentBuilder builder = XContentFactory.jsonBuilder().startObject().startObject("_doc").startObject("properties");
buildFields.accept(builder);
return builder.endObject().endObject().endObject();
}
protected final XContentBuilder fieldMapping(CheckedConsumer<XContentBuilder, IOException> buildField) throws IOException {
return mapping(b -> {
b.startObject("field");
buildField.accept(b);
b.endObject();
});
}
protected abstract void minimalMapping(XContentBuilder b) throws IOException;
public final void testEmptyName() throws IOException {
MapperParsingException e = expectThrows(MapperParsingException.class, () -> createMapperService(mapping(b -> {
b.startObject("");
minimalMapping(b);
b.endObject();
})));
assertThat(e.getMessage(), containsString("name cannot be empty string"));
}
/**
 * The {@code meta} attribute on a field can be set, removed by merging a
 * mapping without it, and replaced by merging a mapping with a different
 * value; after each step the serialized mapping reflects the latest state.
 */
public final void testMeta() throws Exception {
    XContentBuilder mapping = fieldMapping(b -> {
        minimalMapping(b);
        b.field("meta", Collections.singletonMap("foo", "bar"));
    });
    MapperService mapperService = createMapperService(mapping);
    assertSerializedMapping(mapperService, mapping);

    // Merging a mapping without "meta" removes it.
    mapping = fieldMapping(this::minimalMapping);
    merge(mapperService, mapping);
    assertSerializedMapping(mapperService, mapping);

    // Merging a mapping with a different "meta" replaces the old value.
    mapping = fieldMapping(b -> {
        minimalMapping(b);
        b.field("meta", Collections.singletonMap("baz", "quux"));
    });
    merge(mapperService, mapping);
    assertSerializedMapping(mapperService, mapping);
}

/**
 * Asserts that the mapping currently held by {@code mapperService} is
 * equivalent to {@code mapping}. Both sides are compared as maps so that key
 * order and formatting differences do not cause spurious failures.
 */
private void assertSerializedMapping(MapperService mapperService, XContentBuilder mapping) {
    assertEquals(
        XContentHelper.convertToMap(BytesReference.bytes(mapping), false, mapping.contentType()),
        XContentHelper.convertToMap(mapperService.documentMapper().mappingSource().uncompressed(), false, mapping.contentType())
    );
}
}

View File

@ -6,7 +6,6 @@
package org.elasticsearch.xpack.constantkeyword;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.MapperPlugin;
@ -18,9 +17,6 @@ import java.util.Map;
import static java.util.Collections.singletonMap;
public class ConstantKeywordMapperPlugin extends Plugin implements MapperPlugin, ActionPlugin {
public ConstantKeywordMapperPlugin(Settings settings) {}
@Override
public Map<String, Mapper.TypeParser> getMappers() {
return singletonMap(ConstantKeywordFieldMapper.CONTENT_TYPE, new ConstantKeywordFieldMapper.TypeParser());

View File

@ -9,32 +9,34 @@ package org.elasticsearch.xpack.constantkeyword.mapper;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMapperTestCase;
import org.elasticsearch.index.mapper.FieldMapperTestCase2;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.xpack.constantkeyword.ConstantKeywordMapperPlugin;
import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin;
import org.junit.Before;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
public class ConstantKeywordFieldMapperTests extends FieldMapperTestCase<ConstantKeywordFieldMapper.Builder> {
import static java.util.Collections.singleton;
public class ConstantKeywordFieldMapperTests extends FieldMapperTestCase2<ConstantKeywordFieldMapper.Builder> {
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(ConstantKeywordMapperPlugin.class, LocalStateCompositeXPackPlugin.class);
protected Collection<Plugin> getPlugins() {
return singleton(new ConstantKeywordMapperPlugin());
}
@Override
@ -54,20 +56,16 @@ public class ConstantKeywordFieldMapperTests extends FieldMapperTestCase<Constan
b.setValue("bar");
});
addModifier("unset", false, (a, b) -> {
a.setValue("foo");;
});
addModifier("value-from-null", true, (a, b) -> {
b.setValue("bar");
a.setValue("foo");
;
});
addModifier("value-from-null", true, (a, b) -> { b.setValue("bar"); });
}
public void testDefaults() throws Exception {
IndexService indexService = createIndex("test");
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", "constant_keyword")
.field("value", "foo").endObject().endObject().endObject().endObject());
DocumentMapper mapper = indexService.mapperService().merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
assertEquals(mapping, mapper.mappingSource().toString());
XContentBuilder mapping = fieldMapping(b -> b.field("type", "constant_keyword").field("value", "foo"));
DocumentMapper mapper = createDocumentMapper(mapping);
assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "_doc", "1", source, XContentType.JSON));
@ -77,32 +75,29 @@ public class ConstantKeywordFieldMapperTests extends FieldMapperTestCase<Constan
doc = mapper.parse(new SourceToParse("test", "_doc", "1", source, XContentType.JSON));
assertNull(doc.rootDoc().getField("field"));
BytesReference illegalSource = BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("field", "bar").endObject());
MapperParsingException e = expectThrows(MapperParsingException.class,
() -> mapper.parse(new SourceToParse("test", "_doc", "1", illegalSource, XContentType.JSON)));
assertEquals("[constant_keyword] field [field] only accepts values that are equal to the value defined in the mappings [foo], " +
"but got [bar]", e.getCause().getMessage());
BytesReference illegalSource = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "bar").endObject());
MapperParsingException e = expectThrows(
MapperParsingException.class,
() -> mapper.parse(new SourceToParse("test", "_doc", "1", illegalSource, XContentType.JSON))
);
assertEquals(
"[constant_keyword] field [field] only accepts values that are equal to the value defined in the mappings [foo], "
+ "but got [bar]",
e.getCause().getMessage()
);
}
public void testDynamicValue() throws Exception {
IndexService indexService = createIndex("test");
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", "constant_keyword")
.endObject().endObject().endObject().endObject());
DocumentMapper mapper = indexService.mapperService().merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
assertEquals(mapping, mapper.mappingSource().toString());
MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "constant_keyword")));
BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "foo").endObject());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "_doc", "1", source, XContentType.JSON));
ParsedDocument doc = mapperService.documentMapper().parse(new SourceToParse("test", "_doc", "1", source, XContentType.JSON));
assertNull(doc.rootDoc().getField("field"));
assertNotNull(doc.dynamicMappingsUpdate());
CompressedXContent mappingUpdate = new CompressedXContent(Strings.toString(doc.dynamicMappingsUpdate()));
DocumentMapper updatedMapper = indexService.mapperService().merge("_doc", mappingUpdate, MergeReason.MAPPING_UPDATE);
String expectedMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", "constant_keyword")
.field("value", "foo").endObject().endObject().endObject().endObject());
DocumentMapper updatedMapper = mapperService.merge("_doc", mappingUpdate, MergeReason.MAPPING_UPDATE);
String expectedMapping = Strings.toString(fieldMapping(b -> b.field("type", "constant_keyword").field("value", "foo")));
assertEquals(expectedMapping, updatedMapper.mappingSource().toString());
doc = updatedMapper.parse(new SourceToParse("test", "_doc", "1", source, XContentType.JSON));
@ -110,51 +105,19 @@ public class ConstantKeywordFieldMapperTests extends FieldMapperTestCase<Constan
assertNull(doc.dynamicMappingsUpdate());
}
public void testMeta() throws Exception {
IndexService indexService = createIndex("test");
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", "constant_keyword")
.field("meta", Collections.singletonMap("foo", "bar"))
.endObject().endObject().endObject().endObject());
DocumentMapper mapper = indexService.mapperService().merge("_doc",
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
assertEquals(mapping, mapper.mappingSource().toString());
String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", "constant_keyword")
.endObject().endObject().endObject().endObject());
mapper = indexService.mapperService().merge("_doc",
new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
assertEquals(mapping2, mapper.mappingSource().toString());
String mapping3 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", "constant_keyword")
.field("meta", Collections.singletonMap("baz", "quux"))
.endObject().endObject().endObject().endObject());
mapper = indexService.mapperService().merge("_doc",
new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE);
assertEquals(mapping3, mapper.mappingSource().toString());
@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "constant_keyword");
}
public void testLookupValues() throws Exception {
IndexService indexService = createIndex("test");
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", "constant_keyword")
.endObject().endObject().endObject().endObject());
DocumentMapper mapper = indexService.mapperService().merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
assertEquals(mapping, mapper.mappingSource().toString());
FieldMapper fieldMapper = (FieldMapper) mapper.mappers().getMapper("field");
MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "constant_keyword")));
FieldMapper fieldMapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field");
List<?> values = fieldMapper.lookupValues(new SourceLookup(), null);
assertTrue(values.isEmpty());
merge(mapperService, fieldMapping(b -> b.field("type", "constant_keyword").field("value", "foo")));
String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", "constant_keyword")
.field("value", "foo").endObject().endObject().endObject().endObject());
mapper = indexService.mapperService().merge("_doc", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
fieldMapper = (FieldMapper) mapper.mappers().getMapper("field");
fieldMapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field");
values = fieldMapper.lookupValues(new SourceLookup(), null);
assertEquals(1, values.size());
assertEquals("foo", values.get(0));