Migrate some more mapper test cases (#61507) (#61552)

Migrate some more mapper test cases from `ESSingleNodeTestCase` to
`MapperTestCase`.
Nik Everett 2020-08-25 15:27:26 -04:00 committed by GitHub
parent 8b56441d2b
commit 87cf81e179
9 changed files with 473 additions and 1016 deletions
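The diffs below replace the old `ESSingleNodeTestCase`-style setup (create a real index, build the mapping JSON by hand, wrap documents in `SourceToParse`) with the `MapperTestCase` helpers. A minimal sketch of the new shape, assuming the helper names that appear in the diffs (`createDocumentMapper`, `fieldMapping`, `source`, `minimalMapping`); the class itself is illustrative, not a file in this commit, and omits other overrides the base class requires:

```java
// Schematic example of the migration pattern; ExampleFieldMapperTests is hypothetical.
// Helper names are taken from the diffs below; other required overrides are omitted.
package org.elasticsearch.index.mapper;

import java.io.IOException;

import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.xcontent.XContentBuilder;

public class ExampleFieldMapperTests extends MapperTestCase {

    // The minimal mapping for the field under test; the framework wraps it in the
    // "properties"/"field" boilerplate the old tests built by hand with XContentFactory.
    @Override
    protected void minimalMapping(XContentBuilder b) throws IOException {
        b.field("type", "rank_feature");
    }

    public void testDefaults() throws Exception {
        // Old style: createIndex("test"), a hand-built mapping string, and
        // mapper.parse(new SourceToParse(...)). New style: the base class builds
        // both the mapping and the source document.
        DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
        ParsedDocument doc = mapper.parse(source(b -> b.field("field", 10)));

        IndexableField[] fields = doc.rootDoc().getFields("_feature");
        assertEquals(1, fields.length);
    }
}
```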

RankFeatureFieldMapperTests.java

@@ -26,13 +26,9 @@ import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.plugins.Plugin;
import org.hamcrest.Matchers;
import org.junit.Before;
@@ -42,11 +38,7 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.Set;
public class RankFeatureFieldMapperTests extends FieldMapperTestCase<RankFeatureFieldMapper.Builder> {
IndexService indexService;
DocumentMapperParser parser;
public class RankFeatureFieldMapperTests extends FieldMapperTestCase2<RankFeatureFieldMapper.Builder> {
@Override
protected Set<String> unsupportedProperties() {
return org.elasticsearch.common.collect.Set.of("analyzer", "similarity", "store", "doc_values", "index");
@@ -54,8 +46,6 @@ public class RankFeatureFieldMapperTests extends FieldMapperTestCase<RankFeature
@Before
public void setup() {
indexService = createIndex("test");
parser = indexService.mapperService().documentMapperParser();
addModifier("positive_score_impact", false, (a, b) -> {
a.positiveScoreImpact(true);
b.positiveScoreImpact(false);
@@ -63,8 +53,8 @@ public class RankFeatureFieldMapperTests extends FieldMapperTestCase<RankFeature
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(MapperExtrasPlugin.class);
protected Collection<? extends Plugin> getPlugins() {
return List.of(new MapperExtrasPlugin());
}
static int getFrequency(TokenStream tk) throws IOException {
@@ -81,34 +71,27 @@ public class RankFeatureFieldMapperTests extends FieldMapperTestCase<RankFeature
return new RankFeatureFieldMapper.Builder("rank-feature");
}
@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "rank_feature");
}
@Override
protected boolean supportsMeta() {
return false;
}
public void testDefaults() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "rank_feature").endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc1 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 10)
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
assertEquals(Strings.toString(fieldMapping(this::minimalMapping)), mapper.mappingSource().toString());
ParsedDocument doc1 = mapper.parse(source(b -> b.field("field", 10)));
IndexableField[] fields = doc1.rootDoc().getFields("_feature");
assertEquals(1, fields.length);
assertThat(fields[0], Matchers.instanceOf(FeatureField.class));
FeatureField featureField1 = (FeatureField) fields[0];
ParsedDocument doc2 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 12)
.endObject()),
XContentType.JSON));
ParsedDocument doc2 = mapper.parse(source(b -> b.field("field", 12)));
FeatureField featureField2 = (FeatureField) doc2.rootDoc().getFields("_feature")[0];
int freq1 = getFrequency(featureField1.tokenStream(null, null));
@@ -117,34 +100,17 @@ public class RankFeatureFieldMapperTests extends FieldMapperTestCase<RankFeature
}
public void testNegativeScoreImpact() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "rank_feature")
.field("positive_score_impact", false).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc1 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 10)
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "rank_feature").field("positive_score_impact", false))
);
ParsedDocument doc1 = mapper.parse(source(b -> b.field("field", 10)));
IndexableField[] fields = doc1.rootDoc().getFields("_feature");
assertEquals(1, fields.length);
assertThat(fields[0], Matchers.instanceOf(FeatureField.class));
FeatureField featureField1 = (FeatureField) fields[0];
ParsedDocument doc2 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 12)
.endObject()),
XContentType.JSON));
ParsedDocument doc2 = mapper.parse(source(b -> b.field("field", 12)));
FeatureField featureField2 = (FeatureField) doc2.rootDoc().getFields("_feature")[0];
int freq1 = getFrequency(featureField1.tokenStream(null, null));
@@ -153,39 +119,30 @@ public class RankFeatureFieldMapperTests extends FieldMapperTestCase<RankFeature
}
public void testRejectMultiValuedFields() throws MapperParsingException, IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "rank_feature").endObject().startObject("foo")
.startObject("properties").startObject("field").field("type", "rank_feature").endObject().endObject()
.endObject().endObject().endObject().endObject());
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
b.startObject("field").field("type", "rank_feature").endObject();
b.startObject("foo").startObject("properties");
{
b.startObject("field").field("type", "rank_feature").endObject();
}
b.endObject().endObject();
}));
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
MapperParsingException e = expectThrows(MapperParsingException.class,
() -> mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", Arrays.asList(10, 20))
.endObject()),
XContentType.JSON)));
MapperParsingException e = expectThrows(
MapperParsingException.class,
() -> mapper.parse(source(b -> b.field("field", Arrays.asList(10, 20))))
);
assertEquals("[rank_feature] fields do not support indexing multiple values for the same field [field] in the same document",
e.getCause().getMessage());
e = expectThrows(MapperParsingException.class,
() -> mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.startArray("foo")
.startObject()
.field("field", 10)
.endObject()
.startObject()
.field("field", 20)
.endObject()
.endArray()
.endObject()),
XContentType.JSON)));
e = expectThrows(MapperParsingException.class, () -> mapper.parse(source(b -> {
b.startArray("foo");
{
b.startObject().field("field", 10).endObject();
b.startObject().field("field", 20).endObject();
}
b.endArray();
})));
assertEquals("[rank_feature] fields do not support indexing multiple values for the same field [foo.field] in the same document",
e.getCause().getMessage());
}

RankFeaturesFieldMapperTests.java

@@ -22,59 +22,42 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.FeatureField;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.plugins.Plugin;
import org.hamcrest.Matchers;
import org.junit.Before;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Set;
public class RankFeaturesFieldMapperTests extends FieldMapperTestCase<RankFeaturesFieldMapper.Builder> {
public class RankFeaturesFieldMapperTests extends FieldMapperTestCase2<RankFeaturesFieldMapper.Builder> {
@Override
protected Set<String> unsupportedProperties() {
return org.elasticsearch.common.collect.Set.of("analyzer", "similarity", "store", "doc_values", "index");
}
IndexService indexService;
DocumentMapperParser parser;
@Before
public void setup() {
indexService = createIndex("test");
parser = indexService.mapperService().documentMapperParser();
@Override
protected Collection<? extends Plugin> getPlugins() {
return org.elasticsearch.common.collect.List.of(new MapperExtrasPlugin());
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(MapperExtrasPlugin.class);
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "rank_features");
}
@Override
protected boolean supportsMeta() {
return false;
}
public void testDefaults() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "rank_features").endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
assertEquals(Strings.toString(fieldMapping(this::minimalMapping)), mapper.mappingSource().toString());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc1 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field("foo", 10)
.field("bar", 20)
.endObject()
.endObject()),
XContentType.JSON));
ParsedDocument doc1 = mapper.parse(source(b -> b.startObject("field").field("foo", 10).field("bar", 20).endObject()));
IndexableField[] fields = doc1.rootDoc().getFields("field");
assertEquals(2, fields.length);
@@ -90,45 +73,30 @@ public class RankFeaturesFieldMapperTests extends FieldMapperTestCase<RankFeatur
}
public void testRejectMultiValuedFields() throws MapperParsingException, IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "rank_features").endObject().startObject("foo")
.startObject("properties").startObject("field").field("type", "rank_features").endObject().endObject()
.endObject().endObject().endObject().endObject());
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
b.startObject("field").field("type", "rank_features").endObject();
b.startObject("foo").startObject("properties");
{
b.startObject("field").field("type", "rank_features").endObject();
}
b.endObject().endObject();
}));
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
MapperParsingException e = expectThrows(MapperParsingException.class,
() -> mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field("foo", Arrays.asList(10, 20))
.endObject()
.endObject()),
XContentType.JSON)));
MapperParsingException e = expectThrows(
MapperParsingException.class,
() -> mapper.parse(source(b -> b.startObject("field").field("foo", Arrays.asList(10, 20)).endObject()))
);
assertEquals("[rank_features] fields take hashes that map a feature to a strictly positive float, but got unexpected token " +
"START_ARRAY", e.getCause().getMessage());
e = expectThrows(MapperParsingException.class,
() -> mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.startArray("foo")
.startObject()
.startObject("field")
.field("bar", 10)
.endObject()
.endObject()
.startObject()
.startObject("field")
.field("bar", 20)
.endObject()
.endObject()
.endArray()
.endObject()),
XContentType.JSON)));
e = expectThrows(MapperParsingException.class, () -> mapper.parse(source(b -> {
b.startArray("foo");
{
b.startObject().startObject("field").field("bar", 10).endObject().endObject();
b.startObject().startObject("field").field("bar", 20).endObject().endObject();
}
b.endArray();
})));
assertEquals("[rank_features] fields do not support indexing multiple values for the same rank feature [foo.field.bar] in " +
"the same document", e.getCause().getMessage());
}

SearchAsYouTypeFieldMapperTests.java

@@ -18,8 +18,10 @@
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.core.SimpleAnalyzer;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
@@ -37,16 +39,11 @@ import org.apache.lucene.search.similarities.BooleanSimilarity;
import org.apache.lucene.search.spans.FieldMaskingSpanQuery;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.SearchAsYouTypeFieldMapper.PrefixFieldMapper;
import org.elasticsearch.index.mapper.SearchAsYouTypeFieldMapper.PrefixFieldType;
@@ -60,8 +57,6 @@ import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.plugins.Plugin;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
import org.junit.Before;
import java.io.IOException;
@@ -75,7 +70,6 @@ import java.util.stream.Collectors;
import java.util.stream.Stream;
import static java.util.Arrays.asList;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasProperty;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
@@ -83,7 +77,7 @@ import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase<SearchAsYouTypeFieldMapper.Builder> {
public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase2<SearchAsYouTypeFieldMapper.Builder> {
@Before
public void addModifiers() {
@@ -103,8 +97,8 @@ public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase<SearchA
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(MapperExtrasPlugin.class);
protected Collection<? extends Plugin> getPlugins() {
return org.elasticsearch.common.collect.List.of(new MapperExtrasPlugin());
}
@Override
@@ -115,30 +109,42 @@ public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase<SearchA
.searchQuoteAnalyzer(new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer()));
}
@Override
protected IndexAnalyzers createIndexAnalyzers(IndexSettings indexSettings) {
NamedAnalyzer dflt = new NamedAnalyzer(
"default",
AnalyzerScope.INDEX,
new StandardAnalyzer(),
TextFieldMapper.Defaults.POSITION_INCREMENT_GAP
);
NamedAnalyzer standard = new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer());
NamedAnalyzer keyword = new NamedAnalyzer("keyword", AnalyzerScope.INDEX, new KeywordAnalyzer());
NamedAnalyzer simple = new NamedAnalyzer("simple", AnalyzerScope.INDEX, new SimpleAnalyzer());
NamedAnalyzer whitespace = new NamedAnalyzer("whitespace", AnalyzerScope.INDEX, new WhitespaceAnalyzer());
return new IndexAnalyzers(
org.elasticsearch.common.collect.Map.of(
"default", dflt, "standard", standard, "keyword", keyword, "simple", simple, "whitespace", whitespace
),
org.elasticsearch.common.collect.Map.of(),
org.elasticsearch.common.collect.Map.of()
);
}
@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "search_as_you_type");
}
@Override
protected void metaMapping(XContentBuilder b) throws IOException {
// We serialize these fields regardless of whether or not they are changed
b.field("type", "search_as_you_type").field("max_shingle_size", 3).field("doc_values", false);
}
public void testIndexing() throws IOException {
final String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("a_field")
.field("type", "search_as_you_type")
.endObject()
.endObject()
.endObject()
.endObject());
final DocumentMapper mapper = createIndex("test")
.mapperService()
.documentMapperParser()
.parse("_doc", new CompressedXContent(mapping));
ParsedDocument doc = mapper.parse(new SourceToParse("test", "_doc", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("a_field", "new york city")
.endObject()),
XContentType.JSON));
for (String field : new String[] { "a_field", "a_field._index_prefix", "a_field._2gram", "a_field._3gram"}) {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "new york city")));
for (String field : new String[] { "field", "field._index_prefix", "field._2gram", "field._3gram"}) {
IndexableField[] fields = doc.rootDoc().getFields(field);
assertEquals(1, fields.length);
assertEquals("new york city", fields[0].stringValue());
@@ -146,340 +152,177 @@ public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase<SearchA
}
public void testDefaultConfiguration() throws IOException {
final String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("a_field")
.field("type", "search_as_you_type")
.endObject()
.endObject()
.endObject()
.endObject());
final DocumentMapper defaultMapper = createIndex("test")
.mapperService()
.documentMapperParser()
.parse("_doc", new CompressedXContent(mapping));
final SearchAsYouTypeFieldMapper rootMapper = getRootFieldMapper(defaultMapper, "a_field");
DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
SearchAsYouTypeFieldMapper rootMapper = getRootFieldMapper(defaultMapper, "field");
assertRootFieldMapper(rootMapper, 3, "default");
final PrefixFieldMapper prefixFieldMapper = getPrefixFieldMapper(defaultMapper, "a_field._index_prefix");
PrefixFieldMapper prefixFieldMapper = getPrefixFieldMapper(defaultMapper, "field._index_prefix");
assertPrefixFieldType(prefixFieldMapper.fieldType(), 3, "default");
assertShingleFieldType(
getShingleFieldMapper(defaultMapper, "a_field._2gram").fieldType(), 2, "default", prefixFieldMapper.fieldType());
getShingleFieldMapper(defaultMapper, "field._2gram").fieldType(), 2, "default", prefixFieldMapper.fieldType());
assertShingleFieldType(
getShingleFieldMapper(defaultMapper, "a_field._3gram").fieldType(), 3, "default", prefixFieldMapper.fieldType());
getShingleFieldMapper(defaultMapper, "field._3gram").fieldType(), 3, "default", prefixFieldMapper.fieldType());
}
public void testConfiguration() throws IOException {
final int maxShingleSize = 4;
final String analyzerName = "simple";
int maxShingleSize = 4;
String analyzerName = "simple";
DocumentMapper defaultMapper = createDocumentMapper(
fieldMapping(
b -> b.field("type", "search_as_you_type").field("analyzer", analyzerName).field("max_shingle_size", maxShingleSize)
)
);
final String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("a_field")
.field("type", "search_as_you_type")
.field("analyzer", analyzerName)
.field("max_shingle_size", maxShingleSize)
.endObject()
.endObject()
.endObject()
.endObject());
final DocumentMapper defaultMapper = createIndex("test")
.mapperService()
.documentMapperParser()
.parse("_doc", new CompressedXContent(mapping));
final SearchAsYouTypeFieldMapper rootMapper = getRootFieldMapper(defaultMapper, "a_field");
SearchAsYouTypeFieldMapper rootMapper = getRootFieldMapper(defaultMapper, "field");
assertRootFieldMapper(rootMapper, maxShingleSize, analyzerName);
final PrefixFieldMapper prefixFieldMapper = getPrefixFieldMapper(defaultMapper, "a_field._index_prefix");
PrefixFieldMapper prefixFieldMapper = getPrefixFieldMapper(defaultMapper, "field._index_prefix");
assertPrefixFieldType(prefixFieldMapper.fieldType(), maxShingleSize, analyzerName);
assertShingleFieldType(
getShingleFieldMapper(defaultMapper, "a_field._2gram").fieldType(), 2, analyzerName, prefixFieldMapper.fieldType());
getShingleFieldMapper(defaultMapper, "field._2gram").fieldType(), 2, analyzerName, prefixFieldMapper.fieldType());
assertShingleFieldType(
getShingleFieldMapper(defaultMapper, "a_field._3gram").fieldType(), 3, analyzerName, prefixFieldMapper.fieldType());
getShingleFieldMapper(defaultMapper, "field._3gram").fieldType(), 3, analyzerName, prefixFieldMapper.fieldType());
assertShingleFieldType(
getShingleFieldMapper(defaultMapper, "a_field._4gram").fieldType(), 4, analyzerName, prefixFieldMapper.fieldType());
getShingleFieldMapper(defaultMapper, "field._4gram").fieldType(), 4, analyzerName, prefixFieldMapper.fieldType());
}
public void testSimpleMerge() throws IOException {
MapperService mapperService = createIndex("test").mapperService();
{
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("_doc")
.startObject("properties")
.startObject("a_field")
.field("type", "search_as_you_type")
.field("analyzer", "standard")
.endObject()
.endObject()
.endObject().endObject());
DocumentMapper mapper = mapperService.merge("_doc",
new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
}
{
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("_doc")
.startObject("properties")
.startObject("a_field")
.field("type", "search_as_you_type")
.field("analyzer", "standard")
.endObject()
.startObject("b_field")
.field("type", "text")
.endObject()
.endObject()
.endObject().endObject());
DocumentMapper mapper = mapperService.merge("_doc",
new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
}
{
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("_doc")
.startObject("properties")
.startObject("a_field")
.field("type", "search_as_you_type")
.field("analyzer", "standard")
.field("max_shingle_size", "4")
.endObject()
.startObject("b_field")
.field("type", "text")
.endObject()
.endObject()
.endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> mapperService.merge("_doc",
new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE));
assertThat(e.getMessage(), containsString("different [max_shingle_size]"));
}
MapperService mapperService = createMapperService(
mapping(b -> b.startObject("a_field").field("type", "search_as_you_type").field("analyzer", "standard").endObject())
);
merge(mapperService, mapping(b -> {
b.startObject("a_field").field("type", "search_as_you_type").field("analyzer", "standard").endObject();
b.startObject("b_field").field("type", "text").endObject();
}));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> merge(mapperService, mapping(b -> {
b.startObject("a_field");
{
b.field("type", "search_as_you_type");
b.field("analyzer", "standard");
b.field("max_shingle_size", "4");
}
b.endObject();
b.startObject("b_field").field("type", "text").endObject();
})));
assertThat(e.getMessage(), containsString("different [max_shingle_size]"));
}
public void testMultiFields() throws IOException {
for (int shingleSize = 2; shingleSize < 4; shingleSize++) {
final XContentBuilder mapping = XContentFactory.jsonBuilder()
.startObject()
.startObject("properties")
.startObject("a_field")
.field("type", "text")
.startObject("fields")
.startObject("suggest")
.field("type", "search_as_you_type")
.field("max_shingle_size", shingleSize)
.endObject()
.endObject()
.endObject()
.endObject()
.endObject();
assertMultiField(shingleSize);
}
}
final String index = "foo_" + shingleSize;
final String path = "a_field.suggest";
List<String> fields = new ArrayList<>();
fields.add(path);
final MapperService mapperService =
createIndex(index, Settings.EMPTY, "_doc", mapping).mapperService();
MappedFieldType fieldType = mapperService.fieldType(path + "._index_prefix");
assertThat(fieldType, instanceOf(PrefixFieldType.class));
PrefixFieldType prefixFieldType = (PrefixFieldType) fieldType;
assertEquals(path, prefixFieldType.parentField);
for (int i = 2; i < shingleSize; i++) {
String name = path + "._" + i + "gram";
fields.add(name);
fieldType = mapperService.fieldType(name);
assertThat(fieldType, instanceOf(ShingleFieldType.class));
ShingleFieldType ft = (ShingleFieldType) fieldType;
assertEquals(i, ft.shingleSize);
assertTrue(prefixFieldType == ft.prefixFieldType);
private void assertMultiField(int shingleSize) throws IOException {
String path = "field.suggest";
List<String> fields = new ArrayList<>();
fields.add(path);
MapperService mapperService = createMapperService(fieldMapping(b -> {
b.field("type", "text");
b.startObject("fields");
{
b.startObject("suggest").field("type", "search_as_you_type").field("max_shingle_size", shingleSize).endObject();
}
b.endObject();
}));
MappedFieldType fieldType = mapperService.fieldType(path + "._index_prefix");
assertThat(fieldType, instanceOf(PrefixFieldType.class));
PrefixFieldType prefixFieldType = (PrefixFieldType) fieldType;
assertEquals(path, prefixFieldType.parentField);
for (int i = 2; i < shingleSize; i++) {
String name = path + "._" + i + "gram";
fields.add(name);
fieldType = mapperService.fieldType(name);
assertThat(fieldType, instanceOf(ShingleFieldType.class));
ShingleFieldType ft = (ShingleFieldType) fieldType;
assertEquals(i, ft.shingleSize);
assertTrue(prefixFieldType == ft.prefixFieldType);
}
ParsedDocument doc = mapperService.documentMapper()
.parse(new SourceToParse("test", "_doc", "1",
BytesReference.bytes(
XContentFactory.jsonBuilder()
.startObject()
.field("a_field", "new york city")
.endObject()
), XContentType.JSON)
);
for (String field : fields) {
IndexableField[] indexFields = doc.rootDoc().getFields(field);
assertEquals(1, indexFields.length);
assertEquals("new york city", indexFields[0].stringValue());
}
ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.field("field", "new york city")));
for (String field : fields) {
IndexableField[] indexFields = doc.rootDoc().getFields(field);
assertEquals(1, indexFields.length);
assertEquals("new york city", indexFields[0].stringValue());
}
}
public void testIndexOptions() throws IOException {
final String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("a_field")
.field("type", "search_as_you_type")
.field("index_options", "offsets")
.endObject()
.endObject()
.endObject()
.endObject());
final DocumentMapper defaultMapper = createIndex("test")
.mapperService()
.documentMapperParser()
.parse("_doc", new CompressedXContent(mapping));
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "search_as_you_type").field("index_options", "offsets"))
);
Stream.of(
getRootFieldMapper(defaultMapper, "a_field"),
getPrefixFieldMapper(defaultMapper, "a_field._index_prefix"),
getShingleFieldMapper(defaultMapper, "a_field._2gram"),
getShingleFieldMapper(defaultMapper, "a_field._3gram")
).forEach(mapper -> assertThat("for " + mapper.name(),
mapper.fieldType.indexOptions(), equalTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)));
getRootFieldMapper(mapper, "field"),
getPrefixFieldMapper(mapper, "field._index_prefix"),
getShingleFieldMapper(mapper, "field._2gram"),
getShingleFieldMapper(mapper, "field._3gram")
).forEach(m -> assertThat("for " + m.name(),
m.fieldType.indexOptions(), equalTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)));
}
public void testStore() throws IOException {
final String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("a_field")
.field("type", "search_as_you_type")
.field("store", "true")
.endObject()
.endObject()
.endObject()
.endObject());
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "search_as_you_type").field("store", true)));
final DocumentMapper defaultMapper = createIndex("test")
.mapperService()
.documentMapperParser()
.parse("_doc", new CompressedXContent(mapping));
assertTrue(getRootFieldMapper(defaultMapper, "a_field").fieldType.stored());
assertTrue(getRootFieldMapper(mapper, "field").fieldType.stored());
Stream.of(
getPrefixFieldMapper(defaultMapper, "a_field._index_prefix"),
getShingleFieldMapper(defaultMapper, "a_field._2gram"),
getShingleFieldMapper(defaultMapper, "a_field._3gram")
).forEach(mapper -> assertFalse("for " + mapper.name(), mapper.fieldType.stored()));
getPrefixFieldMapper(mapper, "field._index_prefix"),
getShingleFieldMapper(mapper, "field._2gram"),
getShingleFieldMapper(mapper, "field._3gram")
).forEach(m -> assertFalse("for " + m.name(), m.fieldType.stored()));
}
public void testIndex() throws IOException {
final String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("a_field")
.field("type", "search_as_you_type")
.field("index", "false")
.endObject()
.endObject()
.endObject()
.endObject());
final DocumentMapper defaultMapper = createIndex("test")
.mapperService()
.documentMapperParser()
.parse("_doc", new CompressedXContent(mapping));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "search_as_you_type").field("index", false)));
Stream.of(
getRootFieldMapper(defaultMapper, "a_field"),
getPrefixFieldMapper(defaultMapper, "a_field._index_prefix"),
getShingleFieldMapper(defaultMapper, "a_field._2gram"),
getShingleFieldMapper(defaultMapper, "a_field._3gram")
).forEach(mapper -> assertThat("for " + mapper.name(), mapper.fieldType.indexOptions(), equalTo(IndexOptions.NONE)));
getRootFieldMapper(mapper, "field"),
getPrefixFieldMapper(mapper, "field._index_prefix"),
getShingleFieldMapper(mapper, "field._2gram"),
getShingleFieldMapper(mapper, "field._3gram")
).forEach(m -> assertThat("for " + m.name(), m.fieldType.indexOptions(), equalTo(IndexOptions.NONE)));
}
public void testTermVectors() throws IOException {
final String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("a_field")
.field("type", "search_as_you_type")
.field("term_vector", "yes")
.endObject()
.endObject()
.endObject()
.endObject());
final DocumentMapper defaultMapper = createIndex("test")
.mapperService()
.documentMapperParser()
.parse("_doc", new CompressedXContent(mapping));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "search_as_you_type").field("term_vector", "yes")));
Stream.of(
getRootFieldMapper(defaultMapper, "a_field"),
getShingleFieldMapper(defaultMapper, "a_field._2gram"),
getShingleFieldMapper(defaultMapper, "a_field._3gram")
).forEach(mapper -> assertTrue("for " + mapper.name(), mapper.fieldType.storeTermVectors()));
getRootFieldMapper(mapper, "field"),
getShingleFieldMapper(mapper, "field._2gram"),
getShingleFieldMapper(mapper, "field._3gram")
).forEach(m -> assertTrue("for " + m.name(), m.fieldType.storeTermVectors()));
final PrefixFieldMapper prefixFieldMapper = getPrefixFieldMapper(defaultMapper, "a_field._index_prefix");
PrefixFieldMapper prefixFieldMapper = getPrefixFieldMapper(mapper, "field._index_prefix");
assertFalse(prefixFieldMapper.fieldType.storeTermVectors());
}
public void testNorms() throws IOException {
// default setting
{
final String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("a_field")
.field("type", "search_as_you_type")
.endObject()
.endObject()
.endObject()
.endObject());
final DocumentMapper defaultMapper = createIndex("test-1")
.mapperService()
.documentMapperParser()
.parse("_doc", new CompressedXContent(mapping));
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
Stream.of(
getRootFieldMapper(defaultMapper, "a_field"),
getShingleFieldMapper(defaultMapper, "a_field._2gram"),
getShingleFieldMapper(defaultMapper, "a_field._3gram")
).forEach(mapper -> assertFalse("for " + mapper.name(), mapper.fieldType.omitNorms()));
getRootFieldMapper(mapper, "field"),
getShingleFieldMapper(mapper, "field._2gram"),
getShingleFieldMapper(mapper, "field._3gram")
).forEach(m -> assertFalse("for " + m.name(), m.fieldType.omitNorms()));
final PrefixFieldMapper prefixFieldMapper = getPrefixFieldMapper(defaultMapper, "a_field._index_prefix");
PrefixFieldMapper prefixFieldMapper = getPrefixFieldMapper(mapper, "field._index_prefix");
assertTrue(prefixFieldMapper.fieldType.omitNorms());
}
// can disable them on shingle fields
// can disable norms on search_as_you_type fields
{
final String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("a_field")
.field("type", "search_as_you_type")
.field("norms", "false")
.endObject()
.endObject()
.endObject()
.endObject());
final DocumentMapper defaultMapper = createIndex("test-2")
.mapperService()
.documentMapperParser()
.parse("_doc", new CompressedXContent(mapping));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "search_as_you_type").field("norms", false)));
Stream.of(
getRootFieldMapper(defaultMapper, "a_field"),
getPrefixFieldMapper(defaultMapper, "a_field._index_prefix"),
getShingleFieldMapper(defaultMapper, "a_field._2gram"),
getShingleFieldMapper(defaultMapper, "a_field._3gram")
).forEach(mapper -> assertTrue("for " + mapper.name(), mapper.fieldType.omitNorms()));
getRootFieldMapper(mapper, "field"),
getPrefixFieldMapper(mapper, "field._index_prefix"),
getShingleFieldMapper(mapper, "field._2gram"),
getShingleFieldMapper(mapper, "field._3gram")
).forEach(m -> assertTrue("for " + m.name(), m.fieldType.omitNorms()));
}
}
@@ -493,31 +336,17 @@ public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase<SearchA
}
public void testMatchPhrasePrefix() throws IOException {
IndexService indexService = createIndex("test", Settings.EMPTY);
QueryShardContext queryShardContext = indexService.newQueryShardContext(
randomInt(20), null, () -> {
throw new UnsupportedOperationException();
}, null);
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("field")
.field("type", "search_as_you_type")
.endObject()
.endObject()
.endObject().endObject());
queryShardContext.getMapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
QueryShardContext queryShardContext = createQueryShardContext(createMapperService(fieldMapping(this::minimalMapping)));
{
Query q = new MatchPhrasePrefixQueryBuilder("field", "two words").toQuery(queryShardContext);
Query expected = new SynonymQuery(new Term("field._index_prefix", "two words"));
Query expected = new SynonymQuery.Builder("field._index_prefix").addTerm(new Term("field._index_prefix", "two words")).build();
assertThat(q, equalTo(expected));
}
{
Query q = new MatchPhrasePrefixQueryBuilder("field", "three words here").toQuery(queryShardContext);
Query expected = new SynonymQuery(new Term("field._index_prefix", "three words here"));
Query expected = new SynonymQuery.Builder("field._index_prefix").addTerm(new Term("field._index_prefix", "three words here"))
.build();
assertThat(q, equalTo(expected));
}
@@ -572,106 +401,91 @@ public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase<SearchA
}
public void testMatchPhrase() throws IOException {
final IndexService indexService = createIndex("test", Settings.EMPTY);
final QueryShardContext queryShardContext = indexService.newQueryShardContext(randomInt(20), null,
() -> { throw new UnsupportedOperationException(); }, null);
final String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("a_field")
.field("type", "search_as_you_type")
.endObject()
.endObject()
.endObject()
.endObject());
queryShardContext.getMapperService().merge("_doc", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
QueryShardContext queryShardContext = createQueryShardContext(createMapperService(fieldMapping(this::minimalMapping)));
{
final Query actual = new MatchPhraseQueryBuilder("a_field", "one")
Query actual = new MatchPhraseQueryBuilder("field", "one")
.toQuery(queryShardContext);
final Query expected = new TermQuery(new Term("a_field", "one"));
Query expected = new TermQuery(new Term("field", "one"));
assertThat(actual, equalTo(expected));
}
{
final Query actual = new MatchPhraseQueryBuilder("a_field", "one two")
Query actual = new MatchPhraseQueryBuilder("field", "one two")
.toQuery(queryShardContext);
final Query expected = new MultiPhraseQuery.Builder()
.add(new Term("a_field._2gram", "one two"))
Query expected = new MultiPhraseQuery.Builder()
.add(new Term("field._2gram", "one two"))
.build();
assertThat(actual, equalTo(expected));
}
{
final Query actual = new MatchPhraseQueryBuilder("a_field", "one two three")
Query actual = new MatchPhraseQueryBuilder("field", "one two three")
.toQuery(queryShardContext);
final Query expected = new MultiPhraseQuery.Builder()
.add(new Term("a_field._3gram", "one two three"))
Query expected = new MultiPhraseQuery.Builder()
.add(new Term("field._3gram", "one two three"))
.build();
assertThat(actual, equalTo(expected));
}
{
final Query actual = new MatchPhraseQueryBuilder("a_field", "one two three four")
Query actual = new MatchPhraseQueryBuilder("field", "one two three four")
.toQuery(queryShardContext);
final Query expected = new MultiPhraseQuery.Builder()
.add(new Term("a_field._3gram", "one two three"))
.add(new Term("a_field._3gram", "two three four"))
Query expected = new MultiPhraseQuery.Builder()
.add(new Term("field._3gram", "one two three"))
.add(new Term("field._3gram", "two three four"))
.build();
assertThat(actual, equalTo(expected));
}
{
final Query actual = new MatchPhraseQueryBuilder("a_field", "one two")
Query actual = new MatchPhraseQueryBuilder("field", "one two")
.slop(1)
.toQuery(queryShardContext);
final Query expected = new MultiPhraseQuery.Builder()
.add(new Term("a_field", "one"))
.add(new Term("a_field", "two"))
Query expected = new MultiPhraseQuery.Builder()
.add(new Term("field", "one"))
.add(new Term("field", "two"))
.setSlop(1)
.build();
assertThat(actual, equalTo(expected));
}
{
final Query actual = new MatchPhraseQueryBuilder("a_field._2gram", "one two")
Query actual = new MatchPhraseQueryBuilder("field._2gram", "one two")
.toQuery(queryShardContext);
final Query expected = new TermQuery(new Term("a_field._2gram", "one two"));
Query expected = new TermQuery(new Term("field._2gram", "one two"));
assertThat(actual, equalTo(expected));
}
{
final Query actual = new MatchPhraseQueryBuilder("a_field._2gram", "one two three")
Query actual = new MatchPhraseQueryBuilder("field._2gram", "one two three")
.toQuery(queryShardContext);
final Query expected = new MultiPhraseQuery.Builder()
.add(new Term("a_field._2gram", "one two"))
.add(new Term("a_field._2gram", "two three"))
Query expected = new MultiPhraseQuery.Builder()
.add(new Term("field._2gram", "one two"))
.add(new Term("field._2gram", "two three"))
.build();
assertThat(actual, equalTo(expected));
}
{
final Query actual = new MatchPhraseQueryBuilder("a_field._3gram", "one two three")
Query actual = new MatchPhraseQueryBuilder("field._3gram", "one two three")
.toQuery(queryShardContext);
final Query expected = new TermQuery(new Term("a_field._3gram", "one two three"));
Query expected = new TermQuery(new Term("field._3gram", "one two three"));
assertThat(actual, equalTo(expected));
}
{
final Query actual = new MatchPhraseQueryBuilder("a_field._3gram", "one two three four")
Query actual = new MatchPhraseQueryBuilder("field._3gram", "one two three four")
.toQuery(queryShardContext);
final Query expected = new MultiPhraseQuery.Builder()
.add(new Term("a_field._3gram", "one two three"))
.add(new Term("a_field._3gram", "two three four"))
Query expected = new MultiPhraseQuery.Builder()
.add(new Term("field._3gram", "one two three"))
.add(new Term("field._3gram", "two three four"))
.build();
assertThat(actual, equalTo(expected));
}
{
expectThrows(IllegalArgumentException.class,
() -> new MatchPhraseQueryBuilder("a_field._index_prefix", "one two three four").toQuery(queryShardContext));
() -> new MatchPhraseQueryBuilder("field._index_prefix", "one two three four").toQuery(queryShardContext));
}
}
@@ -688,29 +502,16 @@ public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase<SearchA
}
public void testMultiMatchBoolPrefix() throws IOException {
final IndexService indexService = createIndex("test", Settings.EMPTY);
final QueryShardContext queryShardContext = indexService.newQueryShardContext(randomInt(20), null,
() -> { throw new UnsupportedOperationException(); }, null);
final String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("a_field")
.field("type", "search_as_you_type")
.field("max_shingle_size", 4)
.endObject()
.endObject()
.endObject()
.endObject());
QueryShardContext queryShardContext = createQueryShardContext(
createMapperService(fieldMapping(b -> b.field("type", "search_as_you_type").field("max_shingle_size", 4)))
);
queryShardContext.getMapperService().merge("_doc", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
final MultiMatchQueryBuilder builder = new MultiMatchQueryBuilder(
MultiMatchQueryBuilder builder = new MultiMatchQueryBuilder(
"quick brown fox jump lazy dog",
"a_field",
"a_field._2gram",
"a_field._3gram",
"a_field._4gram"
"field",
"field._2gram",
"field._3gram",
"field._4gram"
);
builder.type(MultiMatchQueryBuilder.Type.BOOL_PREFIX);
@@ -720,60 +521,35 @@ public class SearchAsYouTypeFieldMapperTests extends FieldMapperTestCase<SearchA
assertThat(disMaxQuery.getDisjuncts(), hasSize(4));
assertThat(disMaxQuery.getDisjuncts(), containsInAnyOrder(
buildBoolPrefixQuery(
"a_field", "a_field._index_prefix", asList("quick", "brown", "fox", "jump", "lazy", "dog")),
buildBoolPrefixQuery("a_field._2gram", "a_field._index_prefix",
"field", "field._index_prefix", asList("quick", "brown", "fox", "jump", "lazy", "dog")),
buildBoolPrefixQuery("field._2gram", "field._index_prefix",
asList("quick brown", "brown fox", "fox jump", "jump lazy", "lazy dog")),
buildBoolPrefixQuery("a_field._3gram", "a_field._index_prefix",
buildBoolPrefixQuery("field._3gram", "field._index_prefix",
asList("quick brown fox", "brown fox jump", "fox jump lazy", "jump lazy dog")),
buildBoolPrefixQuery("a_field._4gram", "a_field._index_prefix",
buildBoolPrefixQuery("field._4gram", "field._index_prefix",
asList("quick brown fox jump", "brown fox jump lazy", "fox jump lazy dog"))));
}
private void documentParsingTestCase(Collection<String> values) throws IOException {
final String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("a_field")
.field("type", "search_as_you_type")
.endObject()
.endObject()
.endObject()
.endObject());
DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
final ParsedDocument parsedDocument = defaultMapper.parse(source(b -> {
if (values.size() > 1) {
b.array("field", values.toArray(new String[0]));
} else {
b.field("field", values.iterator().next());
}
}));
final DocumentMapper defaultMapper = createIndex("test")
.mapperService()
.documentMapperParser()
.parse("_doc", new CompressedXContent(mapping));
final XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject();
if (values.size() > 1) {
builder.array("a_field", values.toArray(new String[0]));
} else {
builder.field("a_field", values.iterator().next());
}
builder.endObject();
final ParsedDocument parsedDocument = defaultMapper.parse(
new SourceToParse("test", "_doc", "1", BytesReference.bytes(builder), XContentType.JSON));
IndexableField[] rootFields = parsedDocument.rootDoc().getFields("a_field");
IndexableField[] prefixFields = parsedDocument.rootDoc().getFields("a_field._index_prefix");
IndexableField[] shingle2Fields = parsedDocument.rootDoc().getFields("a_field._2gram");
IndexableField[] shingle3Fields = parsedDocument.rootDoc().getFields("a_field._3gram");
IndexableField[] rootFields = parsedDocument.rootDoc().getFields("field");
IndexableField[] prefixFields = parsedDocument.rootDoc().getFields("field._index_prefix");
IndexableField[] shingle2Fields = parsedDocument.rootDoc().getFields("field._2gram");
IndexableField[] shingle3Fields = parsedDocument.rootDoc().getFields("field._3gram");
for (IndexableField[] fields : new IndexableField[][]{ rootFields, prefixFields, shingle2Fields, shingle3Fields}) {
Set<String> expectedValues = Arrays.stream(fields).map(IndexableField::stringValue).collect(Collectors.toSet());
assertThat(values, equalTo(expectedValues));
}
}
private static Matcher<IndexableField> indexableFieldMatcher(String value, Class<? extends FieldType> fieldTypeClass) {
return Matchers.allOf(
hasProperty(IndexableField::stringValue, equalTo(value)),
hasProperty(IndexableField::fieldType, instanceOf(fieldTypeClass))
);
}
private static void assertRootFieldMapper(SearchAsYouTypeFieldMapper mapper,
int maxShingleSize,
String analyzerName) {

ICUCollationKeywordFieldMapperTests.java

@@ -18,11 +18,10 @@
*/
package org.elasticsearch.index.mapper;
import static org.hamcrest.Matchers.equalTo;
import com.ibm.icu.text.Collator;
import com.ibm.icu.text.RawCollationKey;
import com.ibm.icu.util.ULocale;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
@@ -31,17 +30,11 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;
import java.io.IOException;
@@ -50,14 +43,15 @@ import java.util.Collection;
import java.util.Set;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class ICUCollationKeywordFieldMapperTests extends FieldMapperTestCase<ICUCollationKeywordFieldMapper.Builder> {
public class ICUCollationKeywordFieldMapperTests extends FieldMapperTestCase2<ICUCollationKeywordFieldMapper.Builder> {
private static final String FIELD_TYPE = "icu_collation_keyword";
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return Arrays.asList(AnalysisICUPlugin.class, InternalSettingsPlugin.class);
protected Collection<? extends Plugin> getPlugins() {
return List.of(new AnalysisICUPlugin());
}
@Override
@@ -70,13 +64,8 @@ public class ICUCollationKeywordFieldMapperTests extends FieldMapperTestCase<ICU
return org.elasticsearch.common.collect.Set.of("analyzer", "similarity");
}
IndexService indexService;
DocumentMapperParser parser;
@Before
public void setup() {
indexService = createIndex("test");
parser = indexService.mapperService().documentMapperParser();
addModifier("strength", false, (a, b) -> {
a.strength("primary");
b.strength("secondary");
@@ -102,22 +91,16 @@ public class ICUCollationKeywordFieldMapperTests extends FieldMapperTestCase<ICU
addBooleanModifier("hiragana_quaternary_mode", false, ICUCollationKeywordFieldMapper.Builder::hiraganaQuaternaryMode);
}
@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", FIELD_TYPE);
}
public void testDefaults() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
assertEquals(Strings.toString(fieldMapping(this::minimalMapping)), mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234")));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
@@ -144,46 +127,17 @@ public class ICUCollationKeywordFieldMapperTests extends FieldMapperTestCase<ICU
}
public void testNullValue() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.nullField("field")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
ParsedDocument doc = mapper.parse(source(b -> b.nullField("field")));
assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field"));
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("null_value", "1234").endObject().endObject()
.endObject().endObject());
mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.endObject()),
XContentType.JSON));
mapper = createDocumentMapper(fieldMapping(b -> b.field("type", FIELD_TYPE).field("null_value", "1234")));
doc = mapper.parse(source(b -> {}));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(0, fields.length);
doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.nullField("field")
.endObject()),
XContentType.JSON));
doc = mapper.parse(source(b -> b.nullField("field")));
Collator collator = Collator.getInstance(ULocale.ROOT);
RawCollationKey key = collator.getRawCollationKey("1234", null);
BytesRef expected = new BytesRef(key.bytes, 0, key.size);
@@ -194,44 +148,16 @@ public class ICUCollationKeywordFieldMapperTests extends FieldMapperTestCase<ICU
}
public void testEnableStore() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("store", true).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", FIELD_TYPE).field("store", true)));
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234")));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
assertTrue(fields[0].fieldType().stored());
}
public void testDisableIndex() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("index", false).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", FIELD_TYPE).field("index", false)));
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234")));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
assertEquals(IndexOptions.NONE, fields[0].fieldType().indexOptions());
@@ -239,43 +165,17 @@ public class ICUCollationKeywordFieldMapperTests extends FieldMapperTestCase<ICU
}
public void testDisableDocValues() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("doc_values", false).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", FIELD_TYPE).field("doc_values", false)));
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234")));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
assertEquals(DocValuesType.NONE, fields[0].fieldType().docValuesType());
}
public void testMultipleValues() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", Arrays.asList("1234", "5678"))
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
ParsedDocument doc = mapper.parse(source(b -> b.field("field", List.of("1234", "5678"))));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(4, fields.length);
@ -323,79 +223,35 @@ public class ICUCollationKeywordFieldMapperTests extends FieldMapperTestCase<ICU
}
public void testIndexOptions() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("index_options", "freqs").endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", FIELD_TYPE).field("index_options", "freqs")));
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234")));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
assertEquals(IndexOptions.DOCS_AND_FREQS, fields[0].fieldType().indexOptions());
for (String indexOptions : Arrays.asList("positions", "offsets")) {
final String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("index_options", indexOptions).endObject().endObject()
.endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping2)));
assertEquals("The [" + FIELD_TYPE + "] field does not support positions, got [index_options]=" + indexOptions,
e.getMessage());
Exception e = expectThrows(MapperParsingException.class,
() -> createDocumentMapper(fieldMapping(b -> b.field("type", FIELD_TYPE).field("index_options", indexOptions))));
assertThat(
e.getMessage(),
containsString("The [" + FIELD_TYPE + "] field does not support positions, got [index_options]=" + indexOptions)
);
}
}
public void testEnableNorms() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("norms", true).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", FIELD_TYPE).field("norms", true)));
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234")));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
assertFalse(fields[0].fieldType().omitNorms());
}
public void testCollator() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", FIELD_TYPE)
.field("language", "tr")
.field("strength", "primary")
.endObject().endObject().endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "I WİLL USE TURKİSH CASING")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", FIELD_TYPE).field("language", "tr").field("strength", "primary"))
);
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "I WİLL USE TURKİSH CASING")));
Collator collator = Collator.getInstance(new ULocale("tr"));
collator.setStrength(Collator.PRIMARY);
RawCollationKey key = collator.getRawCollationKey("ı will use turkish casıng", null); // should collate to same value
@ -423,69 +279,35 @@ public class ICUCollationKeywordFieldMapperTests extends FieldMapperTestCase<ICU
}
public void testUpdateCollator() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", FIELD_TYPE)
.field("language", "tr")
.field("strength", "primary")
.endObject().endObject().endObject().endObject());
indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
MapperService mapperService = createMapperService(
fieldMapping(b -> b.field("type", FIELD_TYPE).field("language", "tr").field("strength", "primary"))
);
String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", FIELD_TYPE)
.field("language", "en")
.endObject().endObject().endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> indexService.mapperService().merge("type",
new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE));
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> merge(mapperService, fieldMapping(b -> b.field("type", FIELD_TYPE).field("language", "en")))
);
assertThat(e.getMessage(), containsString("mapper [field] has different [collator]"));
}
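Aside (not part of this commit): merge(mapperService, fieldMapping(...)) condenses the explicit MapperService.merge call from the removed lines. A hedged sketch of roughly what it does, assuming the helper serializes the builder and reuses MergeReason.MAPPING_UPDATE; the "_doc" type name is an assumption, not something this diff shows:

    // Sketch only, not the MapperTestCase implementation.
    private void mergeSketch(MapperService mapperService, XContentBuilder mapping) throws IOException {
        mapperService.merge(
            "_doc",                                             // assumed type name
            new CompressedXContent(Strings.toString(mapping)),  // serialize the builder, as the removed lines did
            MergeReason.MAPPING_UPDATE
        );
    }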
public void testIgnoreAbove() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("ignore_above", 5).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "elk")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", FIELD_TYPE).field("ignore_above", 5)));
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "elk")));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "elasticsearch")
.endObject()),
XContentType.JSON));
doc = mapper.parse(source(b -> b.field("field", "elasticsearch")));
fields = doc.rootDoc().getFields("field");
assertEquals(0, fields.length);
}
public void testUpdateIgnoreAbove() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
.endObject().endObject());
indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("ignore_above", 5).endObject().endObject()
.endObject().endObject());
indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
MapperService mapperService = createMapperService(fieldMapping(this::minimalMapping));
merge(mapperService, fieldMapping(b -> b.field("type", FIELD_TYPE).field("ignore_above", 5)));
ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.field("field", "elasticsearch")));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(0, fields.length);
}
public void testFetchSourceValue() {

View File

@ -17,7 +17,6 @@
* under the License.
*/
apply plugin: 'elasticsearch.yaml-rest-test'
apply plugin: 'elasticsearch.internal-cluster-test'
esplugin {
description 'The Mapper Murmur3 plugin allows to compute hashes of a field\'s values at index-time and to store them in the index.'
@ -29,5 +28,3 @@ restResources {
includeCore '_common', 'indices', 'index', 'search'
}
}
// no unit tests
test.enabled = false

View File

@ -1,171 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.murmur3;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapperTestCase;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.mapper.MapperRegistry;
import org.elasticsearch.plugin.mapper.MapperMurmur3Plugin;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Set;
import java.util.function.Supplier;
import static org.hamcrest.Matchers.containsString;
public class Murmur3FieldMapperTests extends FieldMapperTestCase<Murmur3FieldMapper.Builder> {
MapperRegistry mapperRegistry;
IndexService indexService;
DocumentMapperParser parser;
@Override
protected Set<String> unsupportedProperties() {
return org.elasticsearch.common.collect.Set.of("analyzer", "similarity", "doc_values", "index");
}
@Before
public void setup() {
indexService = createIndex("test");
mapperRegistry = new MapperRegistry(
Collections.singletonMap(Murmur3FieldMapper.CONTENT_TYPE, new Murmur3FieldMapper.TypeParser()),
Collections.emptyMap(), MapperPlugin.NOOP_FIELD_FILTER);
Supplier<QueryShardContext> queryShardContext = () -> {
return indexService.newQueryShardContext(0, null, () -> { throw new UnsupportedOperationException(); }, null);
};
parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(), indexService.xContentRegistry(),
indexService.similarityService(), mapperRegistry, queryShardContext);
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(InternalSettingsPlugin.class, MapperMurmur3Plugin.class);
}
public void testDefaults() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "murmur3")
.endObject().endObject().endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument parsedDoc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.endObject()),
XContentType.JSON));
IndexableField[] fields = parsedDoc.rootDoc().getFields("field");
assertNotNull(fields);
assertEquals(Arrays.toString(fields), 1, fields.length);
IndexableField field = fields[0];
assertEquals(IndexOptions.NONE, field.fieldType().indexOptions());
assertEquals(DocValuesType.SORTED_NUMERIC, field.fieldType().docValuesType());
}
public void testDocValuesSettingNotAllowed() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "murmur3")
.field("doc_values", false)
.endObject().endObject().endObject().endObject());
try {
parser.parse("type", new CompressedXContent(mapping));
fail("expected a mapper parsing exception");
} catch (MapperParsingException e) {
assertTrue(e.getMessage().contains("Setting [doc_values] cannot be modified"));
}
// even setting to the default is not allowed, the setting is invalid
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "murmur3")
.field("doc_values", true)
.endObject().endObject().endObject().endObject());
try {
parser.parse("type", new CompressedXContent(mapping));
fail("expected a mapper parsing exception");
} catch (MapperParsingException e) {
assertTrue(e.getMessage().contains("Setting [doc_values] cannot be modified"));
}
}
public void testIndexSettingNotAllowed() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "murmur3")
.field("index", "not_analyzed")
.endObject().endObject().endObject().endObject());
try {
parser.parse("type", new CompressedXContent(mapping));
fail("expected a mapper parsing exception");
} catch (MapperParsingException e) {
assertTrue(e.getMessage().contains("Setting [index] cannot be modified"));
}
// even setting to the default is not allowed, the setting is invalid
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "murmur3")
.field("index", "no")
.endObject().endObject().endObject().endObject());
try {
parser.parse("type", new CompressedXContent(mapping));
fail("expected a mapper parsing exception");
} catch (MapperParsingException e) {
assertTrue(e.getMessage().contains("Setting [index] cannot be modified"));
}
}
public void testEmptyName() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("")
.field("type", "murmur3")
.endObject().endObject().endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping))
);
assertThat(e.getMessage(), containsString("name cannot be empty string"));
}
@Override
protected Murmur3FieldMapper.Builder newBuilder() {
return new Murmur3FieldMapper.Builder("murmur");
}
}

View File

@ -0,0 +1,101 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.murmur3;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapperTestCase2;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.plugin.mapper.MapperMurmur3Plugin;
import org.elasticsearch.plugins.Plugin;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Set;
import static org.hamcrest.Matchers.containsString;
public class Murmur3FieldMapperTests extends FieldMapperTestCase2<Murmur3FieldMapper.Builder> {
@Override
protected Set<String> unsupportedProperties() {
return org.elasticsearch.common.collect.Set.of("analyzer", "similarity", "doc_values", "index");
}
@Override
protected Collection<? extends Plugin> getPlugins() {
return org.elasticsearch.common.collect.List.of(new MapperMurmur3Plugin());
}
@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "murmur3");
}
public void testDefaults() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
ParsedDocument parsedDoc = mapper.parse(source(b -> b.field("field", "value")));
IndexableField[] fields = parsedDoc.rootDoc().getFields("field");
assertNotNull(fields);
assertEquals(Arrays.toString(fields), 1, fields.length);
IndexableField field = fields[0];
assertEquals(IndexOptions.NONE, field.fieldType().indexOptions());
assertEquals(DocValuesType.SORTED_NUMERIC, field.fieldType().docValuesType());
}
public void testDocValuesSettingNotAllowed() throws Exception {
Exception e = expectThrows(
MapperParsingException.class,
() -> createMapperService(fieldMapping(b -> b.field("type", "murmur3").field("doc_values", false)))
);
assertThat(e.getMessage(), containsString("Setting [doc_values] cannot be modified"));
// even setting to the default is not allowed, the setting is invalid
e = expectThrows(
MapperParsingException.class,
() -> createMapperService(fieldMapping(b -> b.field("type", "murmur3").field("doc_values", true)))
);
assertThat(e.getMessage(), containsString("Setting [doc_values] cannot be modified"));
}
public void testIndexSettingNotAllowed() throws Exception {
Exception e = expectThrows(
MapperParsingException.class,
() -> createMapperService(fieldMapping(b -> b.field("type", "murmur3").field("index", "not_analyzed")))
);
assertThat(e.getMessage(), containsString("Setting [index] cannot be modified"));
// even setting to the default is not allowed, the setting is invalid
e = expectThrows(
MapperParsingException.class,
() -> createMapperService(fieldMapping(b -> b.field("type", "murmur3").field("index", "no")))
);
assertThat(e.getMessage(), containsString("Setting [index] cannot be modified"));
}
@Override
protected Murmur3FieldMapper.Builder newBuilder() {
return new Murmur3FieldMapper.Builder("murmur");
}
}
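Aside (not part of this commit): for comparison with the removed test, the mapping that fieldMapping(this::minimalMapping) generates should be equivalent to the hand-written JSON the old test built by hand. A sketch of the assumed shape; the "_doc" root type name is a guess about MapperTestCase, while the rest mirrors the deleted mapping string:

    // Assumed shape of the generated mapping; sketch only.
    private XContentBuilder assumedMinimalMurmur3Mapping() throws IOException {
        return XContentFactory.jsonBuilder()
            .startObject()
                .startObject("_doc")                // assumed root type name
                    .startObject("properties")
                        .startObject("field")
                            .field("type", "murmur3")
                        .endObject()
                    .endObject()
                .endObject()
            .endObject();
    }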

View File

@ -87,10 +87,6 @@ import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.core.Is.is;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class TextFieldMapperTests extends FieldMapperTestCase2<TextFieldMapper.Builder> {
@ -615,16 +611,6 @@ public class TextFieldMapperTests extends FieldMapperTestCase2<TextFieldMapper.B
}
}
private QueryShardContext createQueryShardContext(MapperService mapperService) {
QueryShardContext queryShardContext = mock(QueryShardContext.class);
when(queryShardContext.getMapperService()).thenReturn(mapperService);
when(queryShardContext.fieldMapper(anyString())).thenAnswer(inv -> mapperService.fieldType(inv.getArguments()[0].toString()));
when(queryShardContext.getIndexAnalyzers()).thenReturn(mapperService.getIndexAnalyzers());
when(queryShardContext.getSearchQuoteAnalyzer(anyObject())).thenCallRealMethod();
when(queryShardContext.getSearchAnalyzer(anyObject())).thenCallRealMethod();
return queryShardContext;
}
public void testFastPhraseMapping() throws IOException {
MapperService mapperService = createMapperService(mapping(b -> {
b.startObject("field").field("type", "text").field("analyzer", "my_stop_analyzer").field("index_phrases", true).endObject();

View File

@ -42,6 +42,7 @@ import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.mapper.MapperRegistry;
@ -61,6 +62,8 @@ import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static java.util.stream.Collectors.toList;
import static org.hamcrest.Matchers.containsString;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@ -170,6 +173,20 @@ public abstract class MapperTestCase extends ESTestCase {
});
}
QueryShardContext createQueryShardContext(MapperService mapperService) {
QueryShardContext queryShardContext = mock(QueryShardContext.class);
when(queryShardContext.getMapperService()).thenReturn(mapperService);
when(queryShardContext.fieldMapper(anyString())).thenAnswer(inv -> mapperService.fieldType(inv.getArguments()[0].toString()));
when(queryShardContext.getIndexAnalyzers()).thenReturn(mapperService.getIndexAnalyzers());
when(queryShardContext.getSearchQuoteAnalyzer(anyObject())).thenCallRealMethod();
when(queryShardContext.getSearchAnalyzer(anyObject())).thenCallRealMethod();
when(queryShardContext.getIndexSettings()).thenReturn(mapperService.getIndexSettings());
when(queryShardContext.simpleMatchToIndexNames(anyObject())).thenAnswer(
inv -> mapperService.simpleMatchToFullName(inv.getArguments()[0].toString())
);
return queryShardContext;
}
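Aside (not part of this commit): with the mock hoisted into the base class, subclasses such as TextFieldMapperTests (whose private copy is deleted above) can request the context directly. A minimal usage sketch, assuming a field type whose minimal mapping parses cleanly and that fieldMapping registers the field under the name "field":

    // Usage sketch: build a MapperService from the minimal mapping, then mock a QueryShardContext around it.
    public void testQueryShardContextSketch() throws IOException {
        MapperService mapperService = createMapperService(fieldMapping(this::minimalMapping));
        QueryShardContext context = createQueryShardContext(mapperService);
        assertNotNull(context.fieldMapper("field")); // resolves through the stubbed mapperService.fieldType(...)
    }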
protected abstract void minimalMapping(XContentBuilder b) throws IOException;
public final void testEmptyName() throws IOException {
@ -204,35 +221,39 @@ public abstract class MapperTestCase extends ESTestCase {
return true;
}
protected void metaMapping(XContentBuilder b) throws IOException {
minimalMapping(b);
}
public final void testMeta() throws IOException {
assumeTrue("Field doesn't support meta", supportsMeta());
XContentBuilder mapping = fieldMapping(
b -> {
minimalMapping(b);
metaMapping(b);
b.field("meta", Collections.singletonMap("foo", "bar"));
}
);
MapperService mapperService = createMapperService(mapping);
assertEquals(
XContentHelper.convertToMap(BytesReference.bytes(mapping), false, mapping.contentType()),
XContentHelper.convertToMap(mapperService.documentMapper().mappingSource().uncompressed(), false, mapping.contentType())
XContentHelper.convertToMap(BytesReference.bytes(mapping), false, mapping.contentType()).v2(),
XContentHelper.convertToMap(mapperService.documentMapper().mappingSource().uncompressed(), false, mapping.contentType()).v2()
);
mapping = fieldMapping(this::minimalMapping);
mapping = fieldMapping(this::metaMapping);
merge(mapperService, mapping);
assertEquals(
XContentHelper.convertToMap(BytesReference.bytes(mapping), false, mapping.contentType()),
XContentHelper.convertToMap(mapperService.documentMapper().mappingSource().uncompressed(), false, mapping.contentType())
XContentHelper.convertToMap(BytesReference.bytes(mapping), false, mapping.contentType()).v2(),
XContentHelper.convertToMap(mapperService.documentMapper().mappingSource().uncompressed(), false, mapping.contentType()).v2()
);
mapping = fieldMapping(b -> {
minimalMapping(b);
metaMapping(b);
b.field("meta", Collections.singletonMap("baz", "quux"));
});
merge(mapperService, mapping);
assertEquals(
XContentHelper.convertToMap(BytesReference.bytes(mapping), false, mapping.contentType()),
XContentHelper.convertToMap(mapperService.documentMapper().mappingSource().uncompressed(), false, mapping.contentType())
XContentHelper.convertToMap(BytesReference.bytes(mapping), false, mapping.contentType()).v2(),
XContentHelper.convertToMap(mapperService.documentMapper().mappingSource().uncompressed(), false, mapping.contentType()).v2()
);
}
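Aside (not part of this commit): metaMapping defaults to minimalMapping, so testMeta compares against the minimal mapping plus the meta entry. A subclass whose mapper always serializes an additional default parameter could override the hook so the round-trip comparison lines up; the parameter below is purely hypothetical:

    // Hypothetical override in a MapperTestCase subclass.
    @Override
    protected void metaMapping(XContentBuilder b) throws IOException {
        minimalMapping(b);
        b.field("index_options", "docs"); // example of a value the mapper might always emit, even at defaults
    }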