Migrate server mapper tests to new MapperTestCase (#61378) (#61490)

This continues #61301, migrating all of the mappers in `server` to the
new `MapperTestCase` which is nicer than `FieldMapperTestCase` because
it doesn't depend on all of Elasticsearch.
This commit is contained in:
Nik Everett 2020-08-24 13:33:35 -04:00 committed by GitHub
parent 17b5a0d25e
commit f3b6d49ae1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 1064 additions and 2537 deletions

View File

@ -280,6 +280,10 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<
@Override
public LegacyGeoShapeFieldMapper build(BuilderContext context) {
if (name.isEmpty()) {
// Check for an empty name early so we can throw a consistent error message
throw new IllegalArgumentException("name cannot be empty string");
}
return new LegacyGeoShapeFieldMapper(name, fieldType, buildFieldType(context), ignoreMalformed(context),
coerce(context), orientation(), ignoreZValue(), context.indexSettings(),
multiFieldsBuilder.build(this, context), copyTo);

View File

@ -18,48 +18,33 @@
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.geo.RandomGeoGenerator;
import org.hamcrest.CoreMatchers;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.geometry.utils.Geohash.stringEncode;
import static org.elasticsearch.index.mapper.AbstractGeometryFieldMapper.Names.IGNORE_MALFORMED;
import static org.elasticsearch.index.mapper.AbstractGeometryFieldMapper.Names.IGNORE_Z_VALUE;
import static org.elasticsearch.index.mapper.AbstractPointGeometryFieldMapper.Names.NULL_VALUE;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.both;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
public class GeoPointFieldMapperTests extends FieldMapperTestCase<GeoPointFieldMapper.Builder> {
public class GeoPointFieldMapperTests extends FieldMapperTestCase2<GeoPointFieldMapper.Builder> {
@Override
protected Set<String> unsupportedProperties() {
@ -67,378 +52,177 @@ public class GeoPointFieldMapperTests extends FieldMapperTestCase<GeoPointFieldM
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(InternalSettingsPlugin.class);
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "geo_point");
}
// Indexing a geohash string should produce an indexed point field.
public void testGeoHashValue() throws Exception {
    DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
    ParsedDocument doc = mapper.parse(source(b -> b.field("field", stringEncode(1.3, 1.2))));
    assertThat(doc.rootDoc().getField("field"), notNullValue());
}
// A WKT POINT literal should be accepted as a geo_point value.
public void testWKT() throws Exception {
    DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
    ParsedDocument doc = mapper.parse(source(b -> b.field("field", "POINT (2 3)")));
    assertThat(doc.rootDoc().getField("field"), notNullValue());
}
// An object with explicit lat/lon keys should index when store=true.
public void testLatLonValuesStored() throws Exception {
    DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_point").field("store", true)));
    ParsedDocument doc = mapper.parse(source(b -> b.startObject("field").field("lat", 1.2).field("lon", 1.3).endObject()));
    assertThat(doc.rootDoc().getField("field"), notNullValue());
}
// An array of lat/lon objects should index one point per element.
public void testArrayLatLonValues() throws Exception {
    DocumentMapper mapper = createDocumentMapper(
        fieldMapping(b -> b.field("type", "geo_point").field("doc_values", false).field("store", true))
    );
    ParsedDocument doc = mapper.parse(source(b -> {
        b.startArray("field");
        b.startObject().field("lat", 1.2).field("lon", 1.3).endObject();
        b.startObject().field("lat", 1.4).field("lon", 1.5).endObject();
        b.endArray();
    }));
    // doc values are enabled by default, but in this test we disable them; we should only have 2 points
    assertThat(doc.rootDoc().getFields("field"), notNullValue());
    assertThat(doc.rootDoc().getFields("field").length, equalTo(4));
}
// A single "lat,lon" string should be accepted as a geo_point value.
public void testLatLonInOneValue() throws Exception {
    DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
    ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1.2,1.3")));
    assertThat(doc.rootDoc().getField("field"), notNullValue());
}
// With ignore_z_value=true, a "lat,lon,z" string indexes (z discarded).
public void testLatLonStringWithZValue() throws Exception {
    DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_point").field("ignore_z_value", true)));
    ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1.2,1.3,10.0")));
    assertThat(doc.rootDoc().getField("field"), notNullValue());
}
// With ignore_z_value=false, a "lat,lon,z" string must be rejected.
public void testLatLonStringWithZValueException() throws Exception {
    DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_point").field("ignore_z_value", false)));
    Exception e = expectThrows(MapperParsingException.class, () -> mapper.parse(source(b -> b.field("field", "1.2,1.3,10.0"))));
    assertThat(e.getCause().getMessage(), containsString("but [ignore_z_value] parameter is [false]"));
}
// A "lat,lon" string should index when store=true.
public void testLatLonInOneValueStored() throws Exception {
    DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_point").field("store", true)));
    ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1.2,1.3")));
    assertThat(doc.rootDoc().getField("field"), notNullValue());
}
// An array of "lat,lon" strings should index one point per element.
public void testLatLonInOneValueArray() throws Exception {
    DocumentMapper mapper = createDocumentMapper(
        fieldMapping(b -> b.field("type", "geo_point").field("doc_values", false).field("store", true))
    );
    ParsedDocument doc = mapper.parse(source(b -> b.startArray("field").value("1.2,1.3").value("1.4,1.5").endArray()));
    // doc values are enabled by default, but in this test we disable them; we should only have 2 points
    assertThat(doc.rootDoc().getFields("field"), notNullValue());
    assertThat(doc.rootDoc().getFields("field"), arrayWithSize(4));
}
// A [lon, lat] numeric array should be accepted as a geo_point value.
public void testLonLatArray() throws Exception {
    DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
    ParsedDocument doc = mapper.parse(source(b -> b.startArray("field").value(1.3).value(1.2).endArray()));
    assertThat(doc.rootDoc().getField("field"), notNullValue());
}
// A dynamic template matching "point*" should map a [lon, lat] array as geo_point.
public void testLonLatArrayDynamic() throws Exception {
    XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_doc").startArray("dynamic_templates");
    {
        mapping.startObject().startObject("point");
        {
            mapping.field("match", "point*");
            mapping.startObject("mapping").field("type", "geo_point").endObject();
        }
        mapping.endObject().endObject();
    }
    mapping.endArray().endObject().endObject();
    DocumentMapper mapper = createDocumentMapper(mapping);
    ParsedDocument doc = mapper.parse(source(b -> b.startArray("point").value(1.3).value(1.2).endArray()));
    assertThat(doc.rootDoc().getField("point"), notNullValue());
}
// A [lon, lat] array with store=true yields index + doc-values + stored fields (3 total).
public void testLonLatArrayStored() throws Exception {
    DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_point").field("store", true)));
    ParsedDocument doc = mapper.parse(source(b -> b.startArray("field").value(1.3).value(1.2).endArray()));
    assertThat(doc.rootDoc().getFields("field").length, equalTo(3));
}
// An array of [lon, lat] arrays should index one point per inner array.
public void testLonLatArrayArrayStored() throws Exception {
    DocumentMapper mapper = createDocumentMapper(
        fieldMapping(b -> b.field("type", "geo_point").field("store", true).field("doc_values", false))
    );
    ParsedDocument doc = mapper.parse(source(b -> {
        b.startArray("field");
        b.startArray().value(1.3).value(1.2).endArray();
        b.startArray().value(1.5).value(1.4).endArray();
        b.endArray();
    }));
    assertThat(doc.rootDoc().getFields("field"), notNullValue());
    assertThat(doc.rootDoc().getFields("field").length, CoreMatchers.equalTo(4));
}
/**
 * Test that the ignore_z_value parameter correctly parses
 */
public void testIgnoreZValue() throws IOException {
    DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_point").field("ignore_z_value", true)));
    Mapper fieldMapper = mapper.mappers().getMapper("field");
    assertThat(fieldMapper, instanceOf(GeoPointFieldMapper.class));
    boolean ignoreZValue = ((GeoPointFieldMapper)fieldMapper).ignoreZValue().value();
    assertThat(ignoreZValue, equalTo(true));
    // explicit false ignore_z_value test
    mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_point").field("ignore_z_value", false)));
    fieldMapper = mapper.mappers().getMapper("field");
    assertThat(fieldMapper, instanceOf(GeoPointFieldMapper.class));
    ignoreZValue = ((GeoPointFieldMapper)fieldMapper).ignoreZValue().value();
    assertThat(ignoreZValue, equalTo(false));
}
// Multi-fields on a geo_point: sub-fields receive the geohash string form of the point.
public void testMultiField() throws Exception {
    DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> {
        b.field("type", "geo_point").field("doc_values", false);
        b.startObject("fields");
        {
            b.startObject("geohash").field("type", "keyword").field("doc_values", false).endObject(); // test geohash as keyword
            b.startObject("latlon").field("type", "text").field("doc_values", false).endObject(); // test geohash as text
        }
        b.endObject();
    }));
    ParseContext.Document doc = mapper.parse(source(b -> b.field("field", "POINT (2 3)"))).rootDoc();
    assertThat(doc.getFields("field"), arrayWithSize(1));
    // the point is quantized on indexing, so we only check for the approximate lat/lon
    assertThat(doc.getField("field"), hasToString(both(containsString("field:2.999")).and(containsString("1.999"))));
    assertThat(doc.getFields("field.geohash"), arrayWithSize(1));
    assertThat(doc.getField("field.geohash").binaryValue().utf8ToString(), equalTo("s093jd0k72s1"));
    assertThat(doc.getFields("field.latlon"), arrayWithSize(1));
    assertThat(doc.getField("field.latlon").stringValue(), equalTo("s093jd0k72s1"));
}
// null_value: a null field should index the configured default point.
public void testNullValue() throws Exception {
    DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_point").field("null_value", "1,2")));
    Mapper fieldMapper = mapper.mappers().getMapper("field");
    assertThat(fieldMapper, instanceOf(GeoPointFieldMapper.class));
    AbstractPointGeometryFieldMapper.ParsedPoint nullValue = ((GeoPointFieldMapper) fieldMapper).nullValue;
    assertThat(nullValue, equalTo(new GeoPoint(1, 2)));
    ParsedDocument doc = mapper.parse(source(b -> b.nullField("field")));
    assertThat(doc.rootDoc().getField("field"), notNullValue());
    BytesRef defaultValue = doc.rootDoc().getBinaryValue("field");
    // Shouldn't matter if we specify the value explicitly or use null value
    doc = mapper.parse(source(b -> b.field("field", "1, 2")));
    assertThat(defaultValue, equalTo(doc.rootDoc().getBinaryValue("field")));
    // A different point must produce a different binary value
    doc = mapper.parse(source(b -> b.field("field", "3, 4")));
    assertThat(defaultValue, not(equalTo(doc.rootDoc().getBinaryValue("field"))));
}
/**
@ -447,19 +231,15 @@ public class GeoPointFieldMapperTests extends FieldMapperTestCase<GeoPointFieldM
*/
public void testNullValueWithIgnoreMalformed() throws Exception {
// Set ignore_z_value = false and ignore_malformed = true and test that a malformed point for null_value is normalized.
String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject().startObject("type")
.startObject("properties").startObject("location")
.field("type", "geo_point")
.field(IGNORE_Z_VALUE.getPreferredName(), false)
.field(IGNORE_MALFORMED.getPreferredName(), true)
.field(NULL_VALUE.getPreferredName(), "91,181")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
DocumentMapper mapper = createDocumentMapper(
fieldMapping(
b -> b.field("type", "geo_point")
.field("ignore_z_value", false)
.field("ignore_malformed", true)
.field("null_value", "91,181")
)
);
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(GeoPointFieldMapper.class));
AbstractPointGeometryFieldMapper.ParsedPoint nullValue = ((GeoPointFieldMapper) fieldMapper).nullValue;
@ -468,128 +248,55 @@ public class GeoPointFieldMapperTests extends FieldMapperTestCase<GeoPointFieldM
}
// With ignore_malformed=true, an invalid geohash should be silently dropped.
public void testInvalidGeohashIgnored() throws Exception {
    DocumentMapper mapper = createDocumentMapper(
        fieldMapping(b -> b.field("type", "geo_point").field("ignore_malformed", "true"))
    );
    ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234.333")));
    assertThat(doc.rootDoc().getField("field"), nullValue());
}
// Without ignore_malformed, an invalid geohash must fail parsing with a clear cause.
public void testInvalidGeohashNotIgnored() throws Exception {
    DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
    MapperParsingException e = expectThrows(
        MapperParsingException.class,
        () -> mapper.parse(source(b -> b.field("field", "1234.333")))
    );
    assertThat(e.getMessage(), containsString("failed to parse field [field] of type [geo_point]"));
    assertThat(e.getRootCause().getMessage(), containsString("unsupported symbol [.] in geohash [1234.333]"));
}
public void testInvalidGeopointValuesIgnored() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("location")
.field("type", "geo_point")
.field("ignore_malformed", "true")
.endObject()
.endObject().endObject().endObject());
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_point").field("ignore_malformed", "true")));
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type", new CompressedXContent(mapping));
assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("location", "1234.333").endObject()
), XContentType.JSON)).rootDoc().getField("location"), nullValue());
assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("lat", "-").field("lon", 1.3).endObject()
), XContentType.JSON)).rootDoc().getField("location"), nullValue());
assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("lat", 1.3).field("lon", "-").endObject()
), XContentType.JSON)).rootDoc().getField("location"), nullValue());
assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("location", "-,1.3").endObject()
), XContentType.JSON)).rootDoc().getField("location"), nullValue());
assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("location", "1.3,-").endObject()
), XContentType.JSON)).rootDoc().getField("location"), nullValue());
assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("lat", "NaN").field("lon", "NaN").endObject()
), XContentType.JSON)).rootDoc().getField("location"), nullValue());
assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("lat", 12).field("lon", "NaN").endObject()
), XContentType.JSON)).rootDoc().getField("location"), nullValue());
assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("lat", "NaN").field("lon", 10).endObject()
), XContentType.JSON)).rootDoc().getField("location"), nullValue());
assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("location", "NaN,NaN").endObject()
), XContentType.JSON)).rootDoc().getField("location"), nullValue());
assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("location", "10,NaN").endObject()
), XContentType.JSON)).rootDoc().getField("location"), nullValue());
assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("location", "NaN,12").endObject()
), XContentType.JSON)).rootDoc().getField("location"), nullValue());
assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().startObject("location").nullField("lat").field("lon", 1).endObject().endObject()
), XContentType.JSON)).rootDoc().getField("location"), nullValue());
assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().startObject("location").nullField("lat").nullField("lon").endObject().endObject()
), XContentType.JSON)).rootDoc().getField("location"), nullValue());
assertThat(mapper.parse(source(b -> b.field("field", "1234.333"))).rootDoc().getField("field"), nullValue());
assertThat(
mapper.parse(source(b -> b.startObject("field").field("lat", "-").field("lon", 1.3).endObject())).rootDoc().getField("field"),
nullValue()
);
assertThat(
mapper.parse(source(b -> b.startObject("field").field("lat", 1.3).field("lon", "-").endObject())).rootDoc().getField("field"),
nullValue()
);
assertThat(mapper.parse(source(b -> b.field("field", "-,1.3"))).rootDoc().getField("field"), nullValue());
assertThat(mapper.parse(source(b -> b.field("field", "1.3,-"))).rootDoc().getField("field"), nullValue());
assertThat(
mapper.parse(source(b -> b.startObject("field").field("lat", "NaN").field("lon", 1.2).endObject())).rootDoc().getField("field"),
nullValue()
);
assertThat(
mapper.parse(source(b -> b.startObject("field").field("lat", 1.2).field("lon", "NaN").endObject())).rootDoc().getField("field"),
nullValue()
);
assertThat(mapper.parse(source(b -> b.field("field", "1.3,NaN"))).rootDoc().getField("field"), nullValue());
assertThat(mapper.parse(source(b -> b.field("field", "NaN,1.3"))).rootDoc().getField("field"), nullValue());
assertThat(
mapper.parse(source(b -> b.startObject("field").nullField("lat").field("lon", 1.2).endObject())).rootDoc().getField("field"),
nullValue()
);
assertThat(
mapper.parse(source(b -> b.startObject("field").field("lat", 1.2).nullField("lon").endObject())).rootDoc().getField("field"),
nullValue()
);
}
public void testFetchSourceValue() {

View File

@ -18,22 +18,16 @@
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin;
import org.junit.Before;
@ -43,13 +37,12 @@ import java.util.Collections;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.index.mapper.AbstractGeometryFieldMapper.Names.IGNORE_Z_VALUE;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.instanceOf;
public class GeoShapeFieldMapperTests extends FieldMapperTestCase<GeoShapeFieldMapper.Builder> {
public class GeoShapeFieldMapperTests extends FieldMapperTestCase2<GeoShapeFieldMapper.Builder> {
@Override
protected Set<String> unsupportedProperties() {
@ -70,22 +63,19 @@ public class GeoShapeFieldMapperTests extends FieldMapperTestCase<GeoShapeFieldM
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(InternalSettingsPlugin.class, TestGeoShapeFieldMapperPlugin.class);
protected Collection<? extends Plugin> getPlugins() {
return List.of(new TestGeoShapeFieldMapperPlugin());
}
@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "geo_shape");
}
public void testDefaultConfiguration() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
assertThat(geoShapeFieldMapper.fieldType().orientation(),
equalTo(GeoShapeFieldMapper.Defaults.ORIENTATION.value()));
@ -96,16 +86,8 @@ public class GeoShapeFieldMapperTests extends FieldMapperTestCase<GeoShapeFieldM
* Test that orientation parameter correctly parses
*/
public void testOrientationParsing() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("orientation", "left")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_shape").field("orientation", "left")));
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
ShapeBuilder.Orientation orientation = ((GeoShapeFieldMapper)fieldMapper).fieldType().orientation();
@ -114,16 +96,8 @@ public class GeoShapeFieldMapperTests extends FieldMapperTestCase<GeoShapeFieldM
assertThat(orientation, equalTo(ShapeBuilder.Orientation.CW));
// explicit right orientation test
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("orientation", "right")
.endObject().endObject()
.endObject().endObject());
defaultMapper = createIndex("test2").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
fieldMapper = defaultMapper.mappers().getMapper("location");
mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_shape").field("orientation", "right")));
fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
orientation = ((GeoShapeFieldMapper)fieldMapper).fieldType().orientation();
@ -136,34 +110,16 @@ public class GeoShapeFieldMapperTests extends FieldMapperTestCase<GeoShapeFieldM
* Test that coerce parameter correctly parses
*/
public void testCoerceParsing() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("coerce", "true")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_shape").field("coerce", true)));
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
boolean coerce = ((GeoShapeFieldMapper)fieldMapper).coerce().value();
assertThat(coerce, equalTo(true));
// explicit false coerce test
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("coerce", "false")
.endObject().endObject()
.endObject().endObject());
defaultMapper = createIndex("test2").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
fieldMapper = defaultMapper.mappers().getMapper("location");
mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_shape").field("coerce", false)));
fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
coerce = ((GeoShapeFieldMapper)fieldMapper).coerce().value();
assertThat(coerce, equalTo(false));
assertFieldWarnings("tree");
@ -174,32 +130,16 @@ public class GeoShapeFieldMapperTests extends FieldMapperTestCase<GeoShapeFieldM
* Test that accept_z_value parameter correctly parses
*/
public void testIgnoreZValue() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field(IGNORE_Z_VALUE.getPreferredName(), "true")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_shape").field("ignore_z_value", true)));
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
boolean ignoreZValue = ((GeoShapeFieldMapper)fieldMapper).ignoreZValue().value();
assertThat(ignoreZValue, equalTo(true));
// explicit false accept_z_value test
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field(IGNORE_Z_VALUE.getPreferredName(), "false")
.endObject().endObject()
.endObject().endObject());
defaultMapper = createIndex("test2").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
fieldMapper = defaultMapper.mappers().getMapper("location");
mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_shape").field("ignore_z_value", false)));
fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
ignoreZValue = ((GeoShapeFieldMapper)fieldMapper).ignoreZValue().value();
@ -210,34 +150,16 @@ public class GeoShapeFieldMapperTests extends FieldMapperTestCase<GeoShapeFieldM
* Test that ignore_malformed parameter correctly parses
*/
public void testIgnoreMalformedParsing() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("ignore_malformed", "true")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_shape").field("ignore_malformed", true)));
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
Explicit<Boolean> ignoreMalformed = ((GeoShapeFieldMapper)fieldMapper).ignoreMalformed();
assertThat(ignoreMalformed.value(), equalTo(true));
// explicit false ignore_malformed test
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("ignore_malformed", "false")
.endObject().endObject()
.endObject().endObject());
defaultMapper = createIndex("test2").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
fieldMapper = defaultMapper.mappers().getMapper("location");
mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_shape").field("ignore_malformed", false)));
fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
ignoreMalformed = ((GeoShapeFieldMapper)fieldMapper).ignoreMalformed();
assertThat(ignoreMalformed.explicit(), equalTo(true));
assertThat(ignoreMalformed.value(), equalTo(false));
@ -253,115 +175,43 @@ public class GeoShapeFieldMapperTests extends FieldMapperTestCase<GeoShapeFieldM
}
public void testGeoShapeMapperMerge() throws Exception {
String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("shape").field("type", "geo_shape")
.field("orientation", "ccw")
.endObject().endObject().endObject().endObject());
MapperService mapperService = createIndex("test").mapperService();
DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(stage1Mapping),
MapperService.MergeReason.MAPPING_UPDATE);
// verify nothing changed
Mapper fieldMapper = docMapper.mappers().getMapper("shape");
MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "geo_shape").field("orientation", "ccw")));
Mapper fieldMapper = mapperService.documentMapper().mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CCW));
// change mapping; orientation
String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("shape").field("type", "geo_shape")
.field("orientation", "cw").endObject().endObject().endObject().endObject());
docMapper = mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE);
fieldMapper = docMapper.mappers().getMapper("shape");
merge(mapperService, fieldMapping(b -> b.field("type", "geo_shape").field("orientation", "cw")));
fieldMapper = mapperService.documentMapper().mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CW));
}
public void testEmptyName() throws Exception {
// after 5.x
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("")
.field("type", "geo_shape")
.endObject().endObject()
.endObject().endObject());
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type1", new CompressedXContent(mapping))
);
assertThat(e.getMessage(), containsString("name cannot be empty string"));
}
public void testSerializeDefaults() throws Exception {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
{
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location"));
assertTrue(serialized, serialized.contains("\"orientation\":\"" +
AbstractShapeGeometryFieldMapper.Defaults.ORIENTATION.value() + "\""));
}
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
assertThat(
Strings.toString(
mapper.mappers().getMapper("field"),
new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true"))
),
containsString("\"orientation\":\"" + AbstractShapeGeometryFieldMapper.Defaults.ORIENTATION.value() + "\"")
);
}
public void testGeoShapeArrayParsing() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("location")
.field("type", "geo_shape")
.endObject()
.endObject()
.endObject()
.endObject());
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser()
.parse("_doc", new CompressedXContent(mapping));
BytesReference arrayedDoc = BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startArray("shape")
.startObject()
.field("type", "Point")
.startArray("coordinates").value(176.0).value(15.0).endArray()
.endObject()
.startObject()
.field("type", "Point")
.startArray("coordinates").value(76.0).value(-15.0).endArray()
.endObject()
.endArray()
.endObject()
);
SourceToParse sourceToParse = new SourceToParse("test", "_doc", "1", arrayedDoc, XContentType.JSON);
ParsedDocument document = mapper.parse(sourceToParse);
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
ParsedDocument document = mapper.parse(source(b -> {
b.startArray("field");
{
b.startObject().field("type", "Point").startArray("coordinates").value(176.0).value(15.0).endArray().endObject();
b.startObject().field("type", "Point").startArray("coordinates").value(76.0).value(-15.0).endArray().endObject();
}
b.endArray();
}));
assertThat(document.docs(), hasSize(1));
IndexableField[] fields = document.docs().get(0).getFields("shape.type");
assertThat(fields.length, equalTo(2));
}
public String toXContentString(GeoShapeFieldMapper mapper, boolean includeDefaults) throws IOException {
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
ToXContent.Params params;
if (includeDefaults) {
params = new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true"));
} else {
params = ToXContent.EMPTY_PARAMS;
}
mapper.doXContentBody(builder, includeDefaults, params);
return Strings.toString(builder.endObject());
}
public String toXContentString(GeoShapeFieldMapper mapper) throws IOException {
return toXContentString(mapper, true);
assertThat(document.docs().get(0).getFields("field").length, equalTo(2));
}
public void testFetchSourceValue() {
@ -396,4 +246,9 @@ public class GeoShapeFieldMapperTests extends FieldMapperTestCase<GeoShapeFieldM
assertEquals(List.of(jsonLineString, jsonPoint), fetchSourceValue(mapper, sourceValue, null));
assertEquals(List.of(wktLineString, wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
}
@Override
protected boolean supportsMeta() {
return false;
}
}

View File

@ -34,11 +34,9 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.CharFilterFactory;
import org.elasticsearch.index.analysis.CustomAnalyzer;
@ -62,7 +60,6 @@ import java.util.Map;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents;
import static org.elasticsearch.index.mapper.FieldMapperTestCase.fetchSourceValue;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
@ -96,7 +93,7 @@ public class KeywordFieldMapperTests extends MapperTestCase {
}
@Override
protected IndexAnalyzers createIndexAnalyzers() {
protected IndexAnalyzers createIndexAnalyzers(IndexSettings indexSettings) {
return new IndexAnalyzers(
singletonMap("default", new NamedAnalyzer("default", AnalyzerScope.INDEX, new StandardAnalyzer())),
org.elasticsearch.common.collect.Map.of(
@ -139,16 +136,7 @@ public class KeywordFieldMapperTests extends MapperTestCase {
DocumentMapper mapper = createDocumentMapper(mapping);
assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "1234").endObject()),
XContentType.JSON
)
);
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234")));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
@ -176,69 +164,25 @@ public class KeywordFieldMapperTests extends MapperTestCase {
public void testIgnoreAbove() throws IOException {
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "keyword").field("ignore_above", 5)));
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "elk").endObject()),
XContentType.JSON
)
);
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "elk")));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "elasticsearch").endObject()),
XContentType.JSON
)
);
doc = mapper.parse(source(b -> b.field("field", "elasticsearch")));
fields = doc.rootDoc().getFields("field");
assertEquals(0, fields.length);
}
public void testNullValue() throws IOException {
DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping));
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField("field").endObject()),
XContentType.JSON
)
);
ParsedDocument doc = mapper.parse(source(b -> b.nullField("field")));
assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field"));
mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "keyword").field("null_value", "uri")));
doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()),
XContentType.JSON
)
);
doc = mapper.parse(source(b -> {}));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(0, fields.length);
doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField("field").endObject()),
XContentType.JSON
)
);
doc = mapper.parse(source(b -> b.nullField("field")));
fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
assertEquals(new BytesRef("uri"), fields[0].binaryValue());
@ -246,16 +190,7 @@ public class KeywordFieldMapperTests extends MapperTestCase {
public void testEnableStore() throws IOException {
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "keyword").field("store", true)));
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "1234").endObject()),
XContentType.JSON
)
);
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234")));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
assertTrue(fields[0].fieldType().stored());
@ -263,16 +198,7 @@ public class KeywordFieldMapperTests extends MapperTestCase {
public void testDisableIndex() throws IOException {
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "keyword").field("index", false)));
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "1234").endObject()),
XContentType.JSON
)
);
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234")));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
assertEquals(IndexOptions.NONE, fields[0].fieldType().indexOptions());
@ -281,16 +207,7 @@ public class KeywordFieldMapperTests extends MapperTestCase {
public void testDisableDocValues() throws IOException {
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "keyword").field("doc_values", false)));
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "1234").endObject()),
XContentType.JSON
)
);
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234")));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
assertEquals(DocValuesType.NONE, fields[0].fieldType().docValuesType());
@ -298,16 +215,7 @@ public class KeywordFieldMapperTests extends MapperTestCase {
public void testIndexOptions() throws IOException {
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "keyword").field("index_options", "freqs")));
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "1234").endObject()),
XContentType.JSON
)
);
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234")));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
assertEquals(IndexOptions.DOCS_AND_FREQS, fields[0].fieldType().indexOptions());
@ -333,16 +241,7 @@ public class KeywordFieldMapperTests extends MapperTestCase {
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "keyword").field("doc_values", false).field("norms", true))
);
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "1234").endObject()),
XContentType.JSON
)
);
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234")));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
assertFalse(fields[0].fieldType().omitNorms());
@ -353,15 +252,7 @@ public class KeywordFieldMapperTests extends MapperTestCase {
public void testNormalizer() throws IOException {
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "keyword").field("normalizer", "lowercase")));
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"_doc",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "AbC").endObject()),
XContentType.JSON
)
);
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "AbC")));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
@ -387,12 +278,9 @@ public class KeywordFieldMapperTests extends MapperTestCase {
public void testParsesKeywordNestedEmptyObjectStrict() throws IOException {
DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
BytesReference source = BytesReference.bytes(
XContentFactory.jsonBuilder().startObject().startObject("field").endObject().endObject()
);
MapperParsingException ex = expectThrows(
MapperParsingException.class,
() -> defaultMapper.parse(new SourceToParse("test", "_doc", "1", source, XContentType.JSON))
() -> defaultMapper.parse(source(b -> b.startObject("field").endObject()))
);
assertEquals(
"failed to parse field [field] of type [keyword] in document with id '1'. " + "Preview of field's value: '{}'",
@ -402,36 +290,33 @@ public class KeywordFieldMapperTests extends MapperTestCase {
public void testParsesKeywordNestedListStrict() throws IOException {
DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
BytesReference source = BytesReference.bytes(
XContentFactory.jsonBuilder()
.startObject()
.startArray("field")
.startObject()
.startArray("array_name")
.value("inner_field_first")
.value("inner_field_second")
.endArray()
.endObject()
.endArray()
.endObject());
MapperParsingException ex = expectThrows(MapperParsingException.class,
() -> defaultMapper.parse(new SourceToParse("test", "_doc", "1", source, XContentType.JSON)));
assertEquals("failed to parse field [field] of type [keyword] in document with id '1'. " +
"Preview of field's value: '{array_name=[inner_field_first, inner_field_second]}'", ex.getMessage());
MapperParsingException ex = expectThrows(MapperParsingException.class, () -> defaultMapper.parse(source(b -> {
b.startArray("field");
{
b.startObject();
{
b.startArray("array_name").value("inner_field_first").value("inner_field_second").endArray();
}
b.endObject();
}
b.endArray();
})));
assertEquals(
"failed to parse field [field] of type [keyword] in document with id '1'. "
+ "Preview of field's value: '{array_name=[inner_field_first, inner_field_second]}'",
ex.getMessage()
);
}
public void testParsesKeywordNullStrict() throws IOException {
DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
BytesReference source = BytesReference.bytes(
XContentFactory.jsonBuilder().startObject().startObject("field").nullField("field_name").endObject().endObject()
);
MapperParsingException ex = expectThrows(
Exception e = expectThrows(
MapperParsingException.class,
() -> defaultMapper.parse(new SourceToParse("test", "_doc", "1", source, XContentType.JSON))
() -> defaultMapper.parse(source(b -> b.startObject("field").nullField("field_name").endObject()))
);
assertEquals(
"failed to parse field [field] of type [keyword] in document with id '1'. " + "Preview of field's value: '{field_name=null}'",
ex.getMessage()
e.getMessage()
);
}
@ -523,7 +408,7 @@ public class KeywordFieldMapperTests extends MapperTestCase {
assertEquals(org.elasticsearch.common.collect.List.of("42"), fetchSourceValue(ignoreAboveMapper, 42L));
assertEquals(org.elasticsearch.common.collect.List.of("true"), fetchSourceValue(ignoreAboveMapper, true));
KeywordFieldMapper normalizerMapper = new KeywordFieldMapper.Builder("field", createIndexAnalyzers()).normalizer("lowercase")
KeywordFieldMapper normalizerMapper = new KeywordFieldMapper.Builder("field", createIndexAnalyzers(null)).normalizer("lowercase")
.build(context);
assertEquals(org.elasticsearch.common.collect.List.of("value"), fetchSourceValue(normalizerMapper, "VALUE"));
assertEquals(org.elasticsearch.common.collect.List.of("42"), fetchSourceValue(normalizerMapper, 42L));

View File

@ -24,14 +24,11 @@ import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.geo.SpatialStrategy;
@ -40,21 +37,18 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.geometry.Point;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin;
import org.junit.Before;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.index.mapper.AbstractGeometryFieldMapper.Names.IGNORE_Z_VALUE;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
@ -63,7 +57,7 @@ import static org.hamcrest.Matchers.not;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGeoShapeFieldMapper.Builder> {
public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase2<LegacyGeoShapeFieldMapper.Builder> {
@Override
protected LegacyGeoShapeFieldMapper.Builder newBuilder() {
@ -104,23 +98,26 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(InternalSettingsPlugin.class, TestGeoShapeFieldMapperPlugin.class);
protected Collection<? extends Plugin> getPlugins() {
return List.of(new TestGeoShapeFieldMapperPlugin());
}
@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "geo_shape").field("strategy", "recursive");
}
@Override
protected boolean supportsMeta() {
return false;
}
public void testDefaultConfiguration() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("strategy", "recursive")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
XContentBuilder mapping = fieldMapping(this::minimalMapping);
DocumentMapper mapper = createDocumentMapper(mapping);
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
assertEquals(mapping, defaultMapper.mappingSource().toString());
assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
assertThat(geoShapeFieldMapper.fieldType().tree(),
@ -139,82 +136,50 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
* Test that orientation parameter correctly parses
*/
public void testOrientationParsing() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "quadtree")
.field("orientation", "left")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "geo_shape").field("tree", "quadtree").field("orientation", "left"))
);
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
ShapeBuilder.Orientation orientation = ((LegacyGeoShapeFieldMapper)fieldMapper).fieldType().orientation();
assertThat(orientation, equalTo(ShapeBuilder.Orientation.CLOCKWISE));
assertThat(orientation, equalTo(ShapeBuilder.Orientation.LEFT));
assertThat(orientation, equalTo(ShapeBuilder.Orientation.CW));
// explicit right orientation test
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "quadtree")
.field("orientation", "right")
.endObject().endObject()
.endObject().endObject());
defaultMapper = createIndex("test2").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
fieldMapper = defaultMapper.mappers().getMapper("location");
mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "geo_shape").field("tree", "quadtree").field("orientation", "right"))
);
fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
orientation = ((LegacyGeoShapeFieldMapper)fieldMapper).fieldType().orientation();
assertThat(orientation, equalTo(ShapeBuilder.Orientation.COUNTER_CLOCKWISE));
assertThat(orientation, equalTo(ShapeBuilder.Orientation.RIGHT));
assertThat(orientation, equalTo(ShapeBuilder.Orientation.CCW));
assertFieldWarnings("tree");
assertFieldWarnings("tree", "strategy");
}
/**
* Test that coerce parameter correctly parses
*/
public void testCoerceParsing() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "quadtree")
.field("coerce", "true")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "geo_shape").field("tree", "quadtree").field("coerce", true))
);
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
boolean coerce = ((LegacyGeoShapeFieldMapper)fieldMapper).coerce().value();
assertThat(coerce, equalTo(true));
// explicit false coerce test
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "quadtree")
.field("coerce", "false")
.endObject().endObject()
.endObject().endObject());
defaultMapper = createIndex("test2").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
fieldMapper = defaultMapper.mappers().getMapper("location");
mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "geo_shape").field("tree", "quadtree").field("coerce", false))
);
fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
coerce = ((LegacyGeoShapeFieldMapper)fieldMapper).coerce().value();
assertThat(coerce, equalTo(false));
assertFieldWarnings("tree");
assertFieldWarnings("tree", "strategy");
}
@ -222,36 +187,20 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
* Test that accept_z_value parameter correctly parses
*/
public void testIgnoreZValue() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("strategy", "recursive")
.field(IGNORE_Z_VALUE.getPreferredName(), "true")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "geo_shape").field("tree", "quadtree").field("ignore_z_value", true))
);
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
boolean ignoreZValue = ((LegacyGeoShapeFieldMapper)fieldMapper).ignoreZValue().value();
assertThat(ignoreZValue, equalTo(true));
// explicit false accept_z_value test
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "quadtree")
.field(IGNORE_Z_VALUE.getPreferredName(), "false")
.endObject().endObject()
.endObject().endObject());
defaultMapper = createIndex("test2").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
fieldMapper = defaultMapper.mappers().getMapper("location");
mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "geo_shape").field("tree", "quadtree").field("ignore_z_value", false))
);
fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
ignoreZValue = ((LegacyGeoShapeFieldMapper)fieldMapper).ignoreZValue().value();
assertThat(ignoreZValue, equalTo(false));
assertFieldWarnings("strategy", "tree");
@ -261,55 +210,33 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
* Test that ignore_malformed parameter correctly parses
*/
public void testIgnoreMalformedParsing() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "quadtree")
.field("ignore_malformed", "true")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "geo_shape").field("tree", "quadtree").field("ignore_malformed", true))
);
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
Explicit<Boolean> ignoreMalformed = ((LegacyGeoShapeFieldMapper)fieldMapper).ignoreMalformed();
assertThat(ignoreMalformed.value(), equalTo(true));
// explicit false ignore_malformed test
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "quadtree")
.field("ignore_malformed", "false")
.endObject().endObject()
.endObject().endObject());
defaultMapper = createIndex("test2").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
fieldMapper = defaultMapper.mappers().getMapper("location");
mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "geo_shape").field("tree", "quadtree").field("ignore_malformed", false))
);
fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
ignoreMalformed = ((LegacyGeoShapeFieldMapper)fieldMapper).ignoreMalformed();
assertThat(ignoreMalformed.explicit(), equalTo(true));
assertThat(ignoreMalformed.value(), equalTo(false));
assertFieldWarnings("tree");
assertFieldWarnings("tree", "strategy");
}
public void testGeohashConfiguration() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "geohash")
.field("tree_levels", "4")
.field("distance_error_pct", "0.1")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
DocumentMapper mapper = createDocumentMapper(
fieldMapping(
b -> b.field("type", "geo_shape").field("tree", "geohash").field("tree_levels", "4").field("distance_error_pct", "0.1")
)
);
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
@ -318,23 +245,20 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
assertThat(strategy.getDistErrPct(), equalTo(0.1));
assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class));
assertThat(strategy.getGrid().getMaxLevels(), equalTo(4));
assertFieldWarnings("tree", "tree_levels", "distance_error_pct");
assertFieldWarnings("tree", "tree_levels", "distance_error_pct", "strategy");
}
public void testQuadtreeConfiguration() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
DocumentMapper mapper = createDocumentMapper(
fieldMapping(
b -> b.field("type", "geo_shape")
.field("tree", "quadtree")
.field("tree_levels", "6")
.field("distance_error_pct", "0.5")
.field("points_only", true)
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
)
);
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
@ -344,7 +268,7 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class));
assertThat(strategy.getGrid().getMaxLevels(), equalTo(6));
assertThat(strategy.isPointsOnly(), equalTo(true));
assertFieldWarnings("tree", "tree_levels", "distance_error_pct", "points_only");
assertFieldWarnings("tree", "tree_levels", "distance_error_pct", "points_only", "strategy");
}
private void assertFieldWarnings(String... fieldNames) {
@ -357,22 +281,17 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
}
public void testLevelPrecisionConfiguration() throws IOException {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
{
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
DocumentMapper mapper = createDocumentMapper(
fieldMapping(
b -> b.field("type", "geo_shape")
.field("tree", "quadtree")
.field("tree_levels", "6")
.field("precision", "70m")
.field("distance_error_pct", "0.5")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
)
);
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
@ -385,18 +304,12 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
}
{
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "quadtree")
.field("tree_levels", "26")
.field("precision", "70m")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
DocumentMapper mapper = createDocumentMapper(
fieldMapping(
b -> b.field("type", "geo_shape").field("tree", "quadtree").field("tree_levels", "26").field("precision", "70m")
)
);
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
@ -411,18 +324,16 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
}
{
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
DocumentMapper mapper = createDocumentMapper(
fieldMapping(
b -> b.field("type", "geo_shape")
.field("tree", "geohash")
.field("tree_levels", "6")
.field("precision", "70m")
.field("distance_error_pct", "0.5")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
)
);
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
@ -435,18 +346,16 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
}
{
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
DocumentMapper mapper = createDocumentMapper(
fieldMapping(
b -> b.field("type", "geo_shape")
.field("tree", "geohash")
.field("tree_levels", GeoUtils.geoHashLevelsForPrecision(70d)+1)
.field("tree_levels", GeoUtils.geoHashLevelsForPrecision(70d) + 1)
.field("precision", "70m")
.field("distance_error_pct", "0.5")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
)
);
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
@ -454,22 +363,20 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
assertThat(strategy.getDistErrPct(), equalTo(0.5));
assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class));
assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(70d)+1));
assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(70d) + 1));
}
{
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
DocumentMapper mapper = createDocumentMapper(
fieldMapping(
b -> b.field("type", "geo_shape")
.field("tree", "quadtree")
.field("tree_levels", GeoUtils.quadTreeLevelsForPrecision(70d)+1)
.field("tree_levels", GeoUtils.quadTreeLevelsForPrecision(70d) + 1)
.field("precision", "70m")
.field("distance_error_pct", "0.5")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
)
);
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
@ -477,23 +384,16 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
assertThat(strategy.getDistErrPct(), equalTo(0.5));
assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class));
assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.quadTreeLevelsForPrecision(70d)+1));
assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.quadTreeLevelsForPrecision(70d) + 1));
}
assertFieldWarnings("tree", "tree_levels", "precision", "distance_error_pct");
assertFieldWarnings("tree", "tree_levels", "precision", "distance_error_pct", "strategy");
}
public void testPointsOnlyOption() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "geohash")
.field("points_only", true)
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "geo_shape").field("tree", "geohash").field("points_only", true))
);
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
@ -501,23 +401,15 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class));
assertThat(strategy.isPointsOnly(), equalTo(true));
assertFieldWarnings("tree", "points_only");
assertFieldWarnings("tree", "points_only", "strategy");
}
public void testLevelDefaults() throws IOException {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
{
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "quadtree")
.field("distance_error_pct", "0.5")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "geo_shape").field("tree", "quadtree").field("distance_error_pct", "0.5"))
);
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
@ -530,16 +422,10 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
}
{
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "geohash")
.field("distance_error_pct", "0.5")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "geo_shape").field("tree", "geohash").field("distance_error_pct", "0.5"))
);
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
@ -550,47 +436,41 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
/* 50m is default */
assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(50d)));
}
assertFieldWarnings("tree", "distance_error_pct");
assertFieldWarnings("tree", "distance_error_pct", "strategy");
}
public void testGeoShapeMapperMerge() throws Exception {
String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("shape").field("type", "geo_shape").field("tree", "geohash")
.field("strategy", "recursive")
.field("precision", "1m").field("tree_levels", 8).field("distance_error_pct", 0.01)
.field("orientation", "ccw")
.endObject().endObject().endObject().endObject());
MapperService mapperService = createIndex("test").mapperService();
DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(stage1Mapping),
MapperService.MergeReason.MAPPING_UPDATE);
Mapper fieldMapper = docMapper.mappers().getMapper("shape");
MapperService mapperService = createMapperService(
fieldMapping(
b -> b.field("type", "geo_shape")
.field("tree", "geohash")
.field("strategy", "recursive")
.field("precision", "1m")
.field("tree_levels", 8)
.field("distance_error_pct", 0.01)
.field("orientation", "ccw")
)
);
Mapper fieldMapper = mapperService.documentMapper().mappers().getMapper("field");
LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CCW));
String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("shape").field("type", "geo_shape")
.field("tree", "quadtree")
.field("strategy", "term").field("precision", "1km")
.field("tree_levels", 26).field("distance_error_pct", 26)
.field("orientation", "cw").endObject().endObject().endObject().endObject());
try {
mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("mapper [shape] has different [strategy]"));
assertThat(e.getMessage(), containsString("mapper [shape] has different [tree]"));
assertThat(e.getMessage(), containsString("mapper [shape] has different [tree_levels]"));
assertThat(e.getMessage(), containsString("mapper [shape] has different [precision]"));
}
Exception e = expectThrows(IllegalArgumentException.class, () -> merge(mapperService, fieldMapping(b -> b.field("type", "geo_shape")
.field("tree", "quadtree")
.field("strategy", "term").field("precision", "1km")
.field("tree_levels", 26).field("distance_error_pct", 26)
.field("orientation", "cw"))));
assertThat(e.getMessage(), containsString("mapper [field] has different [strategy]"));
assertThat(e.getMessage(), containsString("mapper [field] has different [tree]"));
assertThat(e.getMessage(), containsString("mapper [field] has different [tree_levels]"));
assertThat(e.getMessage(), containsString("mapper [field] has different [precision]"));
// verify nothing changed
fieldMapper = docMapper.mappers().getMapper("shape");
fieldMapper = mapperService.documentMapper().mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy();
assertThat(strategy, instanceOf(RecursivePrefixTreeStrategy.class));
assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class));
assertThat(strategy.getDistErrPct(), equalTo(0.01));
@ -598,16 +478,13 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CCW));
// correct mapping
stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("shape").field("type", "geo_shape")
.field("tree", "geohash")
.field("strategy", "recursive")
.field("precision", "1m")
.field("tree_levels", 8).field("distance_error_pct", 0.001)
.field("orientation", "cw").endObject().endObject().endObject().endObject());
docMapper = mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE);
fieldMapper = docMapper.mappers().getMapper("shape");
merge(mapperService, fieldMapping(b -> b.field("type", "geo_shape")
.field("tree", "geohash")
.field("strategy", "recursive")
.field("precision", "1m")
.field("tree_levels", 8).field("distance_error_pct", 0.001)
.field("orientation", "cw")));
fieldMapper = mapperService.documentMapper().mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
@ -622,90 +499,45 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
assertFieldWarnings("tree", "strategy", "precision", "tree_levels", "distance_error_pct");
}
public void testEmptyName() throws Exception {
// after 5.x
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("")
.field("type", "geo_shape")
.field("tree", "quadtree")
.endObject().endObject()
.endObject().endObject());
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type1", new CompressedXContent(mapping))
);
assertThat(e.getMessage(), containsString("fieldName is required"));
assertFieldWarnings("tree");
}
public void testSerializeDefaults() throws Exception {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
ToXContent.Params includeDefaults = new ToXContent.MapParams(singletonMap("include_defaults", "true"));
{
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "quadtree")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
String serialized = toXContentString((LegacyGeoShapeFieldMapper) defaultMapper.mappers().getMapper("location"));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_shape").field("tree", "quadtree")));
String serialized = Strings.toString(mapper.mappers().getMapper("field"), includeDefaults);
assertTrue(serialized, serialized.contains("\"precision\":\"50.0m\""));
assertTrue(serialized, serialized.contains("\"tree_levels\":21"));
}
{
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "geohash")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
String serialized = toXContentString((LegacyGeoShapeFieldMapper) defaultMapper.mappers().getMapper("location"));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_shape").field("tree", "geohash")));
String serialized = Strings.toString(mapper.mappers().getMapper("field"), includeDefaults);
assertTrue(serialized, serialized.contains("\"precision\":\"50.0m\""));
assertTrue(serialized, serialized.contains("\"tree_levels\":9"));
}
{
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "quadtree")
.field("tree_levels", "6")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
String serialized = toXContentString((LegacyGeoShapeFieldMapper) defaultMapper.mappers().getMapper("location"));
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "geo_shape").field("tree", "quadtree").field("tree_levels", "6"))
);
String serialized = Strings.toString(mapper.mappers().getMapper("field"), includeDefaults);
assertFalse(serialized, serialized.contains("\"precision\":"));
assertTrue(serialized, serialized.contains("\"tree_levels\":6"));
}
{
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "quadtree")
.field("precision", "6")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
String serialized = toXContentString((LegacyGeoShapeFieldMapper) defaultMapper.mappers().getMapper("location"));
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "geo_shape").field("tree", "quadtree").field("precision", "6"))
);
String serialized = Strings.toString(mapper.mappers().getMapper("field"), includeDefaults);
assertTrue(serialized, serialized.contains("\"precision\":\"6.0m\""));
assertFalse(serialized, serialized.contains("\"tree_levels\":"));
}
{
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "quadtree")
.field("precision", "6m")
.field("tree_levels", "5")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
String serialized = toXContentString((LegacyGeoShapeFieldMapper) defaultMapper.mappers().getMapper("location"));
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "geo_shape").field("tree", "quadtree").field("precision", "6m").field("tree_levels", "5"))
);
String serialized = Strings.toString(mapper.mappers().getMapper("field"), includeDefaults);
assertTrue(serialized, serialized.contains("\"precision\":\"6.0m\""));
assertTrue(serialized, serialized.contains("\"tree_levels\":5"));
}
assertFieldWarnings("tree", "tree_levels", "precision");
assertFieldWarnings("tree", "tree_levels", "precision", "strategy");
}
public void testPointsOnlyDefaultsWithTermStrategy() throws IOException {
@ -718,9 +550,10 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "geo_shape").field("tree", "quadtree").field("precision", "10m").field("strategy", "term"))
);
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
@ -731,26 +564,23 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
assertThat(strategy.getGrid().getMaxLevels(), equalTo(23));
assertThat(strategy.isPointsOnly(), equalTo(true));
// term strategy changes the default for points_only, check that we handle it correctly
assertThat(toXContentString(geoShapeFieldMapper, false), not(containsString("points_only")));
assertThat(Strings.toString(geoShapeFieldMapper), not(containsString("points_only")));
assertFieldWarnings("tree", "precision", "strategy");
}
public void testPointsOnlyFalseWithTermStrategy() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "quadtree")
.field("precision", "10m")
.field("strategy", "term")
.field("points_only", false)
.endObject().endObject()
.endObject().endObject());
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class,
() -> parser.parse("type1", new CompressedXContent(mapping))
Exception e = expectThrows(
MapperParsingException.class,
() -> createMapperService(
fieldMapping(
b -> b.field("type", "geo_shape")
.field("tree", "quadtree")
.field("precision", "10m")
.field("strategy", "term")
.field("points_only", false)
)
)
);
assertThat(e.getMessage(), containsString("points_only cannot be set to false for term strategy"));
assertFieldWarnings("tree", "precision", "strategy", "points_only");
@ -759,31 +589,26 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
public void testDisallowExpensiveQueries() throws IOException {
QueryShardContext queryShardContext = mock(QueryShardContext.class);
when(queryShardContext.allowExpensiveQueries()).thenReturn(false);
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location")
.field("type", "geo_shape")
.field("tree", "quadtree")
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_shape").field("tree", "quadtree")));
Mapper fieldMapper = mapper.mappers().getMapper("field");
assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> geoShapeFieldMapper.fieldType().geometryQueryBuilder().process(
new Point(-10, 10), "location", SpatialStrategy.TERM, ShapeRelation.INTERSECTS, queryShardContext));
assertEquals("[geo-shape] queries on [PrefixTree geo shapes] cannot be executed when " +
"'search.allow_expensive_queries' is set to false.", e.getMessage());
assertFieldWarnings("tree");
assertFieldWarnings("tree", "strategy");
}
@Override
public void testSerialization() throws IOException {
super.testSerialization();
protected void assertParseMinimalWarnings() {
assertWarnings("Field parameter [strategy] is deprecated and will be removed in a future version.");
}
@Override
protected void assertSerializationWarnings() {
assertWarnings("Field parameter [strategy] is deprecated and will be removed in a future version.",
"Field parameter [tree] is deprecated and will be removed in a future version.",
"Field parameter [tree_levels] is deprecated and will be removed in a future version.",
@ -793,58 +618,19 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase<LegacyGe
}
public void testGeoShapeArrayParsing() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("location")
.field("type", "geo_shape")
.field("tree", "quadtree")
.endObject()
.endObject()
.endObject()
.endObject());
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser()
.parse("_doc", new CompressedXContent(mapping));
BytesReference arrayedDoc = BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startArray("shape")
.startObject()
.field("type", "Point")
.startArray("coordinates").value(176.0).value(15.0).endArray()
.endObject()
.startObject()
.field("type", "Point")
.startArray("coordinates").value(76.0).value(-15.0).endArray()
.endObject()
.endArray()
.endObject()
);
SourceToParse sourceToParse = new SourceToParse("test", "_doc", "1", arrayedDoc, XContentType.JSON);
ParsedDocument document = mapper.parse(sourceToParse);
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_shape").field("tree", "quadtree")));
ParsedDocument document = mapper.parse(source(b -> {
b.startArray("field");
{
b.startObject().field("type", "Point").startArray("coordinates").value(176.0).value(15.0).endArray().endObject();
b.startObject().field("type", "Point").startArray("coordinates").value(76.0).value(-15.0).endArray().endObject();
}
b.endArray();
}));
assertThat(document.docs(), hasSize(1));
IndexableField[] fields = document.docs().get(0).getFields("shape.type");
IndexableField[] fields = document.docs().get(0).getFields("field");
assertThat(fields.length, equalTo(2));
assertFieldWarnings("tree");
}
public String toXContentString(LegacyGeoShapeFieldMapper mapper, boolean includeDefaults) throws IOException {
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
ToXContent.Params params;
if (includeDefaults) {
params = new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true"));
} else {
params = ToXContent.EMPTY_PARAMS;
}
mapper.doXContentBody(builder, includeDefaults, params);
return Strings.toString(builder.endObject());
}
public String toXContentString(LegacyGeoShapeFieldMapper mapper) throws IOException {
return toXContentString(mapper, true);
assertFieldWarnings("tree", "strategy");
}
public void testFetchSourceValue() {

View File

@ -19,61 +19,52 @@
package org.elasticsearch.index.mapper;
import com.carrotsearch.randomizedtesting.annotations.Timeout;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
import org.elasticsearch.index.mapper.NumberFieldTypeTests.OutOfRangeSpec;
import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.rest.RestStatus;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.mapper.FieldMapperTestCase.fetchSourceValue;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.containsString;
public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
@Override
protected void setTypeList() {
TYPES = new HashSet<>(Arrays.asList("byte", "short", "integer", "long", "float", "double", "half_float"));
WHOLE_TYPES = new HashSet<>(Arrays.asList("byte", "short", "integer", "long"));
protected Set<String> types() {
return org.elasticsearch.common.collect.Set.of("byte", "short", "integer", "long", "float", "double", "half_float");
}
@Override
protected Set<String> wholeTypes() {
return org.elasticsearch.common.collect.Set.of("byte", "short", "integer", "long");
}
@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "long");
}
@Override
public void doTestDefaults(String type) throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).endObject().endObject()
.endObject().endObject());
XContentBuilder mapping = fieldMapping(b -> b.field("type", type));
DocumentMapper mapper = createDocumentMapper(mapping);
assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 123)
.endObject()),
XContentType.JSON));
ParsedDocument doc = mapper.parse(source(b -> b.field("field", 123)));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
@ -88,20 +79,8 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
@Override
public void doTestNotIndexed(String type) throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).field("index", false).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 123)
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", type).field("index", false)));
ParsedDocument doc = mapper.parse(source(b -> b.field("field", 123)));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
@ -111,20 +90,8 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
@Override
public void doTestNoDocValues(String type) throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).field("doc_values", false).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 123)
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", type).field("doc_values", false)));
ParsedDocument doc = mapper.parse(source(b -> b.field("field", 123)));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
@ -135,20 +102,8 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
@Override
public void doTestStore(String type) throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).field("store", true).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 123)
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", type).field("store", true)));
ParsedDocument doc = mapper.parse(source(b -> b.field("field", 123)));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(3, fields.length);
@ -164,20 +119,8 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
@Override
public void doTestCoerce(String type) throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "123")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", type)));
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "123")));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
@ -187,59 +130,27 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
IndexableField dvField = fields[1];
assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType());
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).field("coerce", false).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper2.mappingSource().toString());
ThrowingRunnable runnable = () -> mapper2.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "123")
.endObject()),
XContentType.JSON));
MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
DocumentMapper mapper2 = createDocumentMapper(fieldMapping(b -> b.field("type", type).field("coerce", false)));
MapperParsingException e = expectThrows(MapperParsingException.class, () -> mapper2.parse(source(b -> b.field("field", "123"))));
assertThat(e.getCause().getMessage(), containsString("passed as String"));
}
@Override
protected void doTestDecimalCoerce(String type) throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "7.89")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", type)));
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "7.89")));
IndexableField[] fields = doc.rootDoc().getFields("field");
IndexableField pointField = fields[0];
assertEquals(7, pointField.numericValue().doubleValue(), 0d);
}
public void testIgnoreMalformed() throws Exception {
for (String type : TYPES) {
for (String type : types()) {
DocumentMapper notIgnoring = createDocumentMapper(fieldMapping(b -> b.field("type", type)));
DocumentMapper ignoring = createDocumentMapper(fieldMapping(b -> b.field("type", type).field("ignore_malformed", true)));
for (Object malformedValue : new Object[] { "a", Boolean.FALSE }) {
String mapping = Strings.toString(jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("field").field("type", type).endObject().endObject().endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ThrowingRunnable runnable = () -> mapper.parse(new SourceToParse("test", "type", "1",
BytesReference.bytes(jsonBuilder().startObject().field("field", malformedValue).endObject()), XContentType.JSON));
MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
SourceToParse source = source(b -> b.field("field", malformedValue));
MapperParsingException e = expectThrows(MapperParsingException.class, () -> notIgnoring.parse(source));
if (malformedValue instanceof String) {
assertThat(e.getCause().getMessage(), containsString("For input string: \"a\""));
} else {
@ -247,14 +158,7 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
assertThat(e.getCause().getMessage(), containsString("not numeric, can not use numeric value accessors"));
}
mapping = Strings.toString(jsonBuilder().startObject().startObject("type").startObject("properties").startObject("field")
.field("type", type).field("ignore_malformed", true).endObject().endObject().endObject().endObject());
DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = mapper2.parse(new SourceToParse("test", "type", "1",
BytesReference.bytes(jsonBuilder().startObject().field("field", malformedValue).endObject()), XContentType.JSON));
ParsedDocument doc = ignoring.parse(source);
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(0, fields.length);
assertArrayEquals(new String[] { "field" }, TermVectorsService.getValues(doc.rootDoc().getFields("_ignored")));
@ -266,19 +170,13 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
* Test that in case the malformed value is an xContent object we throw error regardless of `ignore_malformed`
*/
public void testIgnoreMalformedWithObject() throws Exception {
for (String type : TYPES) {
Object malformedValue = (ToXContentObject) (builder, params) -> builder.startObject().field("foo", "bar").endObject();
SourceToParse malformed = source(b -> b.startObject("field").field("foo", "bar").endObject());
for (String type : types()) {
for (Boolean ignoreMalformed : new Boolean[] { true, false }) {
String mapping = Strings.toString(
jsonBuilder().startObject().startObject("type").startObject("properties").startObject("field").field("type", type)
.field("ignore_malformed", ignoreMalformed).endObject().endObject().endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
MapperParsingException e = expectThrows(MapperParsingException.class,
() -> mapper.parse(new SourceToParse("test", "type", "1",
BytesReference.bytes(jsonBuilder().startObject().field("field", malformedValue).endObject()),
XContentType.JSON)));
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", type).field("ignore_malformed", ignoreMalformed))
);
MapperParsingException e = expectThrows(MapperParsingException.class, () -> mapper.parse(malformed));
assertThat(e.getCause().getMessage(), containsString("Current token"));
assertThat(e.getCause().getMessage(), containsString("not numeric, can not use numeric value accessors"));
}
@ -287,51 +185,14 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
@Override
protected void doTestNullValue(String type) throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("type")
.startObject("properties")
.startObject("field")
.field("type", type)
.endObject()
.endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.nullField("field")
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", type)));
SourceToParse source = source(b -> b.nullField("field"));
ParsedDocument doc = mapper.parse(source);
assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field"));
Object missing;
if (Arrays.asList("float", "double", "half_float").contains(type)) {
missing = 123d;
} else {
missing = 123L;
}
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("type")
.startObject("properties")
.startObject("field")
.field("type", type)
.field("null_value", missing)
.endObject()
.endObject()
.endObject().endObject());
mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.nullField("field")
.endObject()),
XContentType.JSON));
Object missing = Arrays.asList("float", "double", "half_float").contains(type) ? 123d : 123L;
mapper = createDocumentMapper(fieldMapping(b -> b.field("type", type).field("null_value", missing)));
doc = mapper.parse(source);
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
IndexableField pointField = fields[0];
@ -343,21 +204,6 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
assertFalse(dvField.fieldType().stored());
}
@Override
public void testEmptyName() throws IOException {
// after version 5
for (String type : TYPES) {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("").field("type", type).endObject().endObject()
.endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping))
);
assertThat(e.getMessage(), containsString("name cannot be empty string"));
}
}
public void testFetchSourceValue() {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
@ -373,9 +219,8 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
assertEquals(org.elasticsearch.common.collect.List.of(2.71f), fetchSourceValue(nullValueMapper, null));
}
@Timeout(millis = 30000)
public void testOutOfRangeValues() throws IOException {
final List<OutOfRangeSpec<Object>> inputs = Arrays.asList(
final List<OutOfRangeSpec> inputs = Arrays.asList(
OutOfRangeSpec.of(NumberType.BYTE, "128", "is out of range for a byte"),
OutOfRangeSpec.of(NumberType.SHORT, "32768", "is out of range for a short"),
OutOfRangeSpec.of(NumberType.INTEGER, "2147483648", "is out of range for an integer"),
@ -419,9 +264,10 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
OutOfRangeSpec.of(NumberType.DOUBLE, Double.NEGATIVE_INFINITY, "[double] supports only finite values")
);
for(OutOfRangeSpec<Object> item: inputs) {
for(OutOfRangeSpec item: inputs) {
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", item.type.typeName())));
try {
parseRequest(item.type, createIndexRequest(item.value));
mapper.parse(source(item::write));
fail("Mapper parsing exception expected for [" + item.type + "] with value [" + item.value + "]");
} catch (MapperParsingException e) {
assertThat("Incorrect error message for [" + item.type + "] with value [" + item.value + "]",
@ -430,56 +276,18 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
}
// the following two strings are in-range for a long after coercion
parseRequest(NumberType.LONG, createIndexRequest("9223372036854775807.9"));
parseRequest(NumberType.LONG, createIndexRequest("-9223372036854775808.9"));
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "long")));
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "9223372036854775807.9")));
assertThat(doc.rootDoc().getFields("field"), arrayWithSize(2));
doc = mapper.parse(source(b -> b.field("field", "-9223372036854775808.9")));
assertThat(doc.rootDoc().getFields("field"), arrayWithSize(2));
}
public void testLongIndexingOutOfRange() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject().startObject("_doc")
.startObject("properties")
.startObject("number")
.field("type", "long")
.field("ignore_malformed", true)
.endObject().endObject()
.endObject().endObject());
createIndex("test57287");
client().admin().indices().preparePutMapping("test57287")
.setType("_doc").setSource(mapping, XContentType.JSON).get();
String doc = "{\"number\" : 9223372036854775808}";
IndexResponse response = client().index(new IndexRequest("test57287").source(doc, XContentType.JSON)).get();
assertSame(response.status(), RestStatus.CREATED);
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "long").field("ignore_malformed", true)));
ParsedDocument doc = mapper.parse(
source(b -> b.rawField("field", new BytesArray("9223372036854775808").streamInput(), XContentType.JSON))
);
assertEquals(0, doc.rootDoc().getFields("field").length);
}
private void parseRequest(NumberType type, BytesReference content) throws IOException {
createDocumentMapper(type).parse(new SourceToParse("test", "type", "1", content, XContentType.JSON));
}
private DocumentMapper createDocumentMapper(NumberType type) throws IOException {
String mapping = Strings
.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("type")
.startObject("properties")
.startObject("field")
.field("type", type.typeName())
.endObject()
.endObject()
.endObject()
.endObject());
return parser.parse("type", new CompressedXContent(mapping));
}
private BytesReference createIndexRequest(Object value) throws IOException {
if (value instanceof BigInteger) {
return BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.rawField("field", new ByteArrayInputStream(value.toString().getBytes(StandardCharsets.UTF_8)), XContentType.JSON)
.endObject());
} else {
return BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject());
}
}
}

View File

@ -20,6 +20,7 @@
package org.elasticsearch.index.mapper;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.FloatPoint;
@ -44,6 +45,8 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
@ -54,6 +57,7 @@ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.MultiValueMode;
import org.junit.Before;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
@ -491,7 +495,7 @@ public class NumberFieldTypeTests extends FieldTypeTestCase {
}
public void testParseOutOfRangeValues() throws IOException {
final List<OutOfRangeSpec<Object>> inputs = Arrays.asList(
final List<OutOfRangeSpec> inputs = Arrays.asList(
OutOfRangeSpec.of(NumberType.BYTE, "128", "out of range for a byte"),
OutOfRangeSpec.of(NumberType.BYTE, 128, "is out of range for a byte"),
OutOfRangeSpec.of(NumberType.BYTE, -129, "is out of range for a byte"),
@ -533,7 +537,7 @@ public class NumberFieldTypeTests extends FieldTypeTestCase {
OutOfRangeSpec.of(NumberType.DOUBLE, Double.NEGATIVE_INFINITY, "[double] supports only finite values")
);
for (OutOfRangeSpec<Object> item: inputs) {
for (OutOfRangeSpec item: inputs) {
try {
item.type.parse(item.value, false);
fail("Parsing exception expected for [" + item.type + "] with value [" + item.value + "]");
@ -544,21 +548,29 @@ public class NumberFieldTypeTests extends FieldTypeTestCase {
}
}
static class OutOfRangeSpec<V> {
static class OutOfRangeSpec {
final NumberType type;
final V value;
final Object value;
final String message;
static <V> OutOfRangeSpec<V> of(NumberType t, V v, String m) {
return new OutOfRangeSpec<>(t, v, m);
static OutOfRangeSpec of(NumberType t, Object v, String m) {
return new OutOfRangeSpec(t, v, m);
}
OutOfRangeSpec(NumberType t, V v, String m) {
OutOfRangeSpec(NumberType t, Object v, String m) {
type = t;
value = v;
message = m;
}
public void write(XContentBuilder b) throws IOException {
if (value instanceof BigInteger) {
b.rawField("field", new ByteArrayInputStream(value.toString().getBytes("UTF-8")), XContentType.JSON);
} else {
b.field("field", value);
}
}
}
public void testDisplayValue() {

View File

@ -24,28 +24,21 @@ import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.InternalSettingsPlugin;
import java.io.IOException;
import java.net.InetAddress;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.index.mapper.FieldMapperTestCase.fetchSourceValue;
import static org.elasticsearch.index.query.RangeQueryBuilder.GTE_FIELD;
import static org.elasticsearch.index.query.RangeQueryBuilder.GT_FIELD;
import static org.elasticsearch.index.query.RangeQueryBuilder.LTE_FIELD;
@ -55,26 +48,36 @@ import static org.hamcrest.Matchers.containsString;
public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
private static final String FROM_DATE = "2016-10-31";
private static final String TO_DATE = "2016-11-01 20:00:00";
private static final String FROM_IP = "::ffff:c0a8:107";
private static final String TO_IP = "2001:db8::";
private static final int FROM = 5;
private static final String FROM_STR = FROM + "";
private static final int TO = 10;
private static final String TO_STR = TO + "";
private static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis";
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(InternalSettingsPlugin.class);
protected Set<String> types() {
return org.elasticsearch.common.collect.Set.of(
"date_range",
"ip_range",
"float_range",
"double_range",
"integer_range",
"long_range"
);
}
private static String FROM_DATE = "2016-10-31";
private static String TO_DATE = "2016-11-01 20:00:00";
private static String FROM_IP = "::ffff:c0a8:107";
private static String TO_IP = "2001:db8::";
private static int FROM = 5;
private static String FROM_STR = FROM + "";
private static int TO = 10;
private static String TO_STR = TO + "";
private static String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis";
@Override
protected Set<String> wholeTypes() {
return org.elasticsearch.common.collect.Set.of("integer_range", "long_range");
}
@Override
protected void setTypeList() {
TYPES = new HashSet<>(Arrays.asList("date_range", "ip_range", "float_range", "double_range", "integer_range", "long_range"));
WHOLE_TYPES = new HashSet<>(Arrays.asList("integer_range", "long_range"));
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "long_range");
}
private Object getFrom(String type) {
@ -116,27 +119,25 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
return Double.POSITIVE_INFINITY;
}
private XContentBuilder rangeFieldMapping(String type, CheckedConsumer<XContentBuilder, IOException> extra) throws IOException {
return fieldMapping(b -> {
b.field("type", type);
if (type.contentEquals("date_range")) {
b.field("format", DATE_FORMAT);
}
extra.accept(b);
});
}
@Override
public void doTestDefaults(String type) throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type);
if (type.equals("date_range")) {
mapping = mapping.field("format", DATE_FORMAT);
}
mapping = mapping.endObject().endObject().endObject().endObject();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping)));
XContentBuilder mapping = rangeFieldMapping(type, b -> {});
DocumentMapper mapper = createDocumentMapper(mapping);
assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field(getFromField(), getFrom(type))
.field(getToField(), getTo(type))
.endObject()
.endObject()),
XContentType.JSON));
ParsedDocument doc = mapper.parse(
source(b -> b.startObject("field").field(getFromField(), getFrom(type)).field(getToField(), getTo(type)).endObject())
);
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
IndexableField dvField = fields[0];
@ -149,49 +150,20 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
@Override
protected void doTestNotIndexed(String type) throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).field("index", false);
if (type.equals("date_range")) {
mapping = mapping.field("format", DATE_FORMAT);
}
mapping = mapping.endObject().endObject().endObject().endObject();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping)));
assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field(getFromField(), getFrom(type))
.field(getToField(), getTo(type))
.endObject()
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(rangeFieldMapping(type, b -> b.field("index", false)));
ParsedDocument doc = mapper.parse(
source(b -> b.startObject("field").field(getFromField(), getFrom(type)).field(getToField(), getTo(type)).endObject())
);
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
}
@Override
protected void doTestNoDocValues(String type) throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).field("doc_values", false);
if (type.equals("date_range")) {
mapping = mapping.field("format", DATE_FORMAT);
}
mapping = mapping.endObject().endObject().endObject().endObject();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping)));
assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field(getFromField(), getFrom(type))
.field(getToField(), getTo(type))
.endObject()
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(rangeFieldMapping(type, b -> b.field("doc_values", false)));
ParsedDocument doc = mapper.parse(
source(b -> b.startObject("field").field(getFromField(), getFrom(type)).field(getToField(), getTo(type)).endObject())
);
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
IndexableField pointField = fields[0];
@ -200,24 +172,10 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
@Override
protected void doTestStore(String type) throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).field("store", true);
if (type.equals("date_range")) {
mapping = mapping.field("format", DATE_FORMAT);
}
mapping = mapping.endObject().endObject().endObject().endObject();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping)));
assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field(getFromField(), getFrom(type))
.field(getToField(), getTo(type))
.endObject()
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(rangeFieldMapping(type, b -> b.field("store", true)));
ParsedDocument doc = mapper.parse(
source(b -> b.startObject("field").field(getFromField(), getFrom(type)).field(getToField(), getTo(type)).endObject())
);
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(3, fields.length);
IndexableField dvField = fields[0];
@ -238,24 +196,10 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
@Override
public void doTestCoerce(String type) throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type);
if (type.equals("date_range")) {
mapping = mapping.field("format", DATE_FORMAT);
}
mapping = mapping.endObject().endObject().endObject().endObject();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping)));
assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field(getFromField(), getFrom(type))
.field(getToField(), getTo(type))
.endObject()
.endObject()),
XContentType.JSON));
DocumentMapper mapper = createDocumentMapper(rangeFieldMapping(type, b -> {}));
ParsedDocument doc = mapper.parse(
source(b -> b.startObject("field").field(getFromField(), getFrom(type)).field(getToField(), getTo(type)).endObject())
);
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
@ -266,19 +210,12 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
// date_range ignores the coerce parameter and epoch_millis date format truncates floats (see issue: #14641)
if (type.equals("date_range") == false) {
DocumentMapper mapper2 = createDocumentMapper(rangeFieldMapping(type, b -> b.field("coerce", false)));
mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties").startObject("field")
.field("type", type).field("coerce", false).endObject().endObject().endObject().endObject();
DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(Strings.toString(mapping)));
assertEquals(Strings.toString(mapping), mapper2.mappingSource().toString());
ThrowingRunnable runnable = () -> mapper2
.parse(new SourceToParse(
"test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().startObject("field")
.field(getFromField(), "5.2").field(getToField(), "10").endObject().endObject()),
XContentType.JSON));
MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
MapperParsingException e = expectThrows(
MapperParsingException.class,
() -> mapper2.parse(source(b -> b.startObject("field").field(getFromField(), "5.2").field(getToField(), "10").endObject()))
);
assertThat(e.getCause().getMessage(), anyOf(containsString("passed as String"), containsString("failed to parse date"),
containsString("is not an IP string literal")));
}
@ -286,31 +223,20 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
@Override
protected void doTestDecimalCoerce(String type) throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type);
DocumentMapper mapper = createDocumentMapper(rangeFieldMapping(type, b -> {}));
mapping = mapping.endObject().endObject().endObject().endObject();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping)));
ParsedDocument doc1 = mapper.parse(
source(
b -> b.startObject("field")
.field(GT_FIELD.getPreferredName(), "2.34")
.field(LT_FIELD.getPreferredName(), "5.67")
.endObject()
)
);
assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
ParsedDocument doc1 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field(GT_FIELD.getPreferredName(), "2.34")
.field(LT_FIELD.getPreferredName(), "5.67")
.endObject()
.endObject()),
XContentType.JSON));
ParsedDocument doc2 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field(GT_FIELD.getPreferredName(), "2")
.field(LT_FIELD.getPreferredName(), "5")
.endObject()
.endObject()),
XContentType.JSON));
ParsedDocument doc2 = mapper.parse(
source(b -> b.startObject("field").field(GT_FIELD.getPreferredName(), "2").field(LT_FIELD.getPreferredName(), "5").endObject())
);
IndexableField[] fields1 = doc1.rootDoc().getFields("field");
IndexableField[] fields2 = doc2.rootDoc().getFields("field");
@ -320,25 +246,12 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
@Override
protected void doTestNullValue(String type) throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).field("store", true);
if (type.equals("date_range")) {
mapping = mapping.field("format", DATE_FORMAT);
}
mapping = mapping.endObject().endObject().endObject().endObject();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping)));
assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
DocumentMapper mapper = createDocumentMapper(rangeFieldMapping(type, b -> b.field("store", true)));
// test null value for min and max
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.nullField(getFromField())
.nullField(getToField())
.endObject()
.endObject()),
XContentType.JSON));
ParsedDocument doc = mapper.parse(
source(b -> b.startObject("field").nullField(getFromField()).nullField(getToField()).endObject())
);
assertEquals(3, doc.rootDoc().getFields("field").length);
IndexableField[] fields = doc.rootDoc().getFields("field");
IndexableField storedField = fields[2];
@ -346,15 +259,7 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
assertThat(storedField.stringValue(), containsString(expected));
// test null max value
doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field(getFromField(), getFrom(type))
.nullField(getToField())
.endObject()
.endObject()),
XContentType.JSON));
doc = mapper.parse(source(b -> b.startObject("field").field(getFromField(), getFrom(type)).nullField(getToField()).endObject()));
fields = doc.rootDoc().getFields("field");
assertEquals(3, fields.length);
IndexableField dvField = fields[0];
@ -374,39 +279,21 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
assertThat(storedField.stringValue(), containsString(strVal));
// test null range
doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.nullField("field")
.endObject()),
XContentType.JSON));
doc = mapper.parse(source(b -> b.nullField("field")));
assertNull(doc.rootDoc().get("field"));
}
public void testNoBounds() throws Exception {
for (String type : TYPES) {
for (String type : types()) {
doTestNoBounds(type);
}
}
public void doTestNoBounds(String type) throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).field("store", true);
if (type.equals("date_range")) {
mapping = mapping.field("format", DATE_FORMAT);
}
mapping = mapping.endObject().endObject().endObject().endObject();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping)));
assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
DocumentMapper mapper = createDocumentMapper(rangeFieldMapping(type, b -> b.field("store", true)));
// test no bounds specified
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.endObject()
.endObject()),
XContentType.JSON));
ParsedDocument doc = mapper.parse(source(b -> b.startObject("field").endObject()));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(3, fields.length);
@ -422,22 +309,16 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
}
public void testIllegalArguments() throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", RangeType.INTEGER.name)
.field("format", DATE_FORMAT).endObject().endObject().endObject().endObject();
ThrowingRunnable runnable = () -> parser.parse("type", new CompressedXContent(Strings.toString(mapping)));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, runnable);
Exception e = expectThrows(
MapperParsingException.class,
() -> createDocumentMapper(fieldMapping(b -> b.field("type", RangeType.INTEGER.name).field("format", DATE_FORMAT)))
);
assertThat(e.getMessage(), containsString("should not define a dateTimeFormatter"));
}
public void testSerializeDefaults() throws Exception {
for (String type : TYPES) {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).endObject().endObject()
.endObject().endObject());
DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping));
for (String type : types()) {
DocumentMapper docMapper = createDocumentMapper(fieldMapping(b -> b.field("type", type)));
RangeFieldMapper mapper = (RangeFieldMapper) docMapper.root().getMapper("field");
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
mapper.doXContentBody(builder, true, ToXContent.EMPTY_PARAMS);
@ -451,21 +332,11 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
}
public void testIllegalFormatField() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("type")
.startObject("properties")
.startObject("field")
.field("type", "date_range")
.array("format", "test_format")
.endObject()
.endObject()
.endObject()
.endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping)));
assertEquals("Invalid format: [[test_format]]: Unknown pattern letter: t", e.getMessage());
Exception e = expectThrows(
MapperParsingException.class,
() -> createMapperService(fieldMapping(b -> b.field("type", "date_range").array("format", "test_format")))
);
assertThat(e.getMessage(), containsString("Invalid format: [[test_format]]: Unknown pattern letter: t"));
}
public void testFetchSourceValue() {

View File

@ -18,141 +18,74 @@
*/
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Set;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasItem;
public abstract class AbstractNumericFieldMapperTestCase extends ESSingleNodeTestCase {
protected Set<String> TYPES;
protected Set<String> WHOLE_TYPES;
protected IndexService indexService;
protected DocumentMapperParser parser;
public abstract class AbstractNumericFieldMapperTestCase extends MapperTestCase {
protected abstract Set<String> types();
protected abstract Set<String> wholeTypes();
@Before
public void setup() {
indexService = createIndex("test");
parser = indexService.mapperService().documentMapperParser();
setTypeList();
public final void testTypesAndWholeTypes() {
for (String wholeType : wholeTypes()) {
assertThat(types(), hasItem(wholeType));
}
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(InternalSettingsPlugin.class);
}
protected abstract void setTypeList();
public void testDefaults() throws Exception {
for (String type : TYPES) {
public final void testDefaults() throws Exception {
for (String type : types()) {
doTestDefaults(type);
}
}
protected abstract void doTestDefaults(String type) throws Exception;
public void testNotIndexed() throws Exception {
for (String type : TYPES) {
public final void testNotIndexed() throws Exception {
for (String type : types()) {
doTestNotIndexed(type);
}
}
protected abstract void doTestNotIndexed(String type) throws Exception;
public void testNoDocValues() throws Exception {
for (String type : TYPES) {
public final void testNoDocValues() throws Exception {
for (String type : types()) {
doTestNoDocValues(type);
}
}
protected abstract void doTestNoDocValues(String type) throws Exception;
public void testStore() throws Exception {
for (String type : TYPES) {
public final void testStore() throws Exception {
for (String type : types()) {
doTestStore(type);
}
}
protected abstract void doTestStore(String type) throws Exception;
public void testCoerce() throws Exception {
for (String type : TYPES) {
public final void testCoerce() throws Exception {
for (String type : types()) {
doTestCoerce(type);
}
}
protected abstract void doTestCoerce(String type) throws IOException;
public void testDecimalCoerce() throws Exception {
for (String type : WHOLE_TYPES) {
public final void testDecimalCoerce() throws Exception {
for (String type : wholeTypes()) {
doTestDecimalCoerce(type);
}
}
protected abstract void doTestDecimalCoerce(String type) throws IOException;
public void testNullValue() throws IOException {
for (String type : TYPES) {
public final void testNullValue() throws IOException {
for (String type : types()) {
doTestNullValue(type);
}
}
protected abstract void doTestNullValue(String type) throws IOException;
public void testEmptyName() throws IOException {
// after version 5
for (String type : TYPES) {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("").field("type", type).endObject().endObject()
.endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping))
);
assertThat(e.getMessage(), containsString("name cannot be empty string"));
}
}
public void testMeta() throws Exception {
for (String type : TYPES) {
IndexService indexService = createIndex("test-" + type);
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", type)
.field("meta", Collections.singletonMap("foo", "bar"))
.endObject().endObject().endObject().endObject());
DocumentMapper mapper = indexService.mapperService().merge("_doc",
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
assertEquals(mapping, mapper.mappingSource().toString());
String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", type)
.endObject().endObject().endObject().endObject());
mapper = indexService.mapperService().merge("_doc",
new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
assertEquals(mapping2, mapper.mappingSource().toString());
String mapping3 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", type)
.field("meta", Collections.singletonMap("baz", "quux"))
.endObject().endObject().endObject().endObject());
mapper = indexService.mapperService().merge("_doc",
new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE);
assertEquals(mapping3, mapper.mappingSource().toString());
}
}
}

View File

@ -202,8 +202,12 @@ public abstract class FieldMapperTestCase2<T extends FieldMapper.Builder<?>> ext
assertSerializes(builder1);
assertSerializes(builder2);
}
assertSerializationWarnings();
}
protected void assertSerializationWarnings() {
// Most mappers don't emit any warnings
}
protected void assertSerializes(T builder) throws IOException {
Mapper.BuilderContext context = new Mapper.BuilderContext(getIndexSettings(), new ContentPath(1));

View File

@ -20,15 +20,23 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.Directory;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.IndexAnalyzers;
@ -40,17 +48,21 @@ import org.elasticsearch.indices.mapper.MapperRegistry;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static java.util.stream.Collectors.toList;
import static org.hamcrest.Matchers.containsString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Base class for testing {@link Mapper}s.
@ -63,10 +75,10 @@ public abstract class MapperTestCase extends ESTestCase {
}
protected Settings getIndexSettings() {
return Settings.EMPTY;
return SETTINGS;
}
protected IndexAnalyzers createIndexAnalyzers() {
protected IndexAnalyzers createIndexAnalyzers(IndexSettings indexSettings) {
return new IndexAnalyzers(
singletonMap("default", new NamedAnalyzer("default", AnalyzerScope.INDEX, new StandardAnalyzer())),
emptyMap(),
@ -103,7 +115,7 @@ public abstract class MapperTestCase extends ESTestCase {
SimilarityService similarityService = new SimilarityService(indexSettings, scriptService, emptyMap());
MapperService mapperService = new MapperService(
indexSettings,
createIndexAnalyzers(),
createIndexAnalyzers(indexSettings),
xContentRegistry(),
similarityService,
mapperRegistry,
@ -114,6 +126,29 @@ public abstract class MapperTestCase extends ESTestCase {
return mapperService;
}
/**
 * Builds a Lucene index via the supplied {@code builder} callback, then runs the
 * supplied {@code test} assertions against a reader over that index. The writer,
 * reader, and directory are all closed before this method returns.
 */
protected final void withLuceneIndex(
    MapperService mapperService,
    CheckedConsumer<RandomIndexWriter, IOException> builder,
    CheckedConsumer<IndexReader, IOException> test
) throws IOException {
    // Index with the analyzer the mapper under test configured.
    IndexWriterConfig config = new IndexWriterConfig(mapperService.indexAnalyzer());
    try (
        Directory directory = newDirectory();
        RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)
    ) {
        builder.accept(writer);
        try (IndexReader reader = writer.getReader()) {
            test.accept(reader);
        }
    }
}
/**
 * Renders a document body from the given callback and wraps it in a
 * {@link SourceToParse} targeting index {@code "test"}, type {@code "_doc"},
 * id {@code "1"}.
 */
protected final SourceToParse source(CheckedConsumer<XContentBuilder, IOException> build) throws IOException {
    XContentBuilder document = JsonXContent.contentBuilder();
    document.startObject();
    build.accept(document);
    document.endObject();
    BytesReference body = BytesReference.bytes(document);
    return new SourceToParse("test", "_doc", "1", body, XContentType.JSON);
}
/**
* Merge a new mapping into the one in the provided {@link MapperService}.
*/
@ -144,9 +179,33 @@ public abstract class MapperTestCase extends ESTestCase {
b.endObject();
})));
assertThat(e.getMessage(), containsString("name cannot be empty string"));
assertParseMinimalWarnings();
}
public final void testMeta() throws Exception {
/**
 * Round-trips the minimal mapping through the parser and asserts that
 * serializing the re-parsed mapping yields exactly the same JSON as the
 * first serialization.
 */
public final void testMinimalSerializesToItself() throws IOException {
    // Serialize the minimal mapping once.
    XContentBuilder original = JsonXContent.contentBuilder();
    original.startObject();
    createMapperService(fieldMapping(this::minimalMapping)).documentMapper().mapping().toXContent(original, ToXContent.EMPTY_PARAMS);
    original.endObject();

    // Parse that serialization and serialize it again.
    XContentBuilder roundTripped = JsonXContent.contentBuilder();
    roundTripped.startObject();
    createMapperService(original).documentMapper().mapping().toXContent(roundTripped, ToXContent.EMPTY_PARAMS);
    roundTripped.endObject();

    assertEquals(Strings.toString(original), Strings.toString(roundTripped));
    assertParseMinimalWarnings();
}
/**
 * Asserts any deprecation warnings expected when parsing {@code minimalMapping}.
 * The default implementation expects none; mappers whose minimal mapping emits
 * warnings override this to assert them.
 */
protected void assertParseMinimalWarnings() {
    // Most mappers don't emit any warnings
}
/**
 * Whether the field type under test accepts a {@code meta} entry in its mapping.
 * Override to return {@code false} to disable the {@code meta} test for fields
 * that don't support it.
 */
protected boolean supportsMeta() {
    return true;
}
public final void testMeta() throws IOException {
assumeTrue("Field doesn't support meta", supportsMeta());
XContentBuilder mapping = fieldMapping(
b -> {
minimalMapping(b);
@ -176,4 +235,20 @@ public abstract class MapperTestCase extends ESTestCase {
XContentHelper.convertToMap(mapperService.documentMapper().mappingSource().uncompressed(), false, mapping.contentType())
);
}
/**
 * Fetches {@code sourceValue} through {@code mapper}'s {@link ValueFetcher}
 * using the default ({@code null}) format.
 */
public static List<?> fetchSourceValue(FieldMapper mapper, Object sourceValue) {
    return fetchSourceValue(mapper, sourceValue, null);
}
/**
 * Runs {@code sourceValue} through the given mapper's {@link ValueFetcher} and
 * returns the fetched values. The {@link MapperService} is mocked so that the
 * mapper's own field name is its only source path.
 */
public static List<?> fetchSourceValue(FieldMapper mapper, Object sourceValue, String format) {
    String fieldName = mapper.name();
    // Stub sourcePath so the fetcher resolves the field to itself.
    MapperService stubMapperService = mock(MapperService.class);
    when(stubMapperService.sourcePath(fieldName)).thenReturn(org.elasticsearch.common.collect.Set.of(fieldName));
    // Present the value under the field's name in a synthetic _source.
    SourceLookup sourceLookup = new SourceLookup();
    sourceLookup.setSource(Collections.singletonMap(fieldName, sourceValue));
    ValueFetcher fetcher = mapper.valueFetcher(stubMapperService, format);
    return fetcher.fetchValues(sourceLookup);
}
}