[GEO] Fix validate_* merge policy for GeoPointFieldMapper

Fail the merge if the validate_lat or validate_lon values of the two mappings are not equal. This prevents inconsistencies between geo_point fields in a merged index, and the parse exceptions those inconsistencies would otherwise cause for bounding_box and distance filters.
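
For illustration, a minimal sketch of the new behaviour, using the DocumentMapper merge API exercised by the test added in this commit (it assumes the ElasticsearchSingleNodeTest context of that test; mappingWithValidate(...) is a hypothetical helper that builds the geo_point mapping JSON with the given validate setting):

    // Two geo_point mappings that differ only in their "validate" option.
    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
    DocumentMapper stage1 = parser.parse(mappingWithValidate(true));   // existing mapping
    DocumentMapper stage2 = parser.parse(mappingWithValidate(false));  // attempted update

    // Merging is now reported as a conflict instead of silently overwriting
    // validateLat/validateLon on the existing mapper:
    DocumentMapper.MergeResult result = stage1.merge(stage2, mergeFlags().simulate(false));
    // result.hasConflicts() == true, and result.conflicts() contains
    // "mapper [point] has different validate_lat" and "... has different validate_lon".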

Also merged the separate GeoPoint test classes into a single GeoPointFieldMapperTests, consistent with GeoShapeFieldMapperTests.

closes #10164
Nicholas Knize 2015-03-19 13:10:58 -05:00
parent c6ce64a5e5
commit e43f3a941b
3 changed files with 141 additions and 146 deletions


@@ -665,11 +665,11 @@ public class GeoPointFieldMapper extends AbstractFieldMapper<GeoPoint> implement
         if (!Objects.equal(this.precisionStep, fieldMergeWith.precisionStep)) {
             mergeContext.addConflict("mapper [" + names.fullName() + "] has different precision_step");
         }
-        if (!mergeContext.mergeFlags().simulate()) {
-            this.validateLat = fieldMergeWith.validateLat;
-            this.validateLon = fieldMergeWith.validateLon;
-        }
+        if (this.validateLat != fieldMergeWith.validateLat) {
+            mergeContext.addConflict("mapper [" + names.fullName() + "] has different validate_lat");
+        }
+        if (this.validateLon != fieldMergeWith.validateLon) {
+            mergeContext.addConflict("mapper [" + names.fullName() + "] has different validate_lon");
+        }
     }


@@ -16,23 +16,123 @@
  * specific language governing permissions and limitations
  * under the License.
  */
 package org.elasticsearch.index.mapper.geo;
 
 import org.elasticsearch.common.geo.GeoHashUtils;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.test.ElasticsearchSingleNodeTest;
 import org.junit.Test;
 
+import java.util.ArrayList;
+import java.util.Arrays;
+
+import static org.elasticsearch.index.mapper.DocumentMapper.MergeFlags.mergeFlags;
 import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.nullValue;
 
-/**
- *
- */
-public class LatLonMappingGeoPointTests extends ElasticsearchSingleNodeTest {
+public class GeoPointFieldMapperTests extends ElasticsearchSingleNodeTest {
+
+    @Test
+    public void testLatLonValues() throws Exception {
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
+                .endObject().endObject().string();
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
+                .startObject()
+                .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject()
+                .endObject()
+                .bytes());
+        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
+        assertThat(doc.rootDoc().getField("point.lat").fieldType().stored(), is(false));
+        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
+        assertThat(doc.rootDoc().getField("point.lon").fieldType().stored(), is(false));
+        assertThat(doc.rootDoc().getField("point.geohash"), nullValue());
+        assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
+    }
+
+    @Test
+    public void testLatLonValuesWithGeohash() throws Exception {
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true).endObject().endObject()
+                .endObject().endObject().string();
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
+                .startObject()
+                .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject()
+                .endObject()
+                .bytes());
+        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
+        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
+        assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
+    }
+
+    @Test
+    public void testLatLonInOneValueWithGeohash() throws Exception {
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true).endObject().endObject()
+                .endObject().endObject().string();
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
+                .startObject()
+                .field("point", "1.2,1.3")
+                .endObject()
+                .bytes());
+        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
+        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
+        assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
+    }
+
+    @Test
+    public void testGeoHashIndexValue() throws Exception {
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true).endObject().endObject()
+                .endObject().endObject().string();
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
+                .startObject()
+                .field("point", GeoHashUtils.encode(1.2, 1.3))
+                .endObject()
+                .bytes());
+        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
+        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
+        assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
+    }
+
+    @Test
+    public void testGeoHashValue() throws Exception {
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
+                .endObject().endObject().string();
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
+                .startObject()
+                .field("point", GeoHashUtils.encode(1.2, 1.3))
+                .endObject()
+                .bytes());
+        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
+        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
+        assertThat(doc.rootDoc().get("point"), notNullValue());
+    }
 
     @Test
     public void testNormalizeLatLonValuesDefault() throws Exception {
@@ -128,7 +228,6 @@ public class LatLonMappingGeoPointTests extends ElasticsearchSingleNodeTest {
         }
     }
 
-
     @Test
     public void testNoValidateLatLonValues() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
@@ -169,28 +268,6 @@ public class LatLonMappingGeoPointTests extends ElasticsearchSingleNodeTest {
                 .bytes());
     }
 
-    @Test
-    public void testLatLonValues() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
-                .endObject().endObject().string();
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
-        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject()
-                .endObject()
-                .bytes());
-        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
-        assertThat(doc.rootDoc().getField("point.lat").fieldType().stored(), is(false));
-        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-        assertThat(doc.rootDoc().getField("point.lon").fieldType().stored(), is(false));
-        assertThat(doc.rootDoc().getField("point.geohash"), nullValue());
-        assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
-    }
-
     @Test
     public void testLatLonValuesStored() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
@@ -307,25 +384,6 @@ public class LatLonMappingGeoPointTests extends ElasticsearchSingleNodeTest {
         assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
     }
 
-    @Test
-    public void testGeoHashValue() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
-                .endObject().endObject().string();
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
-        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("point", GeoHashUtils.encode(1.2, 1.3))
-                .endObject()
-                .bytes());
-        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
-        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-        assertThat(doc.rootDoc().get("point"), notNullValue());
-    }
-
     @Test
     public void testLonLatArray() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
@@ -413,4 +471,34 @@ public class LatLonMappingGeoPointTests extends ElasticsearchSingleNodeTest {
         assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5));
         assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
     }
+
+    @Test
+    public void testGeoPointMapperMerge() throws Exception {
+        String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+                .field("validate", true).endObject().endObject()
+                .endObject().endObject().string();
+        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
+        DocumentMapper stage1 = parser.parse(stage1Mapping);
+        String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+                .field("validate", false).endObject().endObject()
+                .endObject().endObject().string();
+        DocumentMapper stage2 = parser.parse(stage2Mapping);
+
+        DocumentMapper.MergeResult mergeResult = stage1.merge(stage2, mergeFlags().simulate(false));
+        assertThat(mergeResult.hasConflicts(), equalTo(true));
+        assertThat(mergeResult.conflicts().length, equalTo(2));
+        // todo better way of checking conflict?
+        assertThat("mapper [point] has different validate_lat", isIn(new ArrayList<>(Arrays.asList(mergeResult.conflicts()))));
+
+        // correct mapping and ensure no failures
+        stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+                .field("validate", true).field("normalize", true).endObject().endObject()
+                .endObject().endObject().string();
+        stage2 = parser.parse(stage2Mapping);
+        mergeResult = stage1.merge(stage2, mergeFlags().simulate(false));
+        assertThat(mergeResult.hasConflicts(), equalTo(false));
+    }
 }


@@ -1,93 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.mapper.geo;
-
-import org.elasticsearch.common.geo.GeoHashUtils;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.ParsedDocument;
-import org.elasticsearch.test.ElasticsearchSingleNodeTest;
-import org.junit.Test;
-
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.notNullValue;
-
-/**
- *
- */
-public class LatLonAndGeohashMappingGeoPointTests extends ElasticsearchSingleNodeTest {
-
-    @Test
-    public void testLatLonValues() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true).endObject().endObject()
-                .endObject().endObject().string();
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
-        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject()
-                .endObject()
-                .bytes());
-        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
-        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-        assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
-    }
-
-    @Test
-    public void testLatLonInOneValue() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true).endObject().endObject()
-                .endObject().endObject().string();
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
-        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("point", "1.2,1.3")
-                .endObject()
-                .bytes());
-        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
-        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-        assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
-    }
-
-    @Test
-    public void testGeoHashValue() throws Exception {
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true).endObject().endObject()
-                .endObject().endObject().string();
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
-        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
-                .startObject()
-                .field("point", GeoHashUtils.encode(1.2, 1.3))
-                .endObject()
-                .bytes());
-        assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
-        assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-        assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
-    }
-}