After #53562, the `geo_shape` field mapper is registered within a module. This opens the door for introducing a new `geo_shape` field mapper into the Spatial Plugin that has doc-values support. This is very much an extension of server's GeoShapeFieldMapper, but with the addition of the doc values implementation.
This commit is contained in:
parent
8d05d7dace
commit
0844455505
|
@ -317,6 +317,12 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
|
|||
excludePlatforms = []
|
||||
}
|
||||
from(buildModules) {
|
||||
// geo registers the geo_shape mapper that is overridden by
|
||||
// the geo_shape mapper registered in the x-pack-spatial plugin
|
||||
if (oss == false) {
|
||||
exclude "**/geo/**"
|
||||
}
|
||||
|
||||
for (String excludePlatform : excludePlatforms) {
|
||||
exclude "**/platform/${excludePlatform}/**"
|
||||
}
|
||||
|
|
|
@ -21,3 +21,7 @@ esplugin {
|
|||
description 'Placeholder plugin for geospatial features in ES. only registers geo_shape field mapper for now'
|
||||
classname 'org.elasticsearch.geo.GeoPlugin'
|
||||
}
|
||||
|
||||
artifacts {
|
||||
restTests(new File(projectDir, "src/test/resources/rest-api-spec/test"))
|
||||
}
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
|
||||
package org.elasticsearch.geo;
|
||||
|
||||
import org.elasticsearch.index.mapper.AbstractGeometryFieldMapper;
|
||||
import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.plugins.MapperPlugin;
|
||||
|
@ -32,6 +31,6 @@ public class GeoPlugin extends Plugin implements MapperPlugin {
|
|||
|
||||
@Override
|
||||
public Map<String, Mapper.TypeParser> getMappers() {
|
||||
return Collections.singletonMap(GeoShapeFieldMapper.CONTENT_TYPE, new AbstractGeometryFieldMapper.TypeParser());
|
||||
return Collections.singletonMap(GeoShapeFieldMapper.CONTENT_TYPE, new GeoShapeFieldMapper.TypeParser());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -110,6 +110,14 @@ public class ByteBufferStreamInput extends StreamInput {
|
|||
}
|
||||
}
|
||||
|
||||
public void position(int newPosition) throws IOException {
|
||||
buffer.position(newPosition);
|
||||
}
|
||||
|
||||
public int position() throws IOException {
|
||||
return buffer.position();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void reset() throws IOException {
|
||||
buffer.reset();
|
||||
|
|
|
@ -185,7 +185,9 @@ public abstract class AbstractGeometryFieldMapper<Parsed, Processed> extends Fie
|
|||
|
||||
protected static final String DEPRECATED_PARAMETERS_KEY = "deprecated_parameters";
|
||||
|
||||
public static class TypeParser implements Mapper.TypeParser {
|
||||
public abstract static class TypeParser implements Mapper.TypeParser {
|
||||
protected abstract Builder newBuilder(String name, Map<String, Object> params);
|
||||
|
||||
protected boolean parseXContentParameters(String name, Map.Entry<String, Object> entry, Map<String, Object> params)
|
||||
throws MapperParsingException {
|
||||
if (DeprecatedParameters.parse(name, entry.getKey(), entry.getValue(),
|
||||
|
@ -195,13 +197,6 @@ public abstract class AbstractGeometryFieldMapper<Parsed, Processed> extends Fie
|
|||
return false;
|
||||
}
|
||||
|
||||
protected Builder newBuilder(String name, Map<String, Object> params) {
|
||||
if (params.containsKey(DEPRECATED_PARAMETERS_KEY)) {
|
||||
return new LegacyGeoShapeFieldMapper.Builder(name, (DeprecatedParameters)params.get(DEPRECATED_PARAMETERS_KEY));
|
||||
}
|
||||
return new GeoShapeFieldMapper.Builder(name);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
|
|
|
@ -26,6 +26,8 @@ import org.elasticsearch.common.settings.Settings;
|
|||
import org.elasticsearch.geometry.Geometry;
|
||||
import org.elasticsearch.index.query.VectorGeoShapeQueryProcessor;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* FieldMapper for indexing {@link LatLonShape}s.
|
||||
* <p>
|
||||
|
@ -76,7 +78,7 @@ public class GeoShapeFieldMapper extends AbstractGeometryFieldMapper<Geometry, G
|
|||
}
|
||||
}
|
||||
|
||||
public static final class GeoShapeFieldType extends AbstractGeometryFieldType<Geometry, Geometry> {
|
||||
public static class GeoShapeFieldType extends AbstractGeometryFieldType<Geometry, Geometry> {
|
||||
public GeoShapeFieldType() {
|
||||
super();
|
||||
}
|
||||
|
@ -96,6 +98,18 @@ public class GeoShapeFieldMapper extends AbstractGeometryFieldMapper<Geometry, G
|
|||
}
|
||||
}
|
||||
|
||||
public static final class TypeParser extends AbstractGeometryFieldMapper.TypeParser {
|
||||
|
||||
@Override
|
||||
protected AbstractGeometryFieldMapper.Builder newBuilder(String name, Map<String, Object> params) {
|
||||
if (params.containsKey(DEPRECATED_PARAMETERS_KEY)) {
|
||||
return new LegacyGeoShapeFieldMapper.Builder(name,
|
||||
(LegacyGeoShapeFieldMapper.DeprecatedParameters)params.get(DEPRECATED_PARAMETERS_KEY));
|
||||
}
|
||||
return new GeoShapeFieldMapper.Builder(name);
|
||||
}
|
||||
}
|
||||
|
||||
public GeoShapeFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
|
||||
Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
|
||||
Explicit<Boolean> ignoreZValue, Settings indexSettings,
|
||||
|
|
|
@ -49,7 +49,7 @@ import static org.elasticsearch.common.geo.GeoUtils.normalizePoint;
|
|||
/**
|
||||
* Utility class that converts geometries into Lucene-compatible form for indexing in a geo_shape field.
|
||||
*/
|
||||
public final class GeoShapeIndexer implements AbstractGeometryFieldMapper.Indexer<Geometry, Geometry> {
|
||||
public class GeoShapeIndexer implements AbstractGeometryFieldMapper.Indexer<Geometry, Geometry> {
|
||||
|
||||
private final boolean orientation;
|
||||
private final String name;
|
||||
|
|
|
@ -57,7 +57,7 @@ public enum MissingValues {
|
|||
};
|
||||
}
|
||||
|
||||
static SortedBinaryDocValues replaceMissing(final SortedBinaryDocValues values, final BytesRef missing) {
|
||||
public static SortedBinaryDocValues replaceMissing(final SortedBinaryDocValues values, final BytesRef missing) {
|
||||
return new SortedBinaryDocValues() {
|
||||
|
||||
private int count;
|
||||
|
|
|
@ -18,7 +18,6 @@
|
|||
*/
|
||||
package org.elasticsearch.test;
|
||||
|
||||
import org.elasticsearch.index.mapper.AbstractGeometryFieldMapper;
|
||||
import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.plugins.MapperPlugin;
|
||||
|
@ -40,7 +39,7 @@ public class TestGeoShapeFieldMapperPlugin extends Plugin implements MapperPlugi
|
|||
@Override
|
||||
public Map<String, Mapper.TypeParser> getMappers() {
|
||||
Map<String, Mapper.TypeParser> mappers = new LinkedHashMap<>();
|
||||
mappers.put(GeoShapeFieldMapper.CONTENT_TYPE, new AbstractGeometryFieldMapper.TypeParser());
|
||||
mappers.put(GeoShapeFieldMapper.CONTENT_TYPE, new GeoShapeFieldMapper.TypeParser());
|
||||
return Collections.unmodifiableMap(mappers);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,15 +12,24 @@ esplugin {
|
|||
dependencies {
|
||||
compileOnly project(path: xpackModule('core'), configuration: 'default')
|
||||
testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
|
||||
testCompile project(path: ':modules:geo', configuration: 'runtime')
|
||||
compile project(path: ':modules:geo', configuration: 'default')
|
||||
restTestConfig project(path: ':modules:geo', configuration: 'restTests')
|
||||
}
|
||||
|
||||
restResources {
|
||||
restApi {
|
||||
includeCore '_common', 'indices', 'index', 'search'
|
||||
}
|
||||
restTests {
|
||||
includeCore 'geo_shape'
|
||||
}
|
||||
}
|
||||
|
||||
testClusters.integTest {
|
||||
testDistribution = 'DEFAULT'
|
||||
}
|
||||
|
||||
licenseHeaders {
|
||||
// This class was sourced from apache lucene's sandbox module tests
|
||||
excludes << 'org/apache/lucene/geo/XShapeTestUtil.java'
|
||||
}
|
||||
|
||||
// xpack modules are installed in real clusters as the meta plugin, so
|
||||
// installing them as individual plugins for integ tests doesn't make sense,
|
||||
// so we disable integ tests
|
||||
integTest.enabled = false
|
||||
|
|
|
@ -6,14 +6,14 @@
|
|||
package org.elasticsearch.xpack.spatial;
|
||||
|
||||
import org.elasticsearch.common.inject.Module;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.geo.GeoPlugin;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.ingest.Processor;
|
||||
import org.elasticsearch.plugins.IngestPlugin;
|
||||
import org.elasticsearch.plugins.MapperPlugin;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.xpack.core.XPackPlugin;
|
||||
import org.elasticsearch.plugins.SearchPlugin;
|
||||
import org.elasticsearch.xpack.core.XPackPlugin;
|
||||
import org.elasticsearch.xpack.spatial.index.mapper.GeoShapeWithDocValuesFieldMapper;
|
||||
import org.elasticsearch.xpack.spatial.index.mapper.PointFieldMapper;
|
||||
import org.elasticsearch.xpack.spatial.index.mapper.ShapeFieldMapper;
|
||||
import org.elasticsearch.xpack.spatial.index.query.ShapeQueryBuilder;
|
||||
|
@ -21,16 +21,13 @@ import org.elasticsearch.xpack.spatial.ingest.CircleProcessor;
|
|||
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static java.util.Collections.singletonList;
|
||||
|
||||
public class SpatialPlugin extends Plugin implements MapperPlugin, SearchPlugin, IngestPlugin {
|
||||
|
||||
public SpatialPlugin(Settings settings) {
|
||||
}
|
||||
public class SpatialPlugin extends GeoPlugin implements MapperPlugin, SearchPlugin, IngestPlugin {
|
||||
|
||||
public Collection<Module> createGuiceModules() {
|
||||
return Collections.singletonList(b -> {
|
||||
|
@ -40,9 +37,10 @@ public class SpatialPlugin extends Plugin implements MapperPlugin, SearchPlugin,
|
|||
|
||||
@Override
|
||||
public Map<String, Mapper.TypeParser> getMappers() {
|
||||
Map<String, Mapper.TypeParser> mappers = new LinkedHashMap<>();
|
||||
Map<String, Mapper.TypeParser> mappers = new HashMap<>(super.getMappers());
|
||||
mappers.put(ShapeFieldMapper.CONTENT_TYPE, new ShapeFieldMapper.TypeParser());
|
||||
mappers.put(PointFieldMapper.CONTENT_TYPE, new PointFieldMapper.TypeParser());
|
||||
mappers.put(GeoShapeWithDocValuesFieldMapper.CONTENT_TYPE, new GeoShapeWithDocValuesFieldMapper.TypeParser());
|
||||
return Collections.unmodifiableMap(mappers);
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,51 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.spatial.index.mapper;
|
||||
|
||||
import org.apache.lucene.util.Accountable;
|
||||
import org.elasticsearch.index.fielddata.ScriptDocValues;
|
||||
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
|
||||
public abstract class AbstractAtomicGeoShapeShapeFieldData implements LeafGeoShapeFieldData {
|
||||
|
||||
@Override
|
||||
public final SortedBinaryDocValues getBytesValues() {
|
||||
throw new UnsupportedOperationException("scripts and term aggs are not supported by geo_shape doc values");
|
||||
}
|
||||
|
||||
@Override
|
||||
public final ScriptDocValues.BytesRefs getScriptValues() {
|
||||
throw new UnsupportedOperationException("scripts are not supported by geo_shape doc values");
|
||||
}
|
||||
|
||||
public static LeafGeoShapeFieldData empty(final int maxDoc) {
|
||||
return new AbstractAtomicGeoShapeShapeFieldData() {
|
||||
|
||||
@Override
|
||||
public long ramBytesUsed() {
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<Accountable> getChildResources() {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public MultiGeoShapeValues getGeoShapeValues() {
|
||||
return MultiGeoShapeValues.EMPTY;
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
|
@ -0,0 +1,87 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.spatial.index.mapper;
|
||||
|
||||
import org.apache.lucene.index.DocValuesType;
|
||||
import org.apache.lucene.index.FieldInfo;
|
||||
import org.apache.lucene.index.LeafReader;
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.search.SortField;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.util.BigArrays;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
|
||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.indices.breaker.CircuitBreakerService;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.MultiValueMode;
|
||||
import org.elasticsearch.search.sort.BucketedSort;
|
||||
import org.elasticsearch.search.sort.SortOrder;
|
||||
|
||||
public abstract class AbstractLatLonShapeDVIndexFieldData extends DocValuesIndexFieldData implements IndexGeoShapeFieldData {
|
||||
AbstractLatLonShapeDVIndexFieldData(Index index, String fieldName) {
|
||||
super(index, fieldName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode, XFieldComparatorSource.Nested nested,
|
||||
boolean reverse) {
|
||||
throw new IllegalArgumentException("can't sort on geo_shape field without using specific sorting feature, like geo_distance");
|
||||
}
|
||||
|
||||
public static class LatLonShapeDVIndexFieldData extends AbstractLatLonShapeDVIndexFieldData {
|
||||
public LatLonShapeDVIndexFieldData(Index index, String fieldName) {
|
||||
super(index, fieldName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public LeafGeoShapeFieldData load(LeafReaderContext context) {
|
||||
LeafReader reader = context.reader();
|
||||
FieldInfo info = reader.getFieldInfos().fieldInfo(fieldName);
|
||||
if (info != null) {
|
||||
checkCompatible(info);
|
||||
}
|
||||
return new LatLonShapeDVAtomicShapeFieldData(reader, fieldName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public LeafGeoShapeFieldData loadDirect(LeafReaderContext context) throws Exception {
|
||||
return load(context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode,
|
||||
IndexFieldData.XFieldComparatorSource.Nested nested, SortOrder sortOrder, DocValueFormat format,
|
||||
int bucketSize, BucketedSort.ExtraData extra) {
|
||||
throw new IllegalArgumentException("can't sort on geo_shape field without using specific sorting feature, like geo_distance");
|
||||
}
|
||||
|
||||
/** helper: checks a fieldinfo and throws exception if its definitely not a LatLonDocValuesField */
|
||||
static void checkCompatible(FieldInfo fieldInfo) {
|
||||
// dv properties could be "unset", if you e.g. used only StoredField with this same name in the segment.
|
||||
if (fieldInfo.getDocValuesType() != DocValuesType.NONE
|
||||
&& fieldInfo.getDocValuesType() != DocValuesType.BINARY) {
|
||||
throw new IllegalArgumentException("field=\"" + fieldInfo.name + "\" was indexed with docValuesType="
|
||||
+ fieldInfo.getDocValuesType() + " but this type has docValuesType="
|
||||
+ DocValuesType.BINARY + ", is the field really a geo-shape field?");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static class Builder implements IndexFieldData.Builder {
|
||||
@Override
|
||||
public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
|
||||
CircuitBreakerService breakerService, MapperService mapperService) {
|
||||
// ignore breaker
|
||||
return new LatLonShapeDVIndexFieldData(indexSettings.getIndex(), fieldType.name());
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,48 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.spatial.index.mapper;
|
||||
|
||||
import org.apache.lucene.document.ShapeField;
|
||||
import org.apache.lucene.store.ByteBuffersDataOutput;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.index.mapper.CustomDocValuesField;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
public class BinaryGeoShapeDocValuesField extends CustomDocValuesField {
|
||||
|
||||
private final List<ShapeField.DecodedTriangle> triangles;
|
||||
private final CentroidCalculator centroidCalculator;
|
||||
|
||||
public BinaryGeoShapeDocValuesField(String name, ShapeField.DecodedTriangle[] triangles, CentroidCalculator centroidCalculator) {
|
||||
super(name);
|
||||
this.triangles = new ArrayList<>(triangles.length);
|
||||
this.centroidCalculator = centroidCalculator;
|
||||
this.triangles.addAll(Arrays.asList(triangles));
|
||||
}
|
||||
|
||||
public void add(ShapeField.DecodedTriangle[] triangles, CentroidCalculator centroidCalculator) {
|
||||
this.triangles.addAll(Arrays.asList(triangles));
|
||||
this.centroidCalculator.addFrom(centroidCalculator);
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef binaryValue() {
|
||||
try {
|
||||
final TriangleTreeWriter writer = new TriangleTreeWriter(triangles, GeoShapeCoordinateEncoder.INSTANCE, centroidCalculator);
|
||||
ByteBuffersDataOutput output = new ByteBuffersDataOutput();
|
||||
writer.writeTo(output);
|
||||
return new BytesRef(output.toArrayCopy(), 0, Math.toIntExact(output.size()));
|
||||
} catch (IOException e) {
|
||||
throw new ElasticsearchException("failed to encode shape", e);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,300 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.spatial.index.mapper;
|
||||
|
||||
import org.elasticsearch.common.geo.GeoUtils;
|
||||
import org.elasticsearch.geometry.Circle;
|
||||
import org.elasticsearch.geometry.Geometry;
|
||||
import org.elasticsearch.geometry.GeometryCollection;
|
||||
import org.elasticsearch.geometry.GeometryVisitor;
|
||||
import org.elasticsearch.geometry.Line;
|
||||
import org.elasticsearch.geometry.LinearRing;
|
||||
import org.elasticsearch.geometry.MultiLine;
|
||||
import org.elasticsearch.geometry.MultiPoint;
|
||||
import org.elasticsearch.geometry.MultiPolygon;
|
||||
import org.elasticsearch.geometry.Point;
|
||||
import org.elasticsearch.geometry.Polygon;
|
||||
import org.elasticsearch.geometry.Rectangle;
|
||||
import org.elasticsearch.search.aggregations.metrics.CompensatedSum;
|
||||
|
||||
/**
|
||||
* This class keeps a running Kahan-sum of coordinates
|
||||
* that are to be averaged in {@link TriangleTreeWriter} for use
|
||||
* as the centroid of a shape.
|
||||
*/
|
||||
public class CentroidCalculator {
|
||||
CompensatedSum compSumX;
|
||||
CompensatedSum compSumY;
|
||||
CompensatedSum compSumWeight;
|
||||
private CentroidCalculatorVisitor visitor;
|
||||
private DimensionalShapeType dimensionalShapeType;
|
||||
|
||||
public CentroidCalculator(Geometry geometry) {
|
||||
this.compSumX = new CompensatedSum(0, 0);
|
||||
this.compSumY = new CompensatedSum(0, 0);
|
||||
this.compSumWeight = new CompensatedSum(0, 0);
|
||||
this.dimensionalShapeType = null;
|
||||
this.visitor = new CentroidCalculatorVisitor(this);
|
||||
geometry.visit(visitor);
|
||||
this.dimensionalShapeType = visitor.calculator.dimensionalShapeType;
|
||||
}
|
||||
|
||||
/**
|
||||
* adds a single coordinate to the running sum and count of coordinates
|
||||
* for centroid calculation
|
||||
* @param x the x-coordinate of the point
|
||||
* @param y the y-coordinate of the point
|
||||
* @param weight the associated weight of the coordinate
|
||||
*/
|
||||
private void addCoordinate(double x, double y, double weight, DimensionalShapeType dimensionalShapeType) {
|
||||
// x and y can be infinite due to really small areas and rounding problems
|
||||
if (Double.isFinite(x) && Double.isFinite(y)) {
|
||||
if (this.dimensionalShapeType == null || this.dimensionalShapeType == dimensionalShapeType) {
|
||||
compSumX.add(x * weight);
|
||||
compSumY.add(y * weight);
|
||||
compSumWeight.add(weight);
|
||||
this.dimensionalShapeType = dimensionalShapeType;
|
||||
} else if (dimensionalShapeType.compareTo(this.dimensionalShapeType) > 0) {
|
||||
// reset counters
|
||||
compSumX.reset(x * weight, 0);
|
||||
compSumY.reset(y * weight, 0);
|
||||
compSumWeight.reset(weight, 0);
|
||||
this.dimensionalShapeType = dimensionalShapeType;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Adjusts the existing calculator to add the running sum and count
|
||||
* from another {@link CentroidCalculator}. This is used to keep
|
||||
* a running count of points from different sub-shapes of a single
|
||||
* geo-shape field
|
||||
*
|
||||
* @param otherCalculator the other centroid calculator to add from
|
||||
*/
|
||||
public void addFrom(CentroidCalculator otherCalculator) {
|
||||
int compared = dimensionalShapeType.compareTo(otherCalculator.dimensionalShapeType);
|
||||
if (compared < 0) {
|
||||
dimensionalShapeType = otherCalculator.dimensionalShapeType;
|
||||
this.compSumX = otherCalculator.compSumX;
|
||||
this.compSumY = otherCalculator.compSumY;
|
||||
this.compSumWeight = otherCalculator.compSumWeight;
|
||||
|
||||
} else if (compared == 0) {
|
||||
this.compSumX.add(otherCalculator.compSumX.value());
|
||||
this.compSumY.add(otherCalculator.compSumY.value());
|
||||
this.compSumWeight.add(otherCalculator.compSumWeight.value());
|
||||
} // else (compared > 0) do not modify centroid calculation since otherCalculator is of lower dimension than this calculator
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the x-coordinate centroid
|
||||
*/
|
||||
public double getX() {
|
||||
// normalization required due to floating point precision errors
|
||||
return GeoUtils.normalizeLon(compSumX.value() / compSumWeight.value());
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the y-coordinate centroid
|
||||
*/
|
||||
public double getY() {
|
||||
// normalization required due to floating point precision errors
|
||||
return GeoUtils.normalizeLat(compSumY.value() / compSumWeight.value());
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the sum of all the weighted coordinates summed in the calculator
|
||||
*/
|
||||
public double sumWeight() {
|
||||
return compSumWeight.value();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the highest dimensional shape type summed in the calculator
|
||||
*/
|
||||
public DimensionalShapeType getDimensionalShapeType() {
|
||||
return dimensionalShapeType;
|
||||
}
|
||||
|
||||
private static class CentroidCalculatorVisitor implements GeometryVisitor<Void, IllegalArgumentException> {
|
||||
|
||||
private final CentroidCalculator calculator;
|
||||
|
||||
private CentroidCalculatorVisitor(CentroidCalculator calculator) {
|
||||
this.calculator = calculator;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Void visit(Circle circle) {
|
||||
throw new IllegalArgumentException("invalid shape type found [Circle] while calculating centroid");
|
||||
}
|
||||
|
||||
@Override
|
||||
public Void visit(GeometryCollection<?> collection) {
|
||||
for (Geometry shape : collection) {
|
||||
shape.visit(this);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Void visit(Line line) {
|
||||
if (calculator.dimensionalShapeType != DimensionalShapeType.POLYGON) {
|
||||
visitLine(line.length(), line::getX, line::getY);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Void visit(LinearRing ring) {
|
||||
throw new IllegalArgumentException("invalid shape type found [LinearRing] while calculating centroid");
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public Void visit(MultiLine multiLine) {
|
||||
if (calculator.getDimensionalShapeType() != DimensionalShapeType.POLYGON) {
|
||||
for (Line line : multiLine) {
|
||||
visit(line);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Void visit(MultiPoint multiPoint) {
|
||||
if (calculator.getDimensionalShapeType() == null || calculator.getDimensionalShapeType() == DimensionalShapeType.POINT) {
|
||||
for (Point point : multiPoint) {
|
||||
visit(point);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Void visit(MultiPolygon multiPolygon) {
|
||||
for (Polygon polygon : multiPolygon) {
|
||||
visit(polygon);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Void visit(Point point) {
|
||||
if (calculator.getDimensionalShapeType() == null || calculator.getDimensionalShapeType() == DimensionalShapeType.POINT) {
|
||||
visitPoint(point.getX(), point.getY());
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Void visit(Polygon polygon) {
|
||||
// check area of polygon
|
||||
|
||||
double[] centroidX = new double[1 + polygon.getNumberOfHoles()];
|
||||
double[] centroidY = new double[1 + polygon.getNumberOfHoles()];
|
||||
double[] weight = new double[1 + polygon.getNumberOfHoles()];
|
||||
visitLinearRing(polygon.getPolygon().length(), polygon.getPolygon()::getX, polygon.getPolygon()::getY, false,
|
||||
centroidX, centroidY, weight, 0);
|
||||
for (int i = 0; i < polygon.getNumberOfHoles(); i++) {
|
||||
visitLinearRing(polygon.getHole(i).length(), polygon.getHole(i)::getX, polygon.getHole(i)::getY, true,
|
||||
centroidX, centroidY, weight, i + 1);
|
||||
}
|
||||
|
||||
double sumWeight = 0;
|
||||
for (double w : weight) {
|
||||
sumWeight += w;
|
||||
}
|
||||
|
||||
if (sumWeight == 0 && calculator.dimensionalShapeType != DimensionalShapeType.POLYGON) {
|
||||
visitLine(polygon.getPolygon().length(), polygon.getPolygon()::getX, polygon.getPolygon()::getY);
|
||||
} else {
|
||||
for (int i = 0; i < 1 + polygon.getNumberOfHoles(); i++) {
|
||||
calculator.addCoordinate(centroidX[i], centroidY[i], weight[i], DimensionalShapeType.POLYGON);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Void visit(Rectangle rectangle) {
|
||||
double sumX = rectangle.getMaxX() + rectangle.getMinX();
|
||||
double sumY = rectangle.getMaxY() + rectangle.getMinY();
|
||||
double diffX = rectangle.getMaxX() - rectangle.getMinX();
|
||||
double diffY = rectangle.getMaxY() - rectangle.getMinY();
|
||||
if (diffX != 0 && diffY != 0) {
|
||||
calculator.addCoordinate(sumX / 2, sumY / 2, Math.abs(diffX * diffY), DimensionalShapeType.POLYGON);
|
||||
} else if (diffX != 0) {
|
||||
calculator.addCoordinate(sumX / 2, rectangle.getMinY(), diffX, DimensionalShapeType.LINE);
|
||||
} else if (diffY != 0) {
|
||||
calculator.addCoordinate(rectangle.getMinX(), sumY / 2, diffY, DimensionalShapeType.LINE);
|
||||
} else {
|
||||
visitPoint(rectangle.getMinX(), rectangle.getMinY());
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
private void visitPoint(double x, double y) {
|
||||
calculator.addCoordinate(x, y, 1.0, DimensionalShapeType.POINT);
|
||||
}
|
||||
|
||||
private void visitLine(int length, CoordinateSupplier x, CoordinateSupplier y) {
|
||||
// check line has length
|
||||
double originDiffX = x.get(0) - x.get(1);
|
||||
double originDiffY = y.get(0) - y.get(1);
|
||||
if (originDiffX != 0 || originDiffY != 0) {
|
||||
// a line's centroid is calculated by summing the center of each
|
||||
// line segment weighted by the line segment's length in degrees
|
||||
for (int i = 0; i < length - 1; i++) {
|
||||
double diffX = x.get(i) - x.get(i + 1);
|
||||
double diffY = y.get(i) - y.get(i + 1);
|
||||
double xAvg = (x.get(i) + x.get(i + 1)) / 2;
|
||||
double yAvg = (y.get(i) + y.get(i + 1)) / 2;
|
||||
double weight = Math.sqrt(diffX * diffX + diffY * diffY);
|
||||
calculator.addCoordinate(xAvg, yAvg, weight, DimensionalShapeType.LINE);
|
||||
}
|
||||
} else {
|
||||
visitPoint(x.get(0), y.get(0));
|
||||
}
|
||||
}
|
||||
|
||||
private void visitLinearRing(int length, CoordinateSupplier x, CoordinateSupplier y, boolean isHole,
|
||||
double[] centroidX, double[] centroidY, double[] weight, int idx) {
|
||||
// implementation of calculation defined in
|
||||
// https://www.seas.upenn.edu/~sys502/extra_materials/Polygon%20Area%20and%20Centroid.pdf
|
||||
//
|
||||
// centroid of a ring is a weighted coordinate based on the ring's area.
|
||||
// the sign of the area is positive for the outer-shell of a polygon and negative for the holes
|
||||
|
||||
int sign = isHole ? -1 : 1;
|
||||
double totalRingArea = 0.0;
|
||||
for (int i = 0; i < length - 1; i++) {
|
||||
totalRingArea += (x.get(i) * y.get(i + 1)) - (x.get(i + 1) * y.get(i));
|
||||
}
|
||||
totalRingArea = totalRingArea / 2;
|
||||
|
||||
double sumX = 0.0;
|
||||
double sumY = 0.0;
|
||||
for (int i = 0; i < length - 1; i++) {
|
||||
double twiceArea = (x.get(i) * y.get(i + 1)) - (x.get(i + 1) * y.get(i));
|
||||
sumX += twiceArea * (x.get(i) + x.get(i + 1));
|
||||
sumY += twiceArea * (y.get(i) + y.get(i + 1));
|
||||
}
|
||||
centroidX[idx] = sumX / (6 * totalRingArea);
|
||||
centroidY[idx] = sumY / (6 * totalRingArea);
|
||||
weight[idx] = sign * Math.abs(totalRingArea);
|
||||
}
|
||||
}
|
||||
|
||||
@FunctionalInterface
|
||||
private interface CoordinateSupplier {
|
||||
double get(int idx);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,18 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.spatial.index.mapper;
|
||||
|
||||
/**
|
||||
* Interface for classes that help encode double-valued spatial coordinates x/y to
|
||||
* their integer-encoded serialized form and decode them back
|
||||
*/
|
||||
public interface CoordinateEncoder {
|
||||
int encodeX(double x);
|
||||
int encodeY(double y);
|
||||
double decodeX(int x);
|
||||
double decodeY(int y);
|
||||
}
|
|
@ -0,0 +1,38 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.spatial.index.mapper;
|
||||
|
||||
import org.apache.lucene.store.ByteArrayDataInput;
|
||||
import org.apache.lucene.store.ByteBuffersDataOutput;
|
||||
import org.elasticsearch.geometry.GeometryCollection;
|
||||
import org.elasticsearch.geometry.ShapeType;
|
||||
|
||||
/**
|
||||
* Like {@link ShapeType} but has specific
|
||||
* types for when the geometry is a {@link GeometryCollection} and
|
||||
* more information about what the highest-dimensional sub-shape
|
||||
* is.
|
||||
*/
|
||||
public enum DimensionalShapeType {
|
||||
POINT,
|
||||
LINE,
|
||||
POLYGON;
|
||||
|
||||
private static DimensionalShapeType[] values = values();
|
||||
|
||||
public static DimensionalShapeType fromOrdinalByte(byte ordinal) {
|
||||
return values[Byte.toUnsignedInt(ordinal)];
|
||||
}
|
||||
|
||||
public void writeTo(ByteBuffersDataOutput out) {
|
||||
out.writeByte((byte) ordinal());
|
||||
}
|
||||
|
||||
public static DimensionalShapeType readFrom(ByteArrayDataInput in) {
|
||||
return fromOrdinalByte(in.readByte());
|
||||
}
|
||||
}
|
|
@ -0,0 +1,283 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.spatial.index.mapper;
|
||||
|
||||
import org.apache.lucene.store.ByteArrayDataInput;
|
||||
import org.apache.lucene.store.ByteBuffersDataOutput;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Object representing the extent of a geometry object within a {@link TriangleTreeWriter}.
|
||||
*/
|
||||
public class Extent {
|
||||
|
||||
public int top;
|
||||
public int bottom;
|
||||
public int negLeft;
|
||||
public int negRight;
|
||||
public int posLeft;
|
||||
public int posRight;
|
||||
|
||||
private static final byte NONE_SET = 0;
|
||||
private static final byte POSITIVE_SET = 1;
|
||||
private static final byte NEGATIVE_SET = 2;
|
||||
private static final byte CROSSES_LAT_AXIS = 3;
|
||||
private static final byte ALL_SET = 4;
|
||||
|
||||
|
||||
public Extent() {
|
||||
this.top = Integer.MIN_VALUE;
|
||||
this.bottom = Integer.MAX_VALUE;
|
||||
this.negLeft = Integer.MAX_VALUE;
|
||||
this.negRight = Integer.MIN_VALUE;
|
||||
this.posLeft = Integer.MAX_VALUE;
|
||||
this.posRight = Integer.MIN_VALUE;
|
||||
}
|
||||
|
||||
public Extent(int top, int bottom, int negLeft, int negRight, int posLeft, int posRight) {
|
||||
this.top = top;
|
||||
this.bottom = bottom;
|
||||
this.negLeft = negLeft;
|
||||
this.negRight = negRight;
|
||||
this.posLeft = posLeft;
|
||||
this.posRight = posRight;
|
||||
}
|
||||
|
||||
public void reset(int top, int bottom, int negLeft, int negRight, int posLeft, int posRight) {
|
||||
this.top = top;
|
||||
this.bottom = bottom;
|
||||
this.negLeft = negLeft;
|
||||
this.negRight = negRight;
|
||||
this.posLeft = posLeft;
|
||||
this.posRight = posRight;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the extent of two points representing a bounding box's bottom-left
|
||||
* and top-right points. The bounding box must not cross the dateline.
|
||||
*
|
||||
* @param bottomLeftX the bottom-left x-coordinate
|
||||
* @param bottomLeftY the bottom-left y-coordinate
|
||||
* @param topRightX the top-right x-coordinate
|
||||
* @param topRightY the top-right y-coordinate
|
||||
*/
|
||||
public void addRectangle(int bottomLeftX, int bottomLeftY, int topRightX, int topRightY) {
|
||||
assert bottomLeftX <= topRightX;
|
||||
assert bottomLeftY <= topRightY;
|
||||
this.bottom = Math.min(this.bottom, bottomLeftY);
|
||||
this.top = Math.max(this.top, topRightY);
|
||||
if (bottomLeftX < 0 && topRightX < 0) {
|
||||
this.negLeft = Math.min(this.negLeft, bottomLeftX);
|
||||
this.negRight = Math.max(this.negRight, topRightX);
|
||||
} else if (bottomLeftX < 0) {
|
||||
this.negLeft = Math.min(this.negLeft, bottomLeftX);
|
||||
this.posRight = Math.max(this.posRight, topRightX);
|
||||
// this signal the extent cannot be wrapped around the dateline
|
||||
this.negRight = 0;
|
||||
this.posLeft = 0;
|
||||
} else {
|
||||
this.posLeft = Math.min(this.posLeft, bottomLeftX);
|
||||
this.posRight = Math.max(this.posRight, topRightX);
|
||||
}
|
||||
}
|
||||
|
||||
static void readFromCompressed(ByteArrayDataInput input, Extent extent) {
|
||||
final int top = input.readInt();
|
||||
final int bottom = Math.toIntExact(top - input.readVLong());
|
||||
final int negLeft;
|
||||
final int negRight;
|
||||
final int posLeft;
|
||||
final int posRight;
|
||||
byte type = input.readByte();
|
||||
switch (type) {
|
||||
case NONE_SET:
|
||||
negLeft = Integer.MAX_VALUE;
|
||||
negRight = Integer.MIN_VALUE;
|
||||
posLeft = Integer.MAX_VALUE;
|
||||
posRight = Integer.MIN_VALUE;
|
||||
break;
|
||||
case POSITIVE_SET:
|
||||
posLeft = input.readVInt();
|
||||
posRight = Math.toIntExact(input.readVLong() + posLeft);
|
||||
negLeft = Integer.MAX_VALUE;
|
||||
negRight = Integer.MIN_VALUE;
|
||||
break;
|
||||
case NEGATIVE_SET:
|
||||
negRight = -input.readVInt();
|
||||
negLeft = Math.toIntExact(negRight - input.readVLong());
|
||||
posLeft = Integer.MAX_VALUE;
|
||||
posRight = Integer.MIN_VALUE;
|
||||
break;
|
||||
case CROSSES_LAT_AXIS:
|
||||
posRight = input.readVInt();
|
||||
negLeft = -input.readVInt();
|
||||
posLeft = 0;
|
||||
negRight = 0;
|
||||
break;
|
||||
case ALL_SET:
|
||||
posLeft = input.readVInt();
|
||||
posRight = Math.toIntExact(input.readVLong() + posLeft);
|
||||
negRight = -input.readVInt();
|
||||
negLeft = Math.toIntExact(negRight - input.readVLong());
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("invalid extent values-set byte read [" + type + "]");
|
||||
}
|
||||
extent.reset(top, bottom, negLeft, negRight, posLeft, posRight);
|
||||
}
|
||||
|
||||
void writeCompressed(ByteBuffersDataOutput output) throws IOException {
|
||||
output.writeInt(this.top);
|
||||
output.writeVLong((long) this.top - this.bottom);
|
||||
byte type;
|
||||
if (this.negLeft == Integer.MAX_VALUE && this.negRight == Integer.MIN_VALUE) {
|
||||
if (this.posLeft == Integer.MAX_VALUE && this.posRight == Integer.MIN_VALUE) {
|
||||
type = NONE_SET;
|
||||
} else {
|
||||
type = POSITIVE_SET;
|
||||
}
|
||||
} else if (this.posLeft == Integer.MAX_VALUE && this.posRight == Integer.MIN_VALUE) {
|
||||
type = NEGATIVE_SET;
|
||||
} else {
|
||||
if (posLeft == 0 && negRight == 0) {
|
||||
type = CROSSES_LAT_AXIS;
|
||||
} else {
|
||||
type = ALL_SET;
|
||||
}
|
||||
}
|
||||
output.writeByte(type);
|
||||
switch (type) {
|
||||
case NONE_SET : break;
|
||||
case POSITIVE_SET:
|
||||
output.writeVInt(this.posLeft);
|
||||
output.writeVLong((long) this.posRight - this.posLeft);
|
||||
break;
|
||||
case NEGATIVE_SET:
|
||||
output.writeVInt(-this.negRight);
|
||||
output.writeVLong((long) this.negRight - this.negLeft);
|
||||
break;
|
||||
case CROSSES_LAT_AXIS:
|
||||
output.writeVInt(this.posRight);
|
||||
output.writeVInt(-this.negLeft);
|
||||
break;
|
||||
case ALL_SET:
|
||||
output.writeVInt(this.posLeft);
|
||||
output.writeVLong((long) this.posRight - this.posLeft);
|
||||
output.writeVInt(-this.negRight);
|
||||
output.writeVLong((long) this.negRight - this.negLeft);
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("invalid extent values-set byte read [" + type + "]");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* calculates the extent of a point, which is the point itself.
|
||||
* @param x the x-coordinate of the point
|
||||
* @param y the y-coordinate of the point
|
||||
* @return the extent of the point
|
||||
*/
|
||||
public static Extent fromPoint(int x, int y) {
|
||||
return new Extent(y, y,
|
||||
x < 0 ? x : Integer.MAX_VALUE,
|
||||
x < 0 ? x : Integer.MIN_VALUE,
|
||||
x >= 0 ? x : Integer.MAX_VALUE,
|
||||
x >= 0 ? x : Integer.MIN_VALUE);
|
||||
}
|
||||
|
||||
/**
|
||||
* calculates the extent of two points representing a bounding box's bottom-left
|
||||
* and top-right points. It is important that these points accurately represent the
|
||||
* bottom-left and top-right of the extent since there is no validation being done.
|
||||
*
|
||||
* @param bottomLeftX the bottom-left x-coordinate
|
||||
* @param bottomLeftY the bottom-left y-coordinate
|
||||
* @param topRightX the top-right x-coordinate
|
||||
* @param topRightY the top-right y-coordinate
|
||||
* @return the extent of the two points
|
||||
*/
|
||||
static Extent fromPoints(int bottomLeftX, int bottomLeftY, int topRightX, int topRightY) {
|
||||
int negLeft = Integer.MAX_VALUE;
|
||||
int negRight = Integer.MIN_VALUE;
|
||||
int posLeft = Integer.MAX_VALUE;
|
||||
int posRight = Integer.MIN_VALUE;
|
||||
if (bottomLeftX < 0 && topRightX < 0) {
|
||||
negLeft = bottomLeftX;
|
||||
negRight = topRightX;
|
||||
} else if (bottomLeftX < 0) {
|
||||
negLeft = bottomLeftX;
|
||||
posRight = topRightX;
|
||||
// this signal the extent cannot be wrapped around the dateline
|
||||
negRight = 0;
|
||||
posLeft = 0;
|
||||
} else {
|
||||
posLeft = bottomLeftX;
|
||||
posRight = topRightX;
|
||||
}
|
||||
return new Extent(topRightY, bottomLeftY, negLeft, negRight, posLeft, posRight);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the minimum y-coordinate of the extent
|
||||
*/
|
||||
public int minY() {
|
||||
return bottom;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the maximum y-coordinate of the extent
|
||||
*/
|
||||
public int maxY() {
|
||||
return top;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the absolute minimum x-coordinate of the extent, whether it is positive or negative.
|
||||
*/
|
||||
public int minX() {
|
||||
return Math.min(negLeft, posLeft);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the absolute maximum x-coordinate of the extent, whether it is positive or negative.
|
||||
*/
|
||||
public int maxX() {
|
||||
return Math.max(negRight, posRight);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
Extent extent = (Extent) o;
|
||||
return top == extent.top &&
|
||||
bottom == extent.bottom &&
|
||||
negLeft == extent.negLeft &&
|
||||
negRight == extent.negRight &&
|
||||
posLeft == extent.posLeft &&
|
||||
posRight == extent.posRight;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(top, bottom, negLeft, negRight, posLeft, posRight);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder builder = new StringBuilder("[");
|
||||
builder.append("top = " + top + ", ");
|
||||
builder.append("bottom = " + bottom + ", ");
|
||||
builder.append("negLeft = " + negLeft + ", ");
|
||||
builder.append("negRight = " + negRight + ", ");
|
||||
builder.append("posLeft = " + posLeft + ", ");
|
||||
builder.append("posRight = " + posRight + "]");
|
||||
return builder.toString();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,17 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.spatial.index.mapper;
|
||||
|
||||
/**
|
||||
* Enum for capturing relationships between a shape
|
||||
* and a query
|
||||
*/
|
||||
public enum GeoRelation {
|
||||
QUERY_CROSSES,
|
||||
QUERY_INSIDE,
|
||||
QUERY_DISJOINT
|
||||
}
|
|
@ -0,0 +1,45 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.spatial.index.mapper;
|
||||
|
||||
import org.apache.lucene.geo.GeoEncodingUtils;
|
||||
|
||||
public final class GeoShapeCoordinateEncoder implements CoordinateEncoder {
|
||||
public static final GeoShapeCoordinateEncoder INSTANCE = new GeoShapeCoordinateEncoder();
|
||||
|
||||
@Override
|
||||
public int encodeX(double x) {
|
||||
if (x == Double.NEGATIVE_INFINITY) {
|
||||
return Integer.MIN_VALUE;
|
||||
}
|
||||
if (x == Double.POSITIVE_INFINITY) {
|
||||
return Integer.MAX_VALUE;
|
||||
}
|
||||
return GeoEncodingUtils.encodeLongitude(x);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int encodeY(double y) {
|
||||
if (y == Double.NEGATIVE_INFINITY) {
|
||||
return Integer.MIN_VALUE;
|
||||
}
|
||||
if (y == Double.POSITIVE_INFINITY) {
|
||||
return Integer.MAX_VALUE;
|
||||
}
|
||||
return GeoEncodingUtils.encodeLatitude(y);
|
||||
}
|
||||
|
||||
@Override
|
||||
public double decodeX(int x) {
|
||||
return GeoEncodingUtils.decodeLongitude(x);
|
||||
}
|
||||
|
||||
@Override
|
||||
public double decodeY(int y) {
|
||||
return GeoEncodingUtils.decodeLatitude(y);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,62 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.spatial.index.mapper;
|
||||
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.elasticsearch.index.fielddata.DocValueBits;
|
||||
import org.elasticsearch.index.fielddata.FieldData;
|
||||
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public abstract class GeoShapeValuesSource extends ValuesSource {
|
||||
public static final GeoShapeValuesSource EMPTY = new GeoShapeValuesSource() {
|
||||
|
||||
@Override
|
||||
public MultiGeoShapeValues geoShapeValues(LeafReaderContext context) {
|
||||
return MultiGeoShapeValues.EMPTY;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException {
|
||||
return FieldData.emptySortedBinary();
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
abstract MultiGeoShapeValues geoShapeValues(LeafReaderContext context);
|
||||
|
||||
@Override
|
||||
public DocValueBits docsWithValue(LeafReaderContext context) throws IOException {
|
||||
MultiGeoShapeValues values = geoShapeValues(context);
|
||||
return new DocValueBits() {
|
||||
@Override
|
||||
public boolean advanceExact(int doc) throws IOException {
|
||||
return values.advanceExact(doc);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
public static class Fielddata extends GeoShapeValuesSource {
|
||||
|
||||
protected final IndexGeoShapeFieldData indexFieldData;
|
||||
|
||||
public Fielddata(IndexGeoShapeFieldData indexFieldData) {
|
||||
this.indexFieldData = indexFieldData;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SortedBinaryDocValues bytesValues(LeafReaderContext context) {
|
||||
return indexFieldData.load(context).getBytesValues();
|
||||
}
|
||||
|
||||
public MultiGeoShapeValues geoShapeValues(LeafReaderContext context) {
|
||||
return indexFieldData.load(context).getGeoShapeValues();
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,116 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.spatial.index.mapper;
|
||||
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
|
||||
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
|
||||
import org.elasticsearch.script.AggregationScript;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.aggregations.support.FieldContext;
|
||||
import org.elasticsearch.search.aggregations.support.MissingValues;
|
||||
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.function.LongSupplier;
|
||||
|
||||
public class GeoShapeValuesSourceType implements Writeable, ValuesSourceType {
|
||||
|
||||
static GeoShapeValuesSourceType INSTANCE = new GeoShapeValuesSourceType();
|
||||
|
||||
@Override
|
||||
public ValuesSource getEmpty() {
|
||||
return GeoShapeValuesSource.EMPTY;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValuesSource getScript(AggregationScript.LeafFactory script, ValueType scriptValueType) {
|
||||
// TODO (support scripts)
|
||||
throw new UnsupportedOperationException("geo_shape");
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFactory script) {
|
||||
boolean isGeoPoint = fieldContext.indexFieldData() instanceof IndexGeoPointFieldData;
|
||||
boolean isGeoShape = fieldContext.indexFieldData() instanceof IndexGeoShapeFieldData;
|
||||
if (isGeoPoint == false && isGeoShape == false) {
|
||||
throw new IllegalArgumentException("Expected geo_point or geo_shape type on field [" + fieldContext.field() +
|
||||
"], but got [" + fieldContext.fieldType().typeName() + "]");
|
||||
}
|
||||
if (isGeoPoint) {
|
||||
return new ValuesSource.GeoPoint.Fielddata((IndexGeoPointFieldData) fieldContext.indexFieldData());
|
||||
}
|
||||
return new GeoShapeValuesSource.Fielddata((IndexGeoShapeFieldData) fieldContext.indexFieldData());
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValuesSource replaceMissing(ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, LongSupplier now) {
|
||||
GeoShapeValuesSource geoShapeValuesSource = (GeoShapeValuesSource) valuesSource;
|
||||
final MultiGeoShapeValues.GeoShapeValue missing = MultiGeoShapeValues.GeoShapeValue.missing(rawMissing.toString());
|
||||
return new GeoShapeValuesSource() {
|
||||
@Override
|
||||
MultiGeoShapeValues geoShapeValues(LeafReaderContext context) {
|
||||
MultiGeoShapeValues values = geoShapeValuesSource.geoShapeValues(context);
|
||||
return new MultiGeoShapeValues() {
|
||||
|
||||
private int count;
|
||||
|
||||
@Override
|
||||
public boolean advanceExact(int doc) throws IOException {
|
||||
if (values.advanceExact(doc)) {
|
||||
count = values.docValueCount();
|
||||
} else {
|
||||
count = 0;
|
||||
}
|
||||
// always return true because we want to return a value even if
|
||||
// the document does not have a value
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int docValueCount() {
|
||||
return count == 0 ? 1 : count;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValuesSourceType valuesSourceType() {
|
||||
return values.valuesSourceType();
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoShapeValue nextValue() throws IOException {
|
||||
if (count > 0) {
|
||||
return values.nextValue();
|
||||
} else {
|
||||
return missing;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "anon MultiGeoShapeValues of [" + super.toString() + "]";
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException {
|
||||
return MissingValues.replaceMissing(geoShapeValuesSource.bytesValues(context), new BytesRef(missing.toString()));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
|
||||
}
|
||||
}
|
|
@ -0,0 +1,236 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.spatial.index.mapper;
|
||||
|
||||
import org.apache.lucene.document.LatLonShape;
|
||||
import org.apache.lucene.document.ShapeField;
|
||||
import org.apache.lucene.index.IndexableField;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.DocValuesFieldExistsQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Explicit;
|
||||
import org.elasticsearch.common.geo.GeometryParser;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||
import org.elasticsearch.geometry.Geometry;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.mapper.AbstractGeometryFieldMapper;
|
||||
import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
|
||||
import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
|
||||
import org.elasticsearch.index.mapper.GeoShapeIndexer;
|
||||
import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.TypeParsers;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.VectorGeoShapeQueryProcessor;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Extension of {@link org.elasticsearch.index.mapper.GeoShapeFieldMapper} that supports docValues
|
||||
*
|
||||
* FieldMapper for indexing {@link LatLonShape}s.
|
||||
* <p>
|
||||
* Currently Shapes can only be indexed and can only be queried using
|
||||
* {@link org.elasticsearch.index.query.GeoShapeQueryBuilder}, consequently
|
||||
* a lot of behavior in this Mapper is disabled.
|
||||
* <p>
|
||||
* Format supported:
|
||||
* <p>
|
||||
* "field" : {
|
||||
* "type" : "polygon",
|
||||
* "coordinates" : [
|
||||
* [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ]
|
||||
* ]
|
||||
* }
|
||||
* <p>
|
||||
* or:
|
||||
* <p>
|
||||
* "field" : "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0))
|
||||
*/
|
||||
public class GeoShapeWithDocValuesFieldMapper extends GeoShapeFieldMapper {
    public static final String CONTENT_TYPE = "geo_shape";

    private Explicit<Boolean> docValues;

    @Override
    public void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
        super.doXContentBody(builder, includeDefaults, params);
        if (includeDefaults || docValues.explicit()) {
            builder.field(TypeParsers.DOC_VALUES, docValues.value());
        }
    }

    @SuppressWarnings("rawtypes")
    public static class Builder extends AbstractGeometryFieldMapper.Builder<AbstractGeometryFieldMapper.Builder,
            GeoShapeWithDocValuesFieldMapper> {
        public Builder(String name) {
            super(name, new GeoShapeWithDocValuesFieldType(), new GeoShapeWithDocValuesFieldType());
        }

        @Override
        public GeoShapeWithDocValuesFieldMapper build(BuilderContext context) {
            setupFieldType(context);
            return new GeoShapeWithDocValuesFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context),
                ignoreZValue(), docValues(), context.indexSettings(),
                multiFieldsBuilder.build(this, context), copyTo);
        }

        @Override
        public boolean defaultDocValues(Version indexCreated) {
            return Version.V_7_8_0.onOrBefore(indexCreated);
        }

        protected Explicit<Boolean> docValues() {
            if (docValuesSet && fieldType.hasDocValues()) {
                return new Explicit<>(true, true);
            } else if (docValuesSet) {
                return new Explicit<>(false, true);
            }
            return new Explicit<>(fieldType.hasDocValues(), false);
        }

        protected void setupFieldType(BuilderContext context) {
            super.setupFieldType(context);

            GeoShapeWithDocValuesFieldType fieldType = (GeoShapeWithDocValuesFieldType) fieldType();
            boolean orientation = fieldType.orientation() == ShapeBuilder.Orientation.RIGHT;

            GeometryParser geometryParser = new GeometryParser(orientation, coerce(context).value(), ignoreZValue().value());

            fieldType.setGeometryIndexer(new GeoShapeIndexer(orientation, fieldType.name()) {
                @Override
                public List<IndexableField> indexShape(ParseContext context, Geometry shape) {
                    List<IndexableField> fields = super.indexShape(context, shape);
                    if (fieldType().hasDocValues()) {
                        CentroidCalculator calculator = new CentroidCalculator(shape);
                        final byte[] scratch = new byte[7 * Integer.BYTES];
                        // doc values are generated from the indexed fields.
                        ShapeField.DecodedTriangle[] triangles = new ShapeField.DecodedTriangle[fields.size()];
                        for (int i = 0; i < fields.size(); i++) {
                            BytesRef bytesRef = fields.get(i).binaryValue();
                            assert bytesRef.length == 7 * Integer.BYTES;
                            System.arraycopy(bytesRef.bytes, bytesRef.offset, scratch, 0, 7 * Integer.BYTES);
                            ShapeField.decodeTriangle(scratch, triangles[i] = new ShapeField.DecodedTriangle());
                        }
                        BinaryGeoShapeDocValuesField docValuesField =
                            (BinaryGeoShapeDocValuesField) context.doc().getByKey(name);
                        if (docValuesField == null) {
                            docValuesField = new BinaryGeoShapeDocValuesField(name, triangles, calculator);
                            context.doc().addWithKey(name, docValuesField);
                        } else {
                            docValuesField.add(triangles, calculator);
                        }
                    }
                    return fields;
                }
            });
            fieldType.setGeometryParser((parser, mapper) -> geometryParser.parse(parser));
            fieldType.setGeometryQueryBuilder(new VectorGeoShapeQueryProcessor());
        }
    }

    public static final class GeoShapeWithDocValuesFieldType extends GeoShapeFieldMapper.GeoShapeFieldType {
        public GeoShapeWithDocValuesFieldType() {
            super();
        }

        protected GeoShapeWithDocValuesFieldType(GeoShapeWithDocValuesFieldType ref) {
            super(ref);
        }

        public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) {
            failIfNoDocValues();
            return new AbstractLatLonShapeDVIndexFieldData.Builder();
        }

        @Override
        public Query existsQuery(QueryShardContext context) {
            if (hasDocValues()) {
                return new DocValuesFieldExistsQuery(name());
            } else {
                return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name()));
            }
        }

        @Override
        public GeoShapeWithDocValuesFieldType clone() {
            return new GeoShapeWithDocValuesFieldType(this);
        }
    }

    public static final class TypeParser extends AbstractGeometryFieldMapper.TypeParser {

        @Override
        @SuppressWarnings("rawtypes")
        protected AbstractGeometryFieldMapper.Builder newBuilder(String name, Map<String, Object> params) {
            if (params.containsKey(DEPRECATED_PARAMETERS_KEY)) {
                return new LegacyGeoShapeFieldMapper.Builder(name,
                    (LegacyGeoShapeFieldMapper.DeprecatedParameters) params.get(DEPRECATED_PARAMETERS_KEY));
            }
            return new GeoShapeWithDocValuesFieldMapper.Builder(name);
        }

        @Override
        @SuppressWarnings("rawtypes")
        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            AbstractGeometryFieldMapper.Builder builder = (AbstractGeometryFieldMapper.Builder) super.parse(name, node, parserContext);
            Map<String, Object> params = new HashMap<>();
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String fieldName = entry.getKey();
                Object fieldNode = entry.getValue();
                if (TypeParsers.DOC_VALUES.equals(fieldName)) {
                    params.put(TypeParsers.DOC_VALUES, XContentMapValues.nodeBooleanValue(fieldNode, name + "." + TypeParsers.DOC_VALUES));
                    iterator.remove();
                }
            }

            if (params.containsKey(TypeParsers.DOC_VALUES)) {
                builder.docValues((Boolean) params.get(TypeParsers.DOC_VALUES));
            }
            return builder;
        }
    }

    public GeoShapeWithDocValuesFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
                                            Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
                                            Explicit<Boolean> ignoreZValue, Explicit<Boolean> docValues, Settings indexSettings,
                                            MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, ignoreZValue, indexSettings,
            multiFields, copyTo);
        this.docValues = docValues;
    }

    public Explicit<Boolean> docValues() {
        return docValues;
    }

    @Override
    public GeoShapeWithDocValuesFieldType fieldType() {
        return (GeoShapeWithDocValuesFieldType) super.fieldType();
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

}
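
For reference, a minimal sketch of the kind of mapping the TypeParser above accepts once the plugin registers this mapper for `geo_shape`; the index layout and the `location` field name are illustrative, and the builder calls assume the usual XContent helpers rather than anything introduced by this commit:

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class GeoShapeDocValuesMappingSketch {
    // Builds { "properties": { "location": { "type": "geo_shape", "doc_values": true } } };
    // the "doc_values" entry is the parameter stripped from the node map and applied by the TypeParser.
    public static XContentBuilder mapping() throws Exception {
        return XContentFactory.jsonBuilder()
            .startObject()
                .startObject("properties")
                    .startObject("location")
                        .field("type", "geo_shape")
                        .field("doc_values", true)
                    .endObject()
                .endObject()
            .endObject();
    }
}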

@ -0,0 +1,16 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.spatial.index.mapper;

import org.elasticsearch.index.fielddata.IndexFieldData;

/**
 * Specialization of {@link IndexFieldData} for geo shapes.
 */
public interface IndexGeoShapeFieldData extends IndexFieldData<LeafGeoShapeFieldData> {
}
@ -0,0 +1,79 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.spatial.index.mapper;

import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;
import java.util.Collection;
import java.util.Collections;

final class LatLonShapeDVAtomicShapeFieldData extends AbstractAtomicGeoShapeShapeFieldData {
    private final LeafReader reader;
    private final String fieldName;

    LatLonShapeDVAtomicShapeFieldData(LeafReader reader, String fieldName) {
        super();
        this.reader = reader;
        this.fieldName = fieldName;
    }

    @Override
    public long ramBytesUsed() {
        return 0; // not exposed by lucene
    }

    @Override
    public Collection<Accountable> getChildResources() {
        return Collections.emptyList();
    }

    @Override
    public void close() {
        // noop
    }

    @Override
    public MultiGeoShapeValues getGeoShapeValues() {
        try {
            final BinaryDocValues binaryValues = DocValues.getBinary(reader, fieldName);
            final TriangleTreeReader reader = new TriangleTreeReader(GeoShapeCoordinateEncoder.INSTANCE);
            final MultiGeoShapeValues.GeoShapeValue geoShapeValue = new MultiGeoShapeValues.GeoShapeValue(reader);
            return new MultiGeoShapeValues() {

                @Override
                public boolean advanceExact(int doc) throws IOException {
                    return binaryValues.advanceExact(doc);
                }

                @Override
                public int docValueCount() {
                    return 1;
                }

                @Override
                public ValuesSourceType valuesSourceType() {
                    return GeoShapeValuesSourceType.INSTANCE;
                }

                @Override
                public GeoShapeValue nextValue() throws IOException {
                    final BytesRef encoded = binaryValues.binaryValue();
                    reader.reset(encoded);
                    return geoShapeValue;
                }
            };
        } catch (IOException e) {
            throw new IllegalStateException("Cannot load doc values", e);
        }
    }
}
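
A minimal consumption sketch, assuming code in the same package (the class is package-private), a LeafReader over a segment that holds the binary doc values of a hypothetical `location` field, and nothing beyond the API shown above:

import org.apache.lucene.index.LeafReader;

import java.io.IOException;

public class GeoShapeDocValuesReadSketch {
    // Iterates every document of one segment and reads the decoded bounding box of its shape value.
    static void readBoundingBoxes(LeafReader leafReader) throws IOException {
        LatLonShapeDVAtomicShapeFieldData fieldData = new LatLonShapeDVAtomicShapeFieldData(leafReader, "location");
        MultiGeoShapeValues values = fieldData.getGeoShapeValues();
        for (int doc = 0; doc < leafReader.maxDoc(); doc++) {
            if (values.advanceExact(doc)) {
                MultiGeoShapeValues.GeoShapeValue value = values.nextValue();
                MultiGeoShapeValues.BoundingBox box = value.boundingBox();
                // box.minX()/maxX()/minY()/maxY() now hold the shape extent in degrees
            }
        }
    }
}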

@ -0,0 +1,20 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.spatial.index.mapper;

import org.elasticsearch.index.fielddata.LeafFieldData;

/**
 * {@link LeafFieldData} specialization for geo shapes.
 */
public interface LeafGeoShapeFieldData extends LeafFieldData {
    /**
     * Return geo shape values.
     */
    MultiGeoShapeValues getGeoShapeValues();

}

@ -0,0 +1,249 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.spatial.index.mapper;

import org.apache.lucene.document.ShapeField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.store.ByteBuffersDataOutput;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.geometry.Geometry;
import org.elasticsearch.geometry.Rectangle;
import org.elasticsearch.geometry.utils.GeographyValidator;
import org.elasticsearch.geometry.utils.WellKnownText;
import org.elasticsearch.index.mapper.GeoShapeIndexer;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;
import java.text.ParseException;
import java.util.Arrays;
import java.util.List;

/**
 * A stateful lightweight per document set of geo shape values.
 * To iterate over values in a document use the following pattern:
 * <pre>
 *   MultiGeoShapeValues values = ..;
 *   if (values.advanceExact(docId)) {
 *       final int numValues = values.docValueCount();
 *       for (int i = 0; i < numValues; i++) {
 *           GeoShapeValue value = values.nextValue();
 *           // process value
 *       }
 *   }
 * </pre>
 * The set of values associated with a document might contain duplicates and
 * comes in a non-specified order.
 */
public abstract class MultiGeoShapeValues {

    static MultiGeoShapeValues EMPTY = new MultiGeoShapeValues() {
        @Override
        public boolean advanceExact(int doc) {
            return false;
        }

        @Override
        public int docValueCount() {
            return 0;
        }

        @Override
        public ValuesSourceType valuesSourceType() {
            return GeoShapeValuesSourceType.INSTANCE;
        }

        @Override
        public GeoShapeValue nextValue() {
            throw new UnsupportedOperationException();
        }
    };

    /**
     * Creates a new {@link MultiGeoShapeValues} instance
     */
    protected MultiGeoShapeValues() {
    }

    /**
     * Advance this instance to the given document id
     * @return true if there is a value for this document
     */
    public abstract boolean advanceExact(int doc) throws IOException;

    /**
     * Return the number of geo shape values the current document has.
     */
    public abstract int docValueCount();

    public abstract ValuesSourceType valuesSourceType();

    /**
     * Return the next value associated with the current document. This must not be
     * called more than {@link #docValueCount()} times.
     *
     * Note: the returned {@link GeoShapeValue} might be shared across invocations.
     *
     * @return the next value for the current docID set to {@link #advanceExact(int)}.
     */
    public abstract GeoShapeValue nextValue() throws IOException;

    public static class GeoShapeValue {
        private static final WellKnownText MISSING_GEOMETRY_PARSER = new WellKnownText(true, new GeographyValidator(true));

        private final TriangleTreeReader reader;
        private final BoundingBox boundingBox;

        public GeoShapeValue(TriangleTreeReader reader) {
            this.reader = reader;
            this.boundingBox = new BoundingBox();
        }

        public BoundingBox boundingBox() {
            boundingBox.reset(reader.getExtent(), GeoShapeCoordinateEncoder.INSTANCE);
            return boundingBox;
        }

        /**
         * @return the {@link GeoRelation} between this shape and the provided rectangle
         */
        public GeoRelation relate(Rectangle rectangle) {
            int minX = GeoShapeCoordinateEncoder.INSTANCE.encodeX(rectangle.getMinX());
            int maxX = GeoShapeCoordinateEncoder.INSTANCE.encodeX(rectangle.getMaxX());
            int minY = GeoShapeCoordinateEncoder.INSTANCE.encodeY(rectangle.getMinY());
            int maxY = GeoShapeCoordinateEncoder.INSTANCE.encodeY(rectangle.getMaxY());
            return reader.relateTile(minX, minY, maxX, maxY);
        }

        public DimensionalShapeType dimensionalShapeType() {
            return reader.getDimensionalShapeType();
        }

        public double weight() {
            return reader.getSumCentroidWeight();
        }

        /**
         * @return the latitude of the centroid of the shape
         */
        public double lat() {
            return reader.getCentroidY();
        }

        /**
         * @return the longitude of the centroid of the shape
         */
        public double lon() {
            return reader.getCentroidX();
        }

        public static GeoShapeValue missing(String missing) {
            try {
                Geometry geometry = MISSING_GEOMETRY_PARSER.fromWKT(missing);
                ShapeField.DecodedTriangle[] triangles = toDecodedTriangles(geometry);
                TriangleTreeWriter writer =
                    new TriangleTreeWriter(Arrays.asList(triangles), GeoShapeCoordinateEncoder.INSTANCE,
                        new CentroidCalculator(geometry));
                ByteBuffersDataOutput output = new ByteBuffersDataOutput();
                writer.writeTo(output);
                TriangleTreeReader reader = new TriangleTreeReader(GeoShapeCoordinateEncoder.INSTANCE);
                reader.reset(new BytesRef(output.toArrayCopy(), 0, Math.toIntExact(output.size())));
                return new GeoShapeValue(reader);
            } catch (IOException | ParseException e) {
                throw new IllegalArgumentException("Can't apply missing value [" + missing + "]", e);
            }
        }

        private static ShapeField.DecodedTriangle[] toDecodedTriangles(Geometry geometry) {
            GeoShapeIndexer indexer = new GeoShapeIndexer(true, "test");
            geometry = indexer.prepareForIndexing(geometry);
            List<IndexableField> fields = indexer.indexShape(null, geometry);
            ShapeField.DecodedTriangle[] triangles = new ShapeField.DecodedTriangle[fields.size()];
            final byte[] scratch = new byte[7 * Integer.BYTES];
            for (int i = 0; i < fields.size(); i++) {
                BytesRef bytesRef = fields.get(i).binaryValue();
                assert bytesRef.length == 7 * Integer.BYTES;
                System.arraycopy(bytesRef.bytes, bytesRef.offset, scratch, 0, 7 * Integer.BYTES);
                ShapeField.decodeTriangle(scratch, triangles[i] = new ShapeField.DecodedTriangle());
            }
            return triangles;
        }
    }

    public static class BoundingBox {
        public double top;
        public double bottom;
        public double negLeft;
        public double negRight;
        public double posLeft;
        public double posRight;

        private BoundingBox() {
        }

        private void reset(Extent extent, CoordinateEncoder coordinateEncoder) {
            this.top = coordinateEncoder.decodeY(extent.top);
            this.bottom = coordinateEncoder.decodeY(extent.bottom);

            if (extent.negLeft == Integer.MAX_VALUE && extent.negRight == Integer.MIN_VALUE) {
                this.negLeft = Double.POSITIVE_INFINITY;
                this.negRight = Double.NEGATIVE_INFINITY;
            } else {
                this.negLeft = coordinateEncoder.decodeX(extent.negLeft);
                this.negRight = coordinateEncoder.decodeX(extent.negRight);
            }

            if (extent.posLeft == Integer.MAX_VALUE && extent.posRight == Integer.MIN_VALUE) {
                this.posLeft = Double.POSITIVE_INFINITY;
                this.posRight = Double.NEGATIVE_INFINITY;
            } else {
                this.posLeft = coordinateEncoder.decodeX(extent.posLeft);
                this.posRight = coordinateEncoder.decodeX(extent.posRight);
            }
        }

        private void reset(GeoPoint point) {
            this.top = point.lat();
            this.bottom = point.lat();
            if (point.lon() < 0) {
                this.negLeft = point.lon();
                this.negRight = point.lon();
                this.posLeft = Double.POSITIVE_INFINITY;
                this.posRight = Double.NEGATIVE_INFINITY;
            } else {
                this.negLeft = Double.POSITIVE_INFINITY;
                this.negRight = Double.NEGATIVE_INFINITY;
                this.posLeft = point.lon();
                this.posRight = point.lon();
            }
        }

        /**
         * @return the minimum y-coordinate of the extent
         */
        public double minY() {
            return bottom;
        }

        /**
         * @return the maximum y-coordinate of the extent
         */
        public double maxY() {
            return top;
        }

        /**
         * @return the absolute minimum x-coordinate of the extent, whether it is positive or negative.
         */
        public double minX() {
            return Math.min(negLeft, posLeft);
        }

        /**
         * @return the absolute maximum x-coordinate of the extent, whether it is positive or negative.
         */
        public double maxX() {
            return Math.max(negRight, posRight);
        }
    }
}
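
As a usage sketch, GeoShapeValue.missing(...) above already performs the full write/read round trip through TriangleTreeWriter and TriangleTreeReader; the snippet below (the WKT polygon is illustrative) shows how a caller would obtain a value and its extent that way:

public class GeoShapeMissingValueSketch {
    static void example() {
        MultiGeoShapeValues.GeoShapeValue value =
            MultiGeoShapeValues.GeoShapeValue.missing("POLYGON ((100 0, 101 0, 101 1, 100 1, 100 0))");
        MultiGeoShapeValues.BoundingBox box = value.boundingBox();
        // minX()/maxX() collapse the separate negative/positive longitude ranges tracked by the box
        double minLon = box.minX();
        double maxLon = box.maxX();
        double minLat = box.minY();
        double maxLat = box.maxY();
    }
}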

@ -0,0 +1,418 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.spatial.index.mapper;

import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.util.BytesRef;

import java.io.IOException;

import static org.apache.lucene.geo.GeoUtils.orient;

/**
 * A reusable tree reader for a {@link org.elasticsearch.geometry.Geometry} previously serialized using
 * {@link TriangleTreeWriter}.
 *
 * This class supports checking bounding box
 * relations against the serialized triangle tree.
 *
 * -------------------------------------------
 * |        The binary format of the tree    |
 * -------------------------------------------
 * ------------------------------------------- --
 * |    centroid-x-coord (4 bytes)           |  |
 * -------------------------------------------  |
 * |    centroid-y-coord (4 bytes)           |  |
 * -------------------------------------------  |
 * |  DimensionalShapeType (1 byte)          |  | Centroid-related header
 * -------------------------------------------  |
 * |  Sum of weights (VLong 1-8 bytes)       |  |
 * ------------------------------------------- --
 * |         Extent (var-encoding)           |
 * -------------------------------------------
 * |         Triangle Tree                   |
 * -------------------------------------------
 * -------------------------------------------
 */
public class TriangleTreeReader {
    private final ByteArrayDataInput input;
    private final CoordinateEncoder coordinateEncoder;
    private final Tile2D tile2D;
    private final Extent extent;
    private int treeOffset;
    private int docValueOffset;

    public TriangleTreeReader(CoordinateEncoder coordinateEncoder) {
        this.coordinateEncoder = coordinateEncoder;
        this.tile2D = new Tile2D();
        this.extent = new Extent();
        this.input = new ByteArrayDataInput();
    }

    public void reset(BytesRef bytesRef) throws IOException {
        this.input.reset(bytesRef.bytes, bytesRef.offset, bytesRef.length);
        docValueOffset = bytesRef.offset;
        treeOffset = 0;
    }

    /**
     * returns the bounding box of the geometry in the format [minX, maxX, minY, maxY].
     */
    public Extent getExtent() {
        if (treeOffset == 0) {
            getSumCentroidWeight(); // skip CENTROID_HEADER + var-long sum-weight
            Extent.readFromCompressed(input, extent);
            treeOffset = input.getPosition();
        } else {
            input.setPosition(treeOffset);
        }
        return extent;
    }

    /**
     * returns the X coordinate of the centroid.
     */
    public double getCentroidX() {
        input.setPosition(docValueOffset + 0);
        return coordinateEncoder.decodeX(input.readInt());
    }

    /**
     * returns the Y coordinate of the centroid.
     */
    public double getCentroidY() {
        input.setPosition(docValueOffset + 4);
        return coordinateEncoder.decodeY(input.readInt());
    }

    public DimensionalShapeType getDimensionalShapeType() {
        input.setPosition(docValueOffset + 8);
        return DimensionalShapeType.readFrom(input);
    }

    public double getSumCentroidWeight() {
        input.setPosition(docValueOffset + 9);
        return Double.longBitsToDouble(input.readVLong());
    }

    /**
     * Compute the relation with the provided bounding box. If the result is QUERY_INSIDE
     * then the bounding box is within the shape.
     */
    public GeoRelation relateTile(int minX, int minY, int maxX, int maxY) {
        Extent extent = getExtent();
        int thisMaxX = extent.maxX();
        int thisMinX = extent.minX();
        int thisMaxY = extent.maxY();
        int thisMinY = extent.minY();

        // exclude north and east boundary intersections with tiles from intersection consideration
        // for consistent tiling definition of shapes on the boundaries of tiles
        if ((thisMinX >= maxX || thisMaxX < minX || thisMinY > maxY || thisMaxY <= minY)) {
            // shapes are disjoint
            return GeoRelation.QUERY_DISJOINT;
        }
        if (minX <= thisMinX && maxX >= thisMaxX && minY <= thisMinY && maxY >= thisMaxY) {
            // the rectangle fully contains the shape
            return GeoRelation.QUERY_CROSSES;
        }
        // quick checks failed, need to traverse the tree
        GeoRelation rel = GeoRelation.QUERY_DISJOINT;
        tile2D.setValues(minX, maxX, minY, maxY);
        byte metadata = input.readByte();
        if ((metadata & 1 << 2) == 1 << 2) { // component in this node is a point
            int x = Math.toIntExact(thisMaxX - input.readVLong());
            int y = Math.toIntExact(thisMaxY - input.readVLong());
            if (tile2D.contains(x, y)) {
                return GeoRelation.QUERY_CROSSES;
            }
            thisMinX = x;
        } else if ((metadata & 1 << 3) == 1 << 3) { // component in this node is a line
            int aX = Math.toIntExact(thisMaxX - input.readVLong());
            int aY = Math.toIntExact(thisMaxY - input.readVLong());
            int bX = Math.toIntExact(thisMaxX - input.readVLong());
            int bY = Math.toIntExact(thisMaxY - input.readVLong());
            if (tile2D.intersectsLine(aX, aY, bX, bY)) {
                return GeoRelation.QUERY_CROSSES;
            }
            thisMinX = aX;
        } else { // component in this node is a triangle
            int aX = Math.toIntExact(thisMaxX - input.readVLong());
            int aY = Math.toIntExact(thisMaxY - input.readVLong());
            int bX = Math.toIntExact(thisMaxX - input.readVLong());
            int bY = Math.toIntExact(thisMaxY - input.readVLong());
            int cX = Math.toIntExact(thisMaxX - input.readVLong());
            int cY = Math.toIntExact(thisMaxY - input.readVLong());
            boolean ab = (metadata & 1 << 4) == 1 << 4;
            boolean bc = (metadata & 1 << 5) == 1 << 5;
            boolean ca = (metadata & 1 << 6) == 1 << 6;
            rel = tile2D.relateTriangle(aX, aY, ab, bX, bY, bc, cX, cY, ca);
            if (rel == GeoRelation.QUERY_CROSSES) {
                return GeoRelation.QUERY_CROSSES;
            }
            thisMinX = aX;
        }
        if ((metadata & 1 << 0) == 1 << 0) { // left != null
            GeoRelation left = relateTile(tile2D, false, thisMaxX, thisMaxY);
            if (left == GeoRelation.QUERY_CROSSES) {
                return GeoRelation.QUERY_CROSSES;
            } else if (left == GeoRelation.QUERY_INSIDE) {
                rel = left;
            }
        }
        if ((metadata & 1 << 1) == 1 << 1) { // right != null
            if (tile2D.maxX >= thisMinX) {
                GeoRelation right = relateTile(tile2D, false, thisMaxX, thisMaxY);
                if (right == GeoRelation.QUERY_CROSSES) {
                    return GeoRelation.QUERY_CROSSES;
                } else if (right == GeoRelation.QUERY_INSIDE) {
                    rel = right;
                }
            }
        }

        return rel;
    }

    private GeoRelation relateTile(Tile2D tile2D, boolean splitX, int parentMaxX, int parentMaxY) {
        int thisMaxX = Math.toIntExact(parentMaxX - input.readVLong());
        int thisMaxY = Math.toIntExact(parentMaxY - input.readVLong());
        GeoRelation rel = GeoRelation.QUERY_DISJOINT;
        int size = input.readVInt();
        if (tile2D.minY <= thisMaxY && tile2D.minX <= thisMaxX) {
            byte metadata = input.readByte();
            int thisMinX;
            int thisMinY;
            if ((metadata & 1 << 2) == 1 << 2) { // component in this node is a point
                int x = Math.toIntExact(thisMaxX - input.readVLong());
                int y = Math.toIntExact(thisMaxY - input.readVLong());
                if (tile2D.contains(x, y)) {
                    return GeoRelation.QUERY_CROSSES;
                }
                thisMinX = x;
                thisMinY = y;
            } else if ((metadata & 1 << 3) == 1 << 3) { // component in this node is a line
                int aX = Math.toIntExact(thisMaxX - input.readVLong());
                int aY = Math.toIntExact(thisMaxY - input.readVLong());
                int bX = Math.toIntExact(thisMaxX - input.readVLong());
                int bY = Math.toIntExact(thisMaxY - input.readVLong());
                if (tile2D.intersectsLine(aX, aY, bX, bY)) {
                    return GeoRelation.QUERY_CROSSES;
                }
                thisMinX = aX;
                thisMinY = Math.min(aY, bY);
            } else { // component in this node is a triangle
                int aX = Math.toIntExact(thisMaxX - input.readVLong());
                int aY = Math.toIntExact(thisMaxY - input.readVLong());
                int bX = Math.toIntExact(thisMaxX - input.readVLong());
                int bY = Math.toIntExact(thisMaxY - input.readVLong());
                int cX = Math.toIntExact(thisMaxX - input.readVLong());
                int cY = Math.toIntExact(thisMaxY - input.readVLong());
                boolean ab = (metadata & 1 << 4) == 1 << 4;
                boolean bc = (metadata & 1 << 5) == 1 << 5;
                boolean ca = (metadata & 1 << 6) == 1 << 6;
                rel = tile2D.relateTriangle(aX, aY, ab, bX, bY, bc, cX, cY, ca);
                if (rel == GeoRelation.QUERY_CROSSES) {
                    return GeoRelation.QUERY_CROSSES;
                }
                thisMinX = aX;
                thisMinY = Math.min(Math.min(aY, bY), cY);
            }
            if ((metadata & 1 << 0) == 1 << 0) { // left != null
                GeoRelation left = relateTile(tile2D, !splitX, thisMaxX, thisMaxY);
                if (left == GeoRelation.QUERY_CROSSES) {
                    return GeoRelation.QUERY_CROSSES;
                } else if (left == GeoRelation.QUERY_INSIDE) {
                    rel = left;
                }
            }
            if ((metadata & 1 << 1) == 1 << 1) { // right != null
                int rightSize = input.readVInt();
                if ((splitX == false && tile2D.maxY >= thisMinY) || (splitX && tile2D.maxX >= thisMinX)) {
                    GeoRelation right = relateTile(tile2D, !splitX, thisMaxX, thisMaxY);
                    if (right == GeoRelation.QUERY_CROSSES) {
                        return GeoRelation.QUERY_CROSSES;
                    } else if (right == GeoRelation.QUERY_INSIDE) {
                        rel = right;
                    }
                } else {
                    input.skipBytes(rightSize);
                }
            }
        } else {
            input.skipBytes(size);
        }
        return rel;
    }

    private static class Tile2D {

        protected int minX;
        protected int maxX;
        protected int minY;
        protected int maxY;

        Tile2D() {
        }

        private void setValues(int minX, int maxX, int minY, int maxY) {
            this.minX = minX;
            this.maxX = maxX;
            this.minY = minY;
            this.maxY = maxY;
        }

        /**
         * Checks if the rectangle contains the provided point
         **/
        public boolean contains(int x, int y) {
            return (x <= minX || x > maxX || y < minY || y >= maxY) == false;
        }

        /**
         * Checks if the rectangle intersects the provided line
         **/
        private boolean intersectsLine(int aX, int aY, int bX, int bY) {
            // 1. query contains any line points
            if (contains(aX, aY) || contains(bX, bY)) {
                return true;
            }

            // compute bounding box of line
            int tMinX = StrictMath.min(aX, bX);
            int tMaxX = StrictMath.max(aX, bX);
            int tMinY = StrictMath.min(aY, bY);
            int tMaxY = StrictMath.max(aY, bY);

            // 2. check bounding boxes are disjoint
            if (tMaxX <= minX || tMinX > maxX || tMinY > maxY || tMaxY <= minY) {
                return false;
            }

            // 3. last ditch effort: check crossings
            if (edgeIntersectsQuery(aX, aY, bX, bY)) {
                return true;
            }
            return false;
        }

        /**
         * Checks if the rectangle intersects the provided triangle
         **/
        private GeoRelation relateTriangle(int aX, int aY, boolean ab, int bX, int bY, boolean bc, int cX, int cY, boolean ca) {
            // compute bounding box of triangle
            int tMinX = StrictMath.min(StrictMath.min(aX, bX), cX);
            int tMaxX = StrictMath.max(StrictMath.max(aX, bX), cX);
            int tMinY = StrictMath.min(StrictMath.min(aY, bY), cY);
            int tMaxY = StrictMath.max(StrictMath.max(aY, bY), cY);

            // 1. check bounding boxes are disjoint, where north and east boundaries are not considered as crossing
            if (tMaxX <= minX || tMinX > maxX || tMinY > maxY || tMaxY <= minY) {
                return GeoRelation.QUERY_DISJOINT;
            }

            // 2. query contains any triangle points
            if (contains(aX, aY) || contains(bX, bY) || contains(cX, cY)) {
                return GeoRelation.QUERY_CROSSES;
            }

            boolean within = false;
            if (edgeIntersectsQuery(aX, aY, bX, bY)) {
                if (ab) {
                    return GeoRelation.QUERY_CROSSES;
                }
                within = true;
            }

            // right
            if (edgeIntersectsQuery(bX, bY, cX, cY)) {
                if (bc) {
                    return GeoRelation.QUERY_CROSSES;
                }
                within = true;
            }

            if (edgeIntersectsQuery(cX, cY, aX, aY)) {
                if (ca) {
                    return GeoRelation.QUERY_CROSSES;
                }
                within = true;
            }

            if (within || pointInTriangle(tMinX, tMaxX, tMinY, tMaxY, minX, minY, aX, aY, bX, bY, cX, cY)) {
                return GeoRelation.QUERY_INSIDE;
            }

            return GeoRelation.QUERY_DISJOINT;
        }

        /**
         * returns true if the edge (defined by (ax, ay) (bx, by)) intersects the query
         */
        private boolean edgeIntersectsQuery(int ax, int ay, int bx, int by) {
            // shortcut: check bboxes of edges are disjoint
            if (boxesAreDisjoint(Math.min(ax, bx), Math.max(ax, bx), Math.min(ay, by), Math.max(ay, by),
                minX, maxX, minY, maxY)) {
                return false;
            }

            // top
            if (orient(ax, ay, bx, by, minX, maxY) * orient(ax, ay, bx, by, maxX, maxY) <= 0 &&
                orient(minX, maxY, maxX, maxY, ax, ay) * orient(minX, maxY, maxX, maxY, bx, by) <= 0) {
                return true;
            }

            // right
            if (orient(ax, ay, bx, by, maxX, maxY) * orient(ax, ay, bx, by, maxX, minY) <= 0 &&
                orient(maxX, maxY, maxX, minY, ax, ay) * orient(maxX, maxY, maxX, minY, bx, by) <= 0) {
                return true;
            }

            // bottom
            if (orient(ax, ay, bx, by, maxX, minY) * orient(ax, ay, bx, by, minX, minY) <= 0 &&
                orient(maxX, minY, minX, minY, ax, ay) * orient(maxX, minY, minX, minY, bx, by) <= 0) {
                return true;
            }

            // left
            if (orient(ax, ay, bx, by, minX, minY) * orient(ax, ay, bx, by, minX, maxY) <= 0 &&
                orient(minX, minY, minX, maxY, ax, ay) * orient(minX, minY, minX, maxY, bx, by) <= 0) {
                return true;
            }

            return false;
        }

        /**
         * Compute whether the given x, y point is in a triangle; uses the winding order method
         */
        private static boolean pointInTriangle(double minX, double maxX, double minY, double maxY, double x, double y,
                                               double aX, double aY, double bX, double bY, double cX, double cY) {
            // check the bounding box because if the triangle is degenerated, e.g points and lines, we need to filter out
            // coplanar points that are not part of the triangle.
            if (x >= minX && x <= maxX && y >= minY && y <= maxY) {
                int a = orient(x, y, aX, aY, bX, bY);
                int b = orient(x, y, bX, bY, cX, cY);
                if (a == 0 || b == 0 || a < 0 == b < 0) {
                    int c = orient(x, y, cX, cY, aX, aY);
                    return c == 0 || (c < 0 == (b < 0 || a < 0));
                }
                return false;
            } else {
                return false;
            }
        }

        /**
         * utility method to check if two boxes are disjoint
         */
        private static boolean boxesAreDisjoint(final int aMinX, final int aMaxX, final int aMinY, final int aMaxY,
                                                final int bMinX, final int bMaxX, final int bMinY, final int bMaxY) {
            return (aMaxX < bMinX || aMinX > bMaxX || aMaxY < bMinY || aMinY > bMaxY);
        }
    }
}
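
A small sketch of how the reader is driven, assuming `encoded` is a BytesRef produced by TriangleTreeWriter with the same GeoShapeCoordinateEncoder and that the tile bounds are given in degrees:

import org.apache.lucene.util.BytesRef;

import java.io.IOException;

public class TriangleTreeReaderSketch {
    // Relates an encoded shape to a lon/lat rectangle using the encoder's integer space.
    static GeoRelation relate(BytesRef encoded, double minLon, double maxLon, double minLat, double maxLat) throws IOException {
        TriangleTreeReader reader = new TriangleTreeReader(GeoShapeCoordinateEncoder.INSTANCE);
        reader.reset(encoded);
        return reader.relateTile(
            GeoShapeCoordinateEncoder.INSTANCE.encodeX(minLon),
            GeoShapeCoordinateEncoder.INSTANCE.encodeY(minLat),
            GeoShapeCoordinateEncoder.INSTANCE.encodeX(maxLon),
            GeoShapeCoordinateEncoder.INSTANCE.encodeY(maxLat));
    }
}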

@ -0,0 +1,254 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.spatial.index.mapper;

import org.apache.lucene.document.ShapeField;
import org.apache.lucene.store.ByteBuffersDataOutput;
import org.apache.lucene.util.ArrayUtil;

import java.io.IOException;
import java.util.Comparator;
import java.util.List;

/**
 * This is a tree-writer that serializes a list of {@link ShapeField.DecodedTriangle} as an interval tree
 * into a byte array.
 */
public class TriangleTreeWriter {

    private final TriangleTreeNode node;
    private final CoordinateEncoder coordinateEncoder;
    private final CentroidCalculator centroidCalculator;
    private Extent extent;

    public TriangleTreeWriter(List<ShapeField.DecodedTriangle> triangles, CoordinateEncoder coordinateEncoder,
                              CentroidCalculator centroidCalculator) {
        this.coordinateEncoder = coordinateEncoder;
        this.centroidCalculator = centroidCalculator;
        this.extent = new Extent();
        this.node = build(triangles);
    }

    /** Serialize the interval tree in the provided data output */
    public void writeTo(ByteBuffersDataOutput out) throws IOException {
        out.writeInt(coordinateEncoder.encodeX(centroidCalculator.getX()));
        out.writeInt(coordinateEncoder.encodeY(centroidCalculator.getY()));
        centroidCalculator.getDimensionalShapeType().writeTo(out);
        out.writeVLong(Double.doubleToLongBits(centroidCalculator.sumWeight()));
        extent.writeCompressed(out);
        node.writeTo(out);
    }

    private void addToExtent(TriangleTreeNode treeNode) {
        extent.addRectangle(treeNode.minX, treeNode.minY, treeNode.maxX, treeNode.maxY);
    }

    private TriangleTreeNode build(List<ShapeField.DecodedTriangle> triangles) {
        if (triangles.size() == 1) {
            TriangleTreeNode triangleTreeNode = new TriangleTreeNode(triangles.get(0));
            addToExtent(triangleTreeNode);
            return triangleTreeNode;
        }
        TriangleTreeNode[] nodes = new TriangleTreeNode[triangles.size()];
        for (int i = 0; i < triangles.size(); i++) {
            nodes[i] = new TriangleTreeNode(triangles.get(i));
            addToExtent(nodes[i]);
        }
        return createTree(nodes, 0, triangles.size() - 1, true);
    }

    /** Creates tree from sorted components (with range low and high inclusive) */
    private TriangleTreeNode createTree(TriangleTreeNode[] components, int low, int high, boolean splitX) {
        if (low > high) {
            return null;
        }
        final int mid = (low + high) >>> 1;
        if (low < high) {
            Comparator<TriangleTreeNode> comparator;
            if (splitX) {
                comparator = Comparator.comparingInt((TriangleTreeNode left) -> left.minX).thenComparingInt(left -> left.maxX);
            } else {
                comparator = Comparator.comparingInt((TriangleTreeNode left) -> left.minY).thenComparingInt(left -> left.maxY);
            }
            ArrayUtil.select(components, low, high + 1, mid, comparator);
        }
        TriangleTreeNode newNode = components[mid];
        // find children
        newNode.left = createTree(components, low, mid - 1, !splitX);
        newNode.right = createTree(components, mid + 1, high, !splitX);

        // pull up max values to this node
        if (newNode.left != null) {
            newNode.maxX = Math.max(newNode.maxX, newNode.left.maxX);
            newNode.maxY = Math.max(newNode.maxY, newNode.left.maxY);
        }
        if (newNode.right != null) {
            newNode.maxX = Math.max(newNode.maxX, newNode.right.maxX);
            newNode.maxY = Math.max(newNode.maxY, newNode.right.maxY);
        }
        return newNode;
    }

    /** Represents an inner node of the tree. */
    private static class TriangleTreeNode {
        /** type of component */
        public enum TYPE {
            POINT, LINE, TRIANGLE
        }
        /** minimum latitude of this geometry's bounding box area */
        private int minY;
        /** maximum latitude of this geometry's bounding box area */
        private int maxY;
        /** minimum longitude of this geometry's bounding box area */
        private int minX;
        /** maximum longitude of this geometry's bounding box area */
        private int maxX;
        // child components, or null.
        private TriangleTreeNode left;
        private TriangleTreeNode right;
        /** the component (point, line or triangle) stored in this node */
        private final ShapeField.DecodedTriangle component;
        /** component type */
        private final TYPE type;

        private TriangleTreeNode(ShapeField.DecodedTriangle component) {
            this.minY = Math.min(Math.min(component.aY, component.bY), component.cY);
            this.maxY = Math.max(Math.max(component.aY, component.bY), component.cY);
            this.minX = Math.min(Math.min(component.aX, component.bX), component.cX);
            this.maxX = Math.max(Math.max(component.aX, component.bX), component.cX);
            this.component = component;
            this.type = getType(component);
        }

        private static TYPE getType(ShapeField.DecodedTriangle triangle) {
            // the issue in lucene: https://github.com/apache/lucene-solr/pull/927
            // can help here
            if (triangle.aX == triangle.bX && triangle.aY == triangle.bY) {
                if (triangle.aX == triangle.cX && triangle.aY == triangle.cY) {
                    return TYPE.POINT;
                }
                return TYPE.LINE;
            } else if ((triangle.aX == triangle.cX && triangle.aY == triangle.cY) ||
                (triangle.bX == triangle.cX && triangle.bY == triangle.cY)) {
                return TYPE.LINE;
            } else {
                return TYPE.TRIANGLE;
            }
        }

        private void writeTo(ByteBuffersDataOutput out) throws IOException {
            ByteBuffersDataOutput scratchBuffer = ByteBuffersDataOutput.newResettableInstance();
            writeMetadata(out);
            writeComponent(out);
            if (left != null) {
                left.writeNode(out, maxX, maxY, scratchBuffer);
            }
            if (right != null) {
                right.writeNode(out, maxX, maxY, scratchBuffer);
            }
        }

        private void writeNode(ByteBuffersDataOutput out, int parentMaxX, int parentMaxY,
                               ByteBuffersDataOutput scratchBuffer) throws IOException {
            out.writeVLong((long) parentMaxX - maxX);
            out.writeVLong((long) parentMaxY - maxY);
            int size = nodeSize(false, parentMaxX, parentMaxY, scratchBuffer);
            out.writeVInt(size);
            writeMetadata(out);
            writeComponent(out);
            if (left != null) {
                left.writeNode(out, maxX, maxY, scratchBuffer);
            }
            if (right != null) {
                int rightSize = right.nodeSize(true, maxX, maxY, scratchBuffer);
                out.writeVInt(rightSize);
                right.writeNode(out, maxX, maxY, scratchBuffer);
            }
        }

        private void writeMetadata(ByteBuffersDataOutput out) {
            byte metadata = 0;
            metadata |= (left != null) ? (1 << 0) : 0;
            metadata |= (right != null) ? (1 << 1) : 0;
            if (type == TYPE.POINT) {
                metadata |= (1 << 2);
            } else if (type == TYPE.LINE) {
                metadata |= (1 << 3);
            } else {
                metadata |= (component.ab) ? (1 << 4) : 0;
                metadata |= (component.bc) ? (1 << 5) : 0;
                metadata |= (component.ca) ? (1 << 6) : 0;
            }
            out.writeByte(metadata);
        }

        private void writeComponent(ByteBuffersDataOutput out) throws IOException {
            if (type == TYPE.POINT) {
                out.writeVLong((long) maxX - component.aX);
                out.writeVLong((long) maxY - component.aY);
            } else if (type == TYPE.LINE) {
                out.writeVLong((long) maxX - component.aX);
                out.writeVLong((long) maxY - component.aY);
                out.writeVLong((long) maxX - component.bX);
                out.writeVLong((long) maxY - component.bY);
            } else {
                out.writeVLong((long) maxX - component.aX);
                out.writeVLong((long) maxY - component.aY);
                out.writeVLong((long) maxX - component.bX);
                out.writeVLong((long) maxY - component.bY);
                out.writeVLong((long) maxX - component.cX);
                out.writeVLong((long) maxY - component.cY);
            }
        }

        private int nodeSize(boolean includeBox, int parentMaxX, int parentMaxY, ByteBuffersDataOutput scratchBuffer) throws IOException {
            int size = 0;
            size++; // metadata
            size += componentSize(scratchBuffer);
            if (left != null) {
                size += left.nodeSize(true, maxX, maxY, scratchBuffer);
            }
            if (right != null) {
                int rightSize = right.nodeSize(true, maxX, maxY, scratchBuffer);
                scratchBuffer.reset();
                scratchBuffer.writeVLong(rightSize);
                size += scratchBuffer.size(); // jump size
                size += rightSize;
            }
            if (includeBox) {
                int jumpSize = size;
                scratchBuffer.reset();
                scratchBuffer.writeVLong((long) parentMaxX - maxX);
                scratchBuffer.writeVLong((long) parentMaxY - maxY);
                scratchBuffer.writeVLong(jumpSize);
                size += scratchBuffer.size(); // box size
            }
            return size;
        }

        private int componentSize(ByteBuffersDataOutput scratchBuffer) throws IOException {
            scratchBuffer.reset();
            if (type == TYPE.POINT) {
                scratchBuffer.writeVLong((long) maxX - component.aX);
                scratchBuffer.writeVLong((long) maxY - component.aY);
            } else if (type == TYPE.LINE) {
                scratchBuffer.writeVLong((long) maxX - component.aX);
                scratchBuffer.writeVLong((long) maxY - component.aY);
                scratchBuffer.writeVLong((long) maxX - component.bX);
                scratchBuffer.writeVLong((long) maxY - component.bY);
            } else {
                scratchBuffer.writeVLong((long) maxX - component.aX);
                scratchBuffer.writeVLong((long) maxY - component.aY);
                scratchBuffer.writeVLong((long) maxX - component.bX);
                scratchBuffer.writeVLong((long) maxY - component.bY);
                scratchBuffer.writeVLong((long) maxX - component.cX);
                scratchBuffer.writeVLong((long) maxY - component.cY);
            }
            return Math.toIntExact(scratchBuffer.size());
        }
    }
}
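
For reference, a documentation-only sketch of the metadata byte that writeMetadata() emits for every node and that TriangleTreeReader tests bit by bit; the constant names are invented here purely for readability:

public final class TriangleTreeMetadataBits {
    public static final int HAS_LEFT = 1 << 0;      // node has a left child
    public static final int HAS_RIGHT = 1 << 1;     // node has a right child
    public static final int IS_POINT = 1 << 2;      // component is a point
    public static final int IS_LINE = 1 << 3;       // component is a line
    public static final int AB_FROM_SHAPE = 1 << 4; // triangle edge a-b belongs to the original shape
    public static final int BC_FROM_SHAPE = 1 << 5; // triangle edge b-c belongs to the original shape
    public static final int CA_FROM_SHAPE = 1 << 6; // triangle edge c-a belongs to the original shape

    private TriangleTreeMetadataBits() {}
}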

@ -0,0 +1,44 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.xpack.spatial;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;

/** Runs yaml rest tests */
public class SpatialClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    public SpatialClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws Exception {
        return ESClientYamlSuiteTestCase.createParameters();
    }
}

@ -0,0 +1,399 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.spatial.index.mapper;

import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.geo.GeometryTestUtils;
import org.elasticsearch.geometry.Geometry;
import org.elasticsearch.geometry.GeometryCollection;
import org.elasticsearch.geometry.Line;
import org.elasticsearch.geometry.LinearRing;
import org.elasticsearch.geometry.MultiLine;
import org.elasticsearch.geometry.MultiPoint;
import org.elasticsearch.geometry.Point;
import org.elasticsearch.geometry.Polygon;
import org.elasticsearch.geometry.ShapeType;
import org.elasticsearch.geometry.utils.GeographyValidator;
import org.elasticsearch.geometry.utils.WellKnownText;
import org.elasticsearch.test.ESTestCase;

import java.util.ArrayList;
import java.util.Collections;

import static org.elasticsearch.xpack.spatial.index.mapper.DimensionalShapeType.LINE;
import static org.elasticsearch.xpack.spatial.index.mapper.DimensionalShapeType.POINT;
import static org.elasticsearch.xpack.spatial.index.mapper.DimensionalShapeType.POLYGON;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;

public class CentroidCalculatorTests extends ESTestCase {
    private static final double DELTA = 0.000000001;

    public void testPoint() {
        Point point = GeometryTestUtils.randomPoint(false);
        CentroidCalculator calculator = new CentroidCalculator(point);
        assertThat(calculator.getX(), equalTo(GeoUtils.normalizeLon(point.getX())));
        assertThat(calculator.getY(), equalTo(GeoUtils.normalizeLat(point.getY())));
        assertThat(calculator.sumWeight(), equalTo(1.0));
        assertThat(calculator.getDimensionalShapeType(), equalTo(POINT));
    }

    public void testPolygonWithSmallTrianglesOfZeroWeight() throws Exception {
        Geometry geometry = new WellKnownText(false, new GeographyValidator(true))
            .fromWKT("POLYGON((-4.385064 55.2259599,-4.385056 55.2259224,-4.3850466 55.2258994,-4.3849755 55.2258574," +
                "-4.3849339 55.2258589,-4.3847033 55.2258742,-4.3846805 55.2258818,-4.3846282 55.2259132,-4.3846215 55.2259247," +
                "-4.3846121 55.2259683,-4.3846147 55.2259798,-4.3846369 55.2260157,-4.3846472 55.2260241," +
                "-4.3846697 55.2260409,-4.3846952 55.2260562,-4.384765 55.22608,-4.3848199 55.2260861,-4.3848481 55.2260845," +
                "-4.3849245 55.2260761,-4.3849393 55.22607,-4.3849996 55.2260432,-4.3850131 55.2260364,-4.3850426 55.2259989," +
                "-4.385064 55.2259599),(-4.3850104 55.2259583,-4.385005 55.2259752,-4.384997 55.2259892,-4.3849339 55.2259981," +
                "-4.3849272 55.2259308,-4.3850016 55.2259262,-4.385005 55.2259377,-4.3850104 55.2259583)," +
                "(-4.3849996 55.2259193,-4.3847502 55.2259331,-4.3847548 55.2258921,-4.3848012 55.2258895," +
                "-4.3849219 55.2258811,-4.3849514 55.2258818,-4.3849728 55.2258933,-4.3849996 55.2259193)," +
                "(-4.3849917 55.2259984,-4.3849849 55.2260103,-4.3849771 55.2260192,-4.3849701 55.2260019,-4.3849917 55.2259984)," +
                "(-4.3846608 55.2259374,-4.384663 55.2259316,-4.3846711 55.2259201,-4.3846992 55.225904," +
                "-4.384718 55.2258941,-4.3847434 55.2258927,-4.3847314 55.2259407,-4.3849098 55.2259316,-4.3849098 55.2259492," +
                "-4.3848843 55.2259515,-4.3849017 55.2260119,-4.3849567 55.226005,-4.3849701 55.2260272,-4.3849299 55.2260486," +
                "-4.3849192 55.2260295,-4.384883 55.2260188,-4.3848776 55.2260119,-4.3848441 55.2260149,-4.3848441 55.2260226," +
                "-4.3847864 55.2260241,-4.384722 55.2259652,-4.3847053 55.2259706,-4.384683 55.225954,-4.3846608 55.2259374)," +
                "(-4.3846541 55.2259549,-4.384698 55.2259883,-4.3847173 55.2259828,-4.3847743 55.2260333,-4.3847891 55.2260356," +
                "-4.3848146 55.226031,-4.3848199 55.2260409,-4.3848387 55.2260417,-4.3848494 55.2260593,-4.3848092 55.2260616," +
                "-4.3847623 55.2260539,-4.3847341 55.2260432,-4.3847046 55.2260279,-4.3846738 55.2260062,-4.3846496 55.2259844," +
                "-4.3846429 55.2259737,-4.3846523 55.2259714,-4.384651 55.2259629,-4.3846541 55.2259549)," +
                "(-4.3846608 55.2259374,-4.3846559 55.2259502,-4.3846541 55.2259549,-4.3846608 55.2259374))");
        CentroidCalculator calculator = new CentroidCalculator(geometry);
        assertThat(calculator.getX(), closeTo(-4.3848, 1e-4));
        assertThat(calculator.getY(), closeTo(55.22595, 1e-4));
        assertThat(calculator.sumWeight(), closeTo(0, 1e-5));
        assertThat(calculator.getDimensionalShapeType(), equalTo(POLYGON));
    }

    public void testLine() {
        double[] y = new double[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
        double[] x = new double[] { 10, 20, 30, 40, 50, 60, 70, 80, 90, 100 };
        double[] yRunningAvg = new double[] { 1, 1.5, 2.0, 2.5, 3, 3.5, 4, 4.5, 5, 5.5 };
        double[] xRunningAvg = new double[] { 10, 15, 20, 25, 30, 35, 40, 45, 50, 55 };

        Point point = new Point(x[0], y[0]);
        CentroidCalculator calculator = new CentroidCalculator(point);
        assertThat(calculator.getX(), equalTo(xRunningAvg[0]));
        assertThat(calculator.getY(), equalTo(yRunningAvg[0]));
        for (int i = 1; i < 10; i++) {
            double[] subX = new double[i + 1];
            double[] subY = new double[i + 1];
            System.arraycopy(x, 0, subX, 0, i + 1);
            System.arraycopy(y, 0, subY, 0, i + 1);
            Geometry geometry = new Line(subX, subY);
            calculator = new CentroidCalculator(geometry);
            assertEquals(xRunningAvg[i], calculator.getX(), DELTA);
            assertEquals(yRunningAvg[i], calculator.getY(), DELTA);
        }
        CentroidCalculator otherCalculator = new CentroidCalculator(new Point(0, 0));
        calculator.addFrom(otherCalculator);
        assertEquals(55.0, calculator.getX(), DELTA);
        assertEquals(5.5, calculator.getY(), DELTA);
    }

    public void testMultiLine() {
        MultiLine multiLine = GeometryTestUtils.randomMultiLine(false);
        double sumLineX = 0;
        double sumLineY = 0;
        double sumLineWeight = 0;
        for (Line line : multiLine) {
            CentroidCalculator calculator = new CentroidCalculator(line);
            sumLineX += calculator.compSumX.value();
            sumLineY += calculator.compSumY.value();
            sumLineWeight += calculator.compSumWeight.value();
        }
        CentroidCalculator calculator = new CentroidCalculator(multiLine);

        assertEquals(sumLineX / sumLineWeight, calculator.getX(), DELTA);
        assertEquals(sumLineY / sumLineWeight, calculator.getY(), DELTA);
        assertEquals(sumLineWeight, calculator.sumWeight(), DELTA);
        assertThat(calculator.getDimensionalShapeType(), equalTo(LINE));
    }

    public void testMultiPoint() {
        MultiPoint multiPoint = GeometryTestUtils.randomMultiPoint(false);
        double sumPointX = 0;
        double sumPointY = 0;
        double sumPointWeight = 0;
        for (Point point : multiPoint) {
            sumPointX += point.getX();
            sumPointY += point.getY();
            sumPointWeight += 1;
        }

        CentroidCalculator calculator = new CentroidCalculator(multiPoint);
        assertEquals(sumPointX / sumPointWeight, calculator.getX(), DELTA);
        assertEquals(sumPointY / sumPointWeight, calculator.getY(), DELTA);
        assertEquals(sumPointWeight, calculator.sumWeight(), DELTA);
        assertThat(calculator.getDimensionalShapeType(), equalTo(POINT));

    }

    public void testRoundingErrorAndNormalization() {
        double lonA = GeometryTestUtils.randomLon();
        double latA = GeometryTestUtils.randomLat();
        double lonB = randomValueOtherThanMany((l) -> Math.abs(l - lonA) <= GeoUtils.TOLERANCE, GeometryTestUtils::randomLon);
        double latB = randomValueOtherThanMany((l) -> Math.abs(l - latA) <= GeoUtils.TOLERANCE, GeometryTestUtils::randomLat);
        {
            Line line = new Line(new double[]{180.0, 180.0}, new double[]{latA, latB});
            assertThat(new CentroidCalculator(line).getX(), anyOf(equalTo(179.99999999999997),
                equalTo(180.0), equalTo(-179.99999999999997)));
        }

        {
            Line line = new Line(new double[]{-180.0, -180.0}, new double[]{latA, latB});
            assertThat(new CentroidCalculator(line).getX(), anyOf(equalTo(179.99999999999997),
                equalTo(180.0), equalTo(-179.99999999999997)));
        }

        {
            Line line = new Line(new double[]{lonA, lonB}, new double[] { 90.0, 90.0 });
            assertThat(new CentroidCalculator(line).getY(), anyOf(equalTo(90.0), equalTo(89.99999999999999)));
        }

        {
            Line line = new Line(new double[]{lonA, lonB}, new double[] { -90.0, -90.0 });
            assertThat(new CentroidCalculator(line).getY(), anyOf(equalTo(-90.0), equalTo(-89.99999999999999)));
        }
    }

// test that the centroid calculation is agnostic to orientation
|
||||
public void testPolyonWithHole() {
|
||||
for (boolean ccwOuter : List.of(true, false)) {
|
||||
for (boolean ccwInner : List.of(true, false)) {
|
||||
final LinearRing outer, inner;
|
||||
if (ccwOuter) {
|
||||
outer = new LinearRing(new double[]{-50, 50, 50, -50, -50}, new double[]{-50, -50, 50, 50, -50});
|
||||
} else {
|
||||
outer = new LinearRing(new double[]{-50, -50, 50, 50, -50}, new double[]{-50, 50, 50, -50, -50});
|
||||
}
|
||||
if (ccwInner) {
|
||||
inner = new LinearRing(new double[]{-40, 30, 30, -40, -40}, new double[]{-40, -40, 30, 30, -40});
|
||||
} else {
|
||||
inner = new LinearRing(new double[]{-40, -40, 30, 30, -40}, new double[]{-40, 30, 30, -40, -40});
|
||||
}
|
||||
final double POLY_CENTROID = 4.803921568627451;
|
||||
CentroidCalculator calculator = new CentroidCalculator(new Polygon(outer, Collections.singletonList(inner)));
|
||||
assertEquals(POLY_CENTROID, calculator.getX(), DELTA);
|
||||
assertEquals(POLY_CENTROID, calculator.getY(), DELTA);
|
||||
assertThat(calculator.sumWeight(), equalTo(5100.0));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void testLineAsClosedPoint() {
|
||||
double lon = GeometryTestUtils.randomLon();
|
||||
double lat = GeometryTestUtils.randomLat();
|
||||
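        // a zero-length line (identical endpoints) is expected to collapse to a single point with weight 1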
CentroidCalculator calculator = new CentroidCalculator(new Line(new double[] {lon, lon}, new double[] { lat, lat}));
|
||||
assertThat(calculator.getX(), equalTo(GeoUtils.normalizeLon(lon)));
|
||||
assertThat(calculator.getY(), equalTo(GeoUtils.normalizeLat(lat)));
|
||||
assertThat(calculator.sumWeight(), equalTo(1.0));
|
||||
}
|
||||
|
||||
public void testPolygonAsLine() {
|
||||
// create a line that traces itself as a polygon
|
||||
Line sourceLine = GeometryTestUtils.randomLine(false);
|
||||
double[] x = new double[2 * sourceLine.length() - 1];
|
||||
double[] y = new double[2 * sourceLine.length() - 1];
|
||||
int idx = 0;
|
||||
for (int i = 0; i < sourceLine.length(); i++) {
|
||||
x[idx] = sourceLine.getX(i);
|
||||
y[idx] = sourceLine.getY(i);
|
||||
idx += 1;
|
||||
}
|
||||
for (int i = sourceLine.length() - 2; i >= 0; i--) {
|
||||
x[idx] = sourceLine.getX(i);
|
||||
y[idx] = sourceLine.getY(i);
|
||||
idx += 1;
|
||||
}
|
||||
|
||||
Line line = new Line(x, y);
|
||||
CentroidCalculator lineCalculator = new CentroidCalculator(line);
|
||||
|
||||
Polygon polygon = new Polygon(new LinearRing(x, y));
|
||||
CentroidCalculator calculator = new CentroidCalculator(polygon);
|
||||
|
||||
// sometimes precision issues yield non-zero areas. must verify that area is close to zero
|
||||
if (calculator.getDimensionalShapeType() == POLYGON) {
|
||||
assertEquals(0.0, calculator.sumWeight(), 1e-10);
|
||||
} else {
|
||||
assertThat(calculator.getDimensionalShapeType(), equalTo(LINE));
|
||||
assertThat(calculator.getX(), equalTo(lineCalculator.getX()));
|
||||
assertThat(calculator.getY(), equalTo(lineCalculator.getY()));
|
||||
assertThat(calculator.sumWeight(), equalTo(lineCalculator.compSumWeight.value()));
|
||||
}
|
||||
}
|
||||
|
||||
public void testPolygonWithEqualSizedHole() {
|
||||
Polygon polyWithHole = new Polygon(new LinearRing(new double[]{-50, 50, 50, -50, -50}, new double[]{-50, -50, 50, 50, -50}),
|
||||
Collections.singletonList(new LinearRing(new double[]{-50, -50, 50, 50, -50}, new double[]{-50, 50, 50, -50, -50})));
|
||||
CentroidCalculator calculator = new CentroidCalculator(polyWithHole);
|
||||
assertThat(calculator.getX(), equalTo(0.0));
|
||||
assertThat(calculator.getY(), equalTo(0.0));
|
||||
assertThat(calculator.sumWeight(), equalTo(400.0));
|
||||
assertThat(calculator.getDimensionalShapeType(), equalTo(LINE));
|
||||
}
|
||||
|
||||
public void testPolygonAsPoint() {
|
||||
Point point = GeometryTestUtils.randomPoint(false);
|
||||
Polygon polygon = new Polygon(new LinearRing(new double[] { point.getX(), point.getX(), point.getX(), point.getX() },
|
||||
new double[] { point.getY(), point.getY(), point.getY(), point.getY() }));
|
||||
CentroidCalculator calculator = new CentroidCalculator(polygon);
|
||||
assertThat(calculator.getX(), equalTo(GeoUtils.normalizeLon(point.getX())));
|
||||
assertThat(calculator.getY(), equalTo(GeoUtils.normalizeLat(point.getY())));
|
||||
assertThat(calculator.sumWeight(), equalTo(1.0));
|
||||
assertThat(calculator.getDimensionalShapeType(), equalTo(POINT));
|
||||
}
|
||||
|
||||
public void testGeometryCollection() {
|
||||
int numPoints = randomIntBetween(0, 3);
|
||||
int numLines = randomIntBetween(0, 3);
|
||||
int numPolygons = randomIntBetween(0, 3);
|
||||
|
||||
if (numPoints == 0 && numLines == 0 && numPolygons == 0) {
|
||||
numPoints = 1;
|
||||
numLines = 1;
|
||||
numPolygons = 1;
|
||||
}
|
||||
java.util.List<Geometry> shapes = new ArrayList<>();
|
||||
for (int i = 0; i < numPoints; i++) {
|
||||
if (randomBoolean()) {
|
||||
shapes.add(GeometryTestUtils.randomPoint(false));
|
||||
} else {
|
||||
shapes.add(GeometryTestUtils.randomMultiPoint(false));
|
||||
}
|
||||
}
|
||||
for (int i = 0; i < numLines; i++) {
|
||||
if (randomBoolean()) {
|
||||
shapes.add(GeometryTestUtils.randomLine(false));
|
||||
} else {
|
||||
shapes.add(GeometryTestUtils.randomMultiLine(false));
|
||||
}
|
||||
}
|
||||
for (int i = 0; i < numPolygons; i++) {
|
||||
if (randomBoolean()) {
|
||||
shapes.add(GeometryTestUtils.randomPolygon(false));
|
||||
} else {
|
||||
shapes.add(GeometryTestUtils.randomMultiPolygon(false));
|
||||
}
|
||||
}
|
||||
|
||||
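        // the collection's dimensional shape type is that of its highest-dimension member: POLYGON > LINE > POINT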
DimensionalShapeType dimensionalShapeType = numPolygons > 0 ? POLYGON : numLines > 0 ? LINE : POINT;
|
||||
|
||||
// addFromCalculator is only adding from shapes with the highest dimensionalShapeType
|
||||
CentroidCalculator addFromCalculator = null;
|
||||
for (Geometry shape : shapes) {
|
||||
if ((shape.type() == ShapeType.MULTIPOLYGON || shape.type() == ShapeType.POLYGON) ||
|
||||
(dimensionalShapeType == LINE && (shape.type() == ShapeType.LINESTRING || shape.type() == ShapeType.MULTILINESTRING)) ||
|
||||
(dimensionalShapeType == POINT && (shape.type() == ShapeType.POINT || shape.type() == ShapeType.MULTIPOINT))) {
|
||||
if (addFromCalculator == null) {
|
||||
addFromCalculator = new CentroidCalculator(shape);
|
||||
} else {
|
||||
addFromCalculator.addFrom(new CentroidCalculator(shape));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
        // reorder the shapes to check that the collection centroid does not depend on iteration order
|
||||
if (randomBoolean()) {
|
||||
Collections.shuffle(shapes, random());
|
||||
} else if (randomBoolean()) {
|
||||
Collections.reverse(shapes);
|
||||
}
|
||||
|
||||
GeometryCollection<Geometry> collection = new GeometryCollection<>(shapes);
|
||||
CentroidCalculator calculator = new CentroidCalculator(collection);
|
||||
|
||||
assertNotNull(addFromCalculator.getDimensionalShapeType());
|
||||
assertThat(addFromCalculator.getDimensionalShapeType(), equalTo(dimensionalShapeType));
|
||||
assertThat(calculator.getDimensionalShapeType(), equalTo(dimensionalShapeType));
|
||||
assertEquals(calculator.getX(), addFromCalculator.getX(), DELTA);
|
||||
assertEquals(calculator.getY(), addFromCalculator.getY(), DELTA);
|
||||
assertEquals(calculator.sumWeight(), addFromCalculator.sumWeight(), DELTA);
|
||||
}
|
||||
|
||||
public void testAddFrom() {
|
||||
Point point = GeometryTestUtils.randomPoint(false);
|
||||
Line line = GeometryTestUtils.randomLine(false);
|
||||
Polygon polygon = GeometryTestUtils.randomPolygon(false);
|
||||
|
||||
// point add point
|
||||
{
|
||||
CentroidCalculator calculator = new CentroidCalculator(point);
|
||||
calculator.addFrom(new CentroidCalculator(point));
|
||||
assertThat(calculator.compSumX.value(), equalTo(2 * point.getX()));
|
||||
assertThat(calculator.compSumY.value(), equalTo(2 * point.getY()));
|
||||
assertThat(calculator.sumWeight(), equalTo(2.0));
|
||||
}
|
||||
|
||||
// point add line/polygon
|
||||
{
|
||||
CentroidCalculator lineCalculator = new CentroidCalculator(line);
|
||||
CentroidCalculator calculator = new CentroidCalculator(point);
|
||||
calculator.addFrom(lineCalculator);
|
||||
assertThat(calculator.getX(), equalTo(lineCalculator.getX()));
|
||||
assertThat(calculator.getY(), equalTo(lineCalculator.getY()));
|
||||
assertThat(calculator.sumWeight(), equalTo(lineCalculator.sumWeight()));
|
||||
}
|
||||
|
||||
// line add point
|
||||
{
|
||||
CentroidCalculator lineCalculator = new CentroidCalculator(line);
|
||||
CentroidCalculator calculator = new CentroidCalculator(line);
|
||||
calculator.addFrom(new CentroidCalculator(point));
|
||||
assertThat(calculator.getX(), equalTo(lineCalculator.getX()));
|
||||
assertThat(calculator.getY(), equalTo(lineCalculator.getY()));
|
||||
assertThat(calculator.sumWeight(), equalTo(lineCalculator.sumWeight()));
|
||||
}
|
||||
|
||||
// line add line
|
||||
{
|
||||
CentroidCalculator lineCalculator = new CentroidCalculator(line);
|
||||
CentroidCalculator calculator = new CentroidCalculator(line);
|
||||
calculator.addFrom(lineCalculator);
|
||||
assertEquals(2 * lineCalculator.compSumX.value(), calculator.compSumX.value(), DELTA);
|
||||
assertEquals(2 * lineCalculator.compSumY.value(), calculator.compSumY.value(), DELTA);
|
||||
assertEquals(2 * lineCalculator.sumWeight(), calculator.sumWeight(), DELTA);
|
||||
}
|
||||
|
||||
// line add polygon
|
||||
{
|
||||
CentroidCalculator polygonCalculator = new CentroidCalculator(polygon);
|
||||
CentroidCalculator calculator = new CentroidCalculator(line);
|
||||
calculator.addFrom(polygonCalculator);
|
||||
assertThat(calculator.getX(), equalTo(polygonCalculator.getX()));
|
||||
assertThat(calculator.getY(), equalTo(polygonCalculator.getY()));
|
||||
            assertThat(calculator.sumWeight(), equalTo(polygonCalculator.sumWeight()));
|
||||
}
|
||||
|
||||
// polygon add point/line
|
||||
{
|
||||
CentroidCalculator polygonCalculator = new CentroidCalculator(polygon);
|
||||
CentroidCalculator calculator = new CentroidCalculator(polygon);
|
||||
calculator.addFrom(new CentroidCalculator(randomBoolean() ? point : line));
|
||||
assertThat(calculator.getX(), equalTo(polygonCalculator.getX()));
|
||||
assertThat(calculator.getY(), equalTo(polygonCalculator.getY()));
|
||||
            assertThat(calculator.sumWeight(), equalTo(polygonCalculator.sumWeight()));
|
||||
}
|
||||
|
||||
// polygon add polygon
|
||||
{
|
||||
CentroidCalculator polygonCalculator = new CentroidCalculator(polygon);
|
||||
CentroidCalculator calculator = new CentroidCalculator(polygon);
|
||||
calculator.addFrom(polygonCalculator);
|
||||
assertThat(calculator.compSumX.value(), equalTo(2 * polygonCalculator.compSumX.value()));
|
||||
assertThat(calculator.compSumY.value(), equalTo(2 * polygonCalculator.compSumY.value()));
|
||||
assertThat(calculator.sumWeight(), equalTo(2 * polygonCalculator.sumWeight()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,52 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.spatial.index.mapper;
|
||||
|
||||
import org.apache.lucene.geo.GeoEncodingUtils;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import static org.elasticsearch.xpack.spatial.index.mapper.GeoShapeCoordinateEncoder.INSTANCE;
|
||||
import static org.hamcrest.Matchers.closeTo;
|
||||
import static org.hamcrest.Matchers.endsWith;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class GeoShapeCoordinateEncoderTests extends ESTestCase {
|
||||
|
||||
public void testLongitude() {
|
||||
double randomLon = randomDoubleBetween(-180, 180, true);
|
||||
double randomInvalidLon = randomFrom(randomDoubleBetween(-1000, -180.01, true),
|
||||
randomDoubleBetween(180.01, 1000, true));
|
||||
|
||||
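        // infinite coordinates are clamped to the integer extremes rather than rejected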
assertThat(INSTANCE.encodeX(Double.POSITIVE_INFINITY), equalTo(Integer.MAX_VALUE));
|
||||
assertThat(INSTANCE.encodeX(Double.NEGATIVE_INFINITY), equalTo(Integer.MIN_VALUE));
|
||||
int encodedLon = INSTANCE.encodeX(randomLon);
|
||||
assertThat(encodedLon, equalTo(GeoEncodingUtils.encodeLongitude(randomLon)));
|
||||
Exception e = expectThrows(IllegalArgumentException.class, () -> GeoShapeCoordinateEncoder.INSTANCE.encodeX(randomInvalidLon));
|
||||
assertThat(e.getMessage(), endsWith("must be between -180.0 and 180.0"));
|
||||
|
||||
assertThat(INSTANCE.decodeX(encodedLon), closeTo(randomLon, 0.0001));
|
||||
assertThat(INSTANCE.decodeX(Integer.MAX_VALUE), closeTo(180, 0.00001));
|
||||
assertThat(INSTANCE.decodeX(Integer.MIN_VALUE), closeTo(-180, 0.00001));
|
||||
}
|
||||
|
||||
public void testLatitude() {
|
||||
double randomLat = randomDoubleBetween(-90, 90, true);
|
||||
double randomInvalidLat = randomFrom(randomDoubleBetween(-1000, -90.01, true),
|
||||
randomDoubleBetween(90.01, 1000, true));
|
||||
|
||||
assertThat(INSTANCE.encodeY(Double.POSITIVE_INFINITY), equalTo(Integer.MAX_VALUE));
|
||||
assertThat(INSTANCE.encodeY(Double.NEGATIVE_INFINITY), equalTo(Integer.MIN_VALUE));
|
||||
int encodedLat = INSTANCE.encodeY(randomLat);
|
||||
assertThat(encodedLat, equalTo(GeoEncodingUtils.encodeLatitude(randomLat)));
|
||||
Exception e = expectThrows(IllegalArgumentException.class, () -> GeoShapeCoordinateEncoder.INSTANCE.encodeY(randomInvalidLat));
|
||||
assertThat(e.getMessage(), endsWith("must be between -90.0 and 90.0"));
|
||||
|
||||
assertThat(INSTANCE.decodeY(encodedLat), closeTo(randomLat, 0.0001));
|
||||
assertThat(INSTANCE.decodeY(Integer.MAX_VALUE), closeTo(90, 0.00001));
|
||||
assertThat(INSTANCE.decodeY(Integer.MIN_VALUE), closeTo(-90, 0.00001));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,401 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.spatial.index.mapper;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Explicit;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.mapper.AbstractGeometryFieldMapper;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.DocumentMapperParser;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin;
|
||||
import org.elasticsearch.xpack.spatial.SpatialPlugin;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_Z_VALUE;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
|
||||
public class GeoShapeWithDocValuesFieldMapperTests extends ESSingleNodeTestCase {
|
||||
|
||||
@Override
|
||||
protected boolean forbidPrivateIndexSettings() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> getPlugins() {
|
||||
return pluginList(InternalSettingsPlugin.class, SpatialPlugin.class, LocalStateCompositeXPackPlugin.class);
|
||||
}
|
||||
|
||||
public void testDefaultConfiguration() throws IOException {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("location")
|
||||
.field("type", "geo_shape")
|
||||
.endObject().endObject()
|
||||
.endObject().endObject());
|
||||
|
||||
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
|
||||
.parse("type1", new CompressedXContent(mapping));
|
||||
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeWithDocValuesFieldMapper.class));
|
||||
|
||||
GeoShapeWithDocValuesFieldMapper geoShapeFieldMapper = (GeoShapeWithDocValuesFieldMapper) fieldMapper;
|
||||
assertThat(geoShapeFieldMapper.fieldType().orientation(),
|
||||
equalTo(org.elasticsearch.index.mapper.GeoShapeFieldMapper.Defaults.ORIENTATION.value()));
|
||||
assertFalse(geoShapeFieldMapper.docValues().explicit());
|
||||
assertTrue(geoShapeFieldMapper.docValues().value());
|
||||
assertTrue(geoShapeFieldMapper.fieldType().hasDocValues());
|
||||
}
|
||||
|
||||
public void testDefaultDocValueConfigurationOnPre7_8() throws IOException {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("location")
|
||||
.field("type", "geo_shape")
|
||||
.endObject().endObject()
|
||||
.endObject().endObject());
|
||||
|
||||
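        // indices created before 7.8 are expected to default to doc_values=false for geo_shape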
Version oldVersion = Version.V_7_7_0;
|
||||
DocumentMapper defaultMapper = createIndex("test", settings(oldVersion).build()).mapperService().documentMapperParser()
|
||||
.parse("type1", new CompressedXContent(mapping));
|
||||
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeWithDocValuesFieldMapper.class));
|
||||
|
||||
GeoShapeWithDocValuesFieldMapper geoShapeFieldMapper = (GeoShapeWithDocValuesFieldMapper) fieldMapper;
|
||||
assertFalse(geoShapeFieldMapper.docValues().explicit());
|
||||
assertFalse(geoShapeFieldMapper.docValues().value());
|
||||
assertFalse(geoShapeFieldMapper.fieldType().hasDocValues());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test that orientation parameter correctly parses
|
||||
*/
|
||||
public void testOrientationParsing() throws IOException {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("location")
|
||||
.field("type", "geo_shape")
|
||||
.field("orientation", "left")
|
||||
.endObject().endObject()
|
||||
.endObject().endObject());
|
||||
|
||||
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
|
||||
.parse("type1", new CompressedXContent(mapping));
|
||||
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeWithDocValuesFieldMapper.class));
|
||||
|
||||
ShapeBuilder.Orientation orientation = ((GeoShapeWithDocValuesFieldMapper)fieldMapper).fieldType().orientation();
|
||||
assertThat(orientation, equalTo(ShapeBuilder.Orientation.CLOCKWISE));
|
||||
assertThat(orientation, equalTo(ShapeBuilder.Orientation.LEFT));
|
||||
assertThat(orientation, equalTo(ShapeBuilder.Orientation.CW));
|
||||
|
||||
// explicit right orientation test
|
||||
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("location")
|
||||
.field("type", "geo_shape")
|
||||
.field("orientation", "right")
|
||||
.endObject().endObject()
|
||||
.endObject().endObject());
|
||||
|
||||
defaultMapper = createIndex("test2").mapperService().documentMapperParser()
|
||||
.parse("type1", new CompressedXContent(mapping));
|
||||
fieldMapper = defaultMapper.mappers().getMapper("location");
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeWithDocValuesFieldMapper.class));
|
||||
|
||||
orientation = ((GeoShapeWithDocValuesFieldMapper)fieldMapper).fieldType().orientation();
|
||||
assertThat(orientation, equalTo(ShapeBuilder.Orientation.COUNTER_CLOCKWISE));
|
||||
assertThat(orientation, equalTo(ShapeBuilder.Orientation.RIGHT));
|
||||
assertThat(orientation, equalTo(ShapeBuilder.Orientation.CCW));
|
||||
}
|
||||
|
||||
/**
|
||||
* Test that coerce parameter correctly parses
|
||||
*/
|
||||
public void testCoerceParsing() throws IOException {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("location")
|
||||
.field("type", "geo_shape")
|
||||
.field("coerce", "true")
|
||||
.endObject().endObject()
|
||||
.endObject().endObject());
|
||||
|
||||
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
|
||||
.parse("type1", new CompressedXContent(mapping));
|
||||
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeWithDocValuesFieldMapper.class));
|
||||
|
||||
boolean coerce = ((GeoShapeWithDocValuesFieldMapper)fieldMapper).coerce().value();
|
||||
assertThat(coerce, equalTo(true));
|
||||
|
||||
// explicit false coerce test
|
||||
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("location")
|
||||
.field("type", "geo_shape")
|
||||
.field("coerce", "false")
|
||||
.endObject().endObject()
|
||||
.endObject().endObject());
|
||||
|
||||
defaultMapper = createIndex("test2").mapperService().documentMapperParser()
|
||||
.parse("type1", new CompressedXContent(mapping));
|
||||
fieldMapper = defaultMapper.mappers().getMapper("location");
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeWithDocValuesFieldMapper.class));
|
||||
|
||||
coerce = ((GeoShapeWithDocValuesFieldMapper)fieldMapper).coerce().value();
|
||||
assertThat(coerce, equalTo(false));
|
||||
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Test that accept_z_value parameter correctly parses
|
||||
*/
|
||||
public void testIgnoreZValue() throws IOException {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("location")
|
||||
.field("type", "geo_shape")
|
||||
.field(IGNORE_Z_VALUE.getPreferredName(), "true")
|
||||
.endObject().endObject()
|
||||
.endObject().endObject());
|
||||
|
||||
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
|
||||
.parse("type1", new CompressedXContent(mapping));
|
||||
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeWithDocValuesFieldMapper.class));
|
||||
|
||||
boolean ignoreZValue = ((GeoShapeWithDocValuesFieldMapper)fieldMapper).ignoreZValue().value();
|
||||
assertThat(ignoreZValue, equalTo(true));
|
||||
|
||||
// explicit false accept_z_value test
|
||||
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("location")
|
||||
.field("type", "geo_shape")
|
||||
.field(IGNORE_Z_VALUE.getPreferredName(), "false")
|
||||
.endObject().endObject()
|
||||
.endObject().endObject());
|
||||
|
||||
defaultMapper = createIndex("test2").mapperService().documentMapperParser()
|
||||
.parse("type1", new CompressedXContent(mapping));
|
||||
fieldMapper = defaultMapper.mappers().getMapper("location");
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeWithDocValuesFieldMapper.class));
|
||||
|
||||
ignoreZValue = ((GeoShapeWithDocValuesFieldMapper)fieldMapper).ignoreZValue().value();
|
||||
assertThat(ignoreZValue, equalTo(false));
|
||||
}
|
||||
|
||||
/**
|
||||
* Test that ignore_malformed parameter correctly parses
|
||||
*/
|
||||
public void testIgnoreMalformedParsing() throws IOException {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("location")
|
||||
.field("type", "geo_shape")
|
||||
.field("ignore_malformed", "true")
|
||||
.endObject().endObject()
|
||||
.endObject().endObject());
|
||||
|
||||
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
|
||||
.parse("type1", new CompressedXContent(mapping));
|
||||
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeWithDocValuesFieldMapper.class));
|
||||
|
||||
Explicit<Boolean> ignoreMalformed = ((GeoShapeWithDocValuesFieldMapper)fieldMapper).ignoreMalformed();
|
||||
assertThat(ignoreMalformed.value(), equalTo(true));
|
||||
|
||||
// explicit false ignore_malformed test
|
||||
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("location")
|
||||
.field("type", "geo_shape")
|
||||
.field("ignore_malformed", "false")
|
||||
.endObject().endObject()
|
||||
.endObject().endObject());
|
||||
|
||||
defaultMapper = createIndex("test2").mapperService().documentMapperParser()
|
||||
.parse("type1", new CompressedXContent(mapping));
|
||||
fieldMapper = defaultMapper.mappers().getMapper("location");
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeWithDocValuesFieldMapper.class));
|
||||
|
||||
ignoreMalformed = ((GeoShapeWithDocValuesFieldMapper)fieldMapper).ignoreMalformed();
|
||||
assertThat(ignoreMalformed.explicit(), equalTo(true));
|
||||
assertThat(ignoreMalformed.value(), equalTo(false));
|
||||
}
|
||||
|
||||
/**
|
||||
* Test that doc_values parameter correctly parses
|
||||
*/
|
||||
public void testDocValues() throws IOException {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("location")
|
||||
.field("type", "geo_shape")
|
||||
.field("doc_values", true)
|
||||
.endObject().endObject()
|
||||
.endObject().endObject());
|
||||
|
||||
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
|
||||
.parse("type1", new CompressedXContent(mapping));
|
||||
Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeWithDocValuesFieldMapper.class));
|
||||
|
||||
assertTrue(((GeoShapeWithDocValuesFieldMapper)fieldMapper).docValues().explicit());
|
||||
assertTrue(((GeoShapeWithDocValuesFieldMapper)fieldMapper).docValues().value());
|
||||
boolean hasDocValues = ((GeoShapeWithDocValuesFieldMapper)fieldMapper).fieldType().hasDocValues();
|
||||
assertTrue(hasDocValues);
|
||||
|
||||
// explicit false doc_values
|
||||
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("location")
|
||||
.field("type", "geo_shape")
|
||||
.field("doc_values", "false")
|
||||
.endObject().endObject()
|
||||
.endObject().endObject());
|
||||
|
||||
defaultMapper = createIndex("test2").mapperService().documentMapperParser()
|
||||
.parse("type1", new CompressedXContent(mapping));
|
||||
fieldMapper = defaultMapper.mappers().getMapper("location");
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeWithDocValuesFieldMapper.class));
|
||||
|
||||
assertTrue(((GeoShapeWithDocValuesFieldMapper)fieldMapper).docValues().explicit());
|
||||
assertFalse(((GeoShapeWithDocValuesFieldMapper)fieldMapper).docValues().value());
|
||||
hasDocValues = ((GeoShapeWithDocValuesFieldMapper)fieldMapper).fieldType().hasDocValues();
|
||||
assertFalse(hasDocValues);
|
||||
}
|
||||
|
||||
private void assertFieldWarnings(String... fieldNames) {
|
||||
String[] warnings = new String[fieldNames.length];
|
||||
for (int i = 0; i < fieldNames.length; ++i) {
|
||||
warnings[i] = "Field parameter [" + fieldNames[i] + "] "
|
||||
+ "is deprecated and will be removed in a future version.";
|
||||
        }
        assertWarnings(warnings);
    }
|
||||
|
||||
public void testGeoShapeMapperMerge() throws Exception {
|
||||
String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
|
||||
.startObject("shape").field("type", "geo_shape")
|
||||
.field("orientation", "ccw")
|
||||
.endObject().endObject().endObject().endObject());
|
||||
MapperService mapperService = createIndex("test").mapperService();
|
||||
DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(stage1Mapping),
|
||||
MapperService.MergeReason.MAPPING_UPDATE);
|
||||
String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("shape").field("type", "geo_shape")
|
||||
.field("orientation", "cw").endObject().endObject().endObject().endObject());
|
||||
mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE);
|
||||
|
||||
// verify nothing changed
|
||||
Mapper fieldMapper = docMapper.mappers().getMapper("shape");
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeWithDocValuesFieldMapper.class));
|
||||
|
||||
GeoShapeWithDocValuesFieldMapper geoShapeFieldMapper = (GeoShapeWithDocValuesFieldMapper) fieldMapper;
|
||||
assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CCW));
|
||||
|
||||
// change mapping; orientation
|
||||
stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("shape").field("type", "geo_shape")
|
||||
.field("orientation", "cw").endObject().endObject().endObject().endObject());
|
||||
docMapper = mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE);
|
||||
|
||||
fieldMapper = docMapper.mappers().getMapper("shape");
|
||||
assertThat(fieldMapper, instanceOf(GeoShapeWithDocValuesFieldMapper.class));
|
||||
|
||||
geoShapeFieldMapper = (GeoShapeWithDocValuesFieldMapper) fieldMapper;
|
||||
assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CW));
|
||||
}
|
||||
|
||||
public void testEmptyName() throws Exception {
|
||||
// after 5.x
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("")
|
||||
.field("type", "geo_shape")
|
||||
.endObject().endObject()
|
||||
.endObject().endObject());
|
||||
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
|
||||
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> parser.parse("type1", new CompressedXContent(mapping))
|
||||
);
|
||||
assertThat(e.getMessage(), containsString("name cannot be empty string"));
|
||||
}
|
||||
|
||||
public void testSerializeDefaults() throws Exception {
|
||||
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
|
||||
{
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("location")
|
||||
.field("type", "geo_shape")
|
||||
.endObject().endObject()
|
||||
.endObject().endObject());
|
||||
DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
|
||||
String serialized = toXContentString((GeoShapeWithDocValuesFieldMapper) defaultMapper.mappers().getMapper("location"));
|
||||
assertTrue(serialized, serialized.contains("\"orientation\":\"" +
|
||||
AbstractGeometryFieldMapper.Defaults.ORIENTATION.value() + "\""));
|
||||
assertTrue(serialized, serialized.contains("\"doc_values\":true"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testSerializeDocValues() throws IOException {
|
||||
boolean docValues = randomBoolean();
|
||||
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties").startObject("location")
|
||||
.field("type", "geo_shape")
|
||||
.field("doc_values", docValues)
|
||||
.endObject().endObject()
|
||||
.endObject().endObject());
|
||||
DocumentMapper mapper = parser.parse("type1", new CompressedXContent(mapping));
|
||||
String serialized = toXContentString((GeoShapeWithDocValuesFieldMapper) mapper.mappers().getMapper("location"));
|
||||
assertTrue(serialized, serialized.contains("\"orientation\":\"" +
|
||||
AbstractGeometryFieldMapper.Defaults.ORIENTATION.value() + "\""));
|
||||
assertTrue(serialized, serialized.contains("\"doc_values\":" + docValues));
|
||||
}
|
||||
|
||||
public String toXContentString(GeoShapeWithDocValuesFieldMapper mapper, boolean includeDefaults) throws IOException {
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
|
||||
ToXContent.Params params;
|
||||
if (includeDefaults) {
|
||||
params = new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true"));
|
||||
} else {
|
||||
params = ToXContent.EMPTY_PARAMS;
|
||||
}
|
||||
mapper.doXContentBody(builder, includeDefaults, params);
|
||||
return Strings.toString(builder.endObject());
|
||||
}
|
||||
|
||||
public String toXContentString(GeoShapeWithDocValuesFieldMapper mapper) throws IOException {
|
||||
return toXContentString(mapper, true);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,30 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.spatial.index.mapper;
|
||||
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.index.mapper.FieldTypeTestCase;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.junit.Before;
|
||||
|
||||
public class GeoShapeWithDocValuesFieldTypeTests extends FieldTypeTestCase {
|
||||
|
||||
@Override
|
||||
protected MappedFieldType createDefaultFieldType() {
|
||||
return new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType();
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setupProperties() {
|
||||
addModifier(new FieldTypeTestCase.Modifier("orientation", true) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
((GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType)ft).setOrientation(ShapeBuilder.Orientation.LEFT);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
|
@ -0,0 +1,35 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.spatial.index.mapper;
|
||||
|
||||
/**
|
||||
 * {@link CoordinateEncoder} used in tests; it encodes and decodes coordinates as an identity mapping (doubles truncated to ints).
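 * For example, {@code encodeX(10.7) == 10} and {@code decodeX(10) == 10.0}.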
|
||||
*/
|
||||
public class TestCoordinateEncoder implements CoordinateEncoder {
|
||||
|
||||
public static final TestCoordinateEncoder INSTANCE = new TestCoordinateEncoder();
|
||||
|
||||
@Override
|
||||
public int encodeX(double x) {
|
||||
return (int) x;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int encodeY(double y) {
|
||||
return (int) y;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double decodeX(int x) {
|
||||
return x;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double decodeY(int y) {
|
||||
return y;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,419 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.spatial.index.mapper;
|
||||
|
||||
import org.elasticsearch.common.collect.List;
|
||||
import org.elasticsearch.common.CheckedBiFunction;
|
||||
import org.elasticsearch.geo.GeometryTestUtils;
|
||||
import org.elasticsearch.geometry.Circle;
|
||||
import org.elasticsearch.geometry.Geometry;
|
||||
import org.elasticsearch.geometry.GeometryCollection;
|
||||
import org.elasticsearch.geometry.GeometryVisitor;
|
||||
import org.elasticsearch.geometry.Line;
|
||||
import org.elasticsearch.geometry.LinearRing;
|
||||
import org.elasticsearch.geometry.MultiLine;
|
||||
import org.elasticsearch.geometry.MultiPoint;
|
||||
import org.elasticsearch.geometry.MultiPolygon;
|
||||
import org.elasticsearch.geometry.Point;
|
||||
import org.elasticsearch.geometry.Polygon;
|
||||
import org.elasticsearch.geometry.Rectangle;
|
||||
import org.elasticsearch.geometry.ShapeType;
|
||||
import org.elasticsearch.index.mapper.GeoShapeIndexer;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.function.Function;
|
||||
|
||||
import static org.elasticsearch.xpack.spatial.util.GeoTestUtils.assertRelation;
|
||||
import static org.elasticsearch.xpack.spatial.util.GeoTestUtils.triangleTreeReader;
|
||||
import static org.elasticsearch.geo.GeometryTestUtils.randomLine;
|
||||
import static org.elasticsearch.geo.GeometryTestUtils.randomMultiLine;
|
||||
import static org.elasticsearch.geo.GeometryTestUtils.randomMultiPoint;
|
||||
import static org.elasticsearch.geo.GeometryTestUtils.randomMultiPolygon;
|
||||
import static org.elasticsearch.geo.GeometryTestUtils.randomPoint;
|
||||
import static org.elasticsearch.geo.GeometryTestUtils.randomPolygon;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class TriangleTreeTests extends ESTestCase {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testDimensionalShapeType() throws IOException {
|
||||
GeoShapeIndexer indexer = new GeoShapeIndexer(true, "test");
|
||||
assertDimensionalShapeType(randomPoint(false), DimensionalShapeType.POINT);
|
||||
assertDimensionalShapeType(randomMultiPoint(false), DimensionalShapeType.POINT);
|
||||
assertDimensionalShapeType(randomLine(false), DimensionalShapeType.LINE);
|
||||
assertDimensionalShapeType(randomMultiLine(false), DimensionalShapeType.LINE);
|
||||
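        // keep generating until prepareForIndexing yields a plain POLYGON (shapes crossing the dateline may be split into a MULTIPOLYGON)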
Geometry randoPoly = indexer.prepareForIndexing(randomValueOtherThanMany(g -> {
|
||||
try {
|
||||
Geometry newGeo = indexer.prepareForIndexing(g);
|
||||
return newGeo.type() != ShapeType.POLYGON;
|
||||
} catch (Exception e) {
|
||||
return true;
|
||||
}
|
||||
}, () -> randomPolygon(false)));
|
||||
Geometry randoMultiPoly = indexer.prepareForIndexing(randomValueOtherThanMany(g -> {
|
||||
try {
|
||||
Geometry newGeo = indexer.prepareForIndexing(g);
|
||||
return newGeo.type() != ShapeType.MULTIPOLYGON;
|
||||
} catch (Exception e) {
|
||||
return true;
|
||||
}
|
||||
}, () -> randomMultiPolygon(false)));
|
||||
assertDimensionalShapeType(randoPoly, DimensionalShapeType.POLYGON);
|
||||
assertDimensionalShapeType(randoMultiPoly, DimensionalShapeType.POLYGON);
|
||||
assertDimensionalShapeType(randomFrom(
|
||||
new GeometryCollection<>(List.of(randomPoint(false))),
|
||||
new GeometryCollection<>(List.of(randomMultiPoint(false))),
|
||||
new GeometryCollection<>(Collections.singletonList(
|
||||
new GeometryCollection<>(List.of(randomPoint(false), randomMultiPoint(false))))))
|
||||
, DimensionalShapeType.POINT);
|
||||
assertDimensionalShapeType(randomFrom(
|
||||
new GeometryCollection<>(List.of(randomPoint(false), randomLine(false))),
|
||||
new GeometryCollection<>(List.of(randomMultiPoint(false), randomMultiLine(false))),
|
||||
new GeometryCollection<>(Collections.singletonList(
|
||||
new GeometryCollection<>(List.of(randomPoint(false), randomLine(false))))))
|
||||
, DimensionalShapeType.LINE);
|
||||
assertDimensionalShapeType(randomFrom(
|
||||
new GeometryCollection<>(List.of(randomPoint(false), indexer.prepareForIndexing(randomLine(false)), randoPoly)),
|
||||
new GeometryCollection<>(List.of(randomMultiPoint(false), randoMultiPoly)),
|
||||
new GeometryCollection<>(Collections.singletonList(
|
||||
new GeometryCollection<>(List.of(indexer.prepareForIndexing(randomLine(false)),
|
||||
indexer.prepareForIndexing(randoPoly))))))
|
||||
, DimensionalShapeType.POLYGON);
|
||||
}
|
||||
|
||||
|
||||
public void testRectangleShape() throws IOException {
|
||||
for (int i = 0; i < 1000; i++) {
|
||||
int minX = randomIntBetween(-40, -1);
|
||||
int maxX = randomIntBetween(1, 40);
|
||||
int minY = randomIntBetween(-40, -1);
|
||||
int maxY = randomIntBetween(1, 40);
|
||||
Geometry rectangle = new Rectangle(minX, maxX, maxY, minY);
|
||||
TriangleTreeReader reader = triangleTreeReader(rectangle, GeoShapeCoordinateEncoder.INSTANCE);
|
||||
|
||||
Extent expectedExtent = getExtentFromBox(minX, minY, maxX, maxY);
|
||||
assertThat(expectedExtent, equalTo(reader.getExtent()));
|
||||
// centroid is calculated using original double values but then loses precision as it is serialized as an integer
|
||||
int encodedCentroidX = GeoShapeCoordinateEncoder.INSTANCE.encodeX(((double) minX + maxX) / 2);
|
||||
int encodedCentroidY = GeoShapeCoordinateEncoder.INSTANCE.encodeY(((double) minY + maxY) / 2);
|
||||
assertEquals(GeoShapeCoordinateEncoder.INSTANCE.decodeX(encodedCentroidX), reader.getCentroidX(), 0.0000001);
|
||||
assertEquals(GeoShapeCoordinateEncoder.INSTANCE.decodeY(encodedCentroidY), reader.getCentroidY(), 0.0000001);
|
||||
|
||||
// box-query touches bottom-left corner
|
||||
assertRelation(GeoRelation.QUERY_DISJOINT, reader, getExtentFromBox(minX - randomIntBetween(1, 180 + minX),
|
||||
minY - randomIntBetween(1, 90 + minY), minX, minY));
|
||||
// box-query touches bottom-right corner
|
||||
assertRelation(GeoRelation.QUERY_DISJOINT, reader, getExtentFromBox(maxX, minY - randomIntBetween(1, 90 + minY),
|
||||
maxX + randomIntBetween(1, 180 - maxX), minY));
|
||||
// box-query touches top-right corner
|
||||
assertRelation(GeoRelation.QUERY_DISJOINT, reader, getExtentFromBox(maxX, maxY, maxX + randomIntBetween(1, 180 - maxX),
|
||||
maxY + randomIntBetween(1, 90 - maxY)));
|
||||
// box-query touches top-left corner
|
||||
assertRelation(GeoRelation.QUERY_DISJOINT, reader, getExtentFromBox(minX - randomIntBetween(1, 180 + minX), maxY, minX,
|
||||
maxY + randomIntBetween(1, 90 - maxY)));
|
||||
|
||||
// box-query fully-enclosed inside rectangle
|
||||
assertRelation(GeoRelation.QUERY_INSIDE, reader, getExtentFromBox(3 * (minX + maxX) / 4, 3 * (minY + maxY) / 4,
|
||||
3 * (maxX + minX) / 4, 3 * (maxY + minY) / 4));
|
||||
// box-query fully-contains poly
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, getExtentFromBox(minX - randomIntBetween(1, 180 + minX),
|
||||
minY - randomIntBetween(1, 90 + minY), maxX + randomIntBetween(1, 180 - maxX),
|
||||
maxY + randomIntBetween(1, 90 - maxY)));
|
||||
// box-query half-in-half-out-right
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, getExtentFromBox(3 * (minX + maxX) / 4, 3 * (minY + maxY) / 4,
|
||||
maxX + randomIntBetween(1, 90 - maxY), 3 * (maxY + minY) / 4));
|
||||
// box-query half-in-half-out-left
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, getExtentFromBox(minX - randomIntBetween(1, 180 + minX),
|
||||
3 * (minY + maxY) / 4, 3 * (maxX + minX) / 4, 3 * (maxY + minY) / 4));
|
||||
// box-query half-in-half-out-top
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, getExtentFromBox(3 * (minX + maxX) / 4, 3 * (minY + maxY) / 4,
|
||||
maxX + randomIntBetween(1, 180 - maxX), maxY + randomIntBetween(1, 90 - maxY)));
|
||||
// box-query half-in-half-out-bottom
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, getExtentFromBox(3 * (minX + maxX) / 4,
|
||||
minY - randomIntBetween(1, 90 + minY), maxX + randomIntBetween(1, 180 - maxX),
|
||||
3 * (maxY + minY) / 4));
|
||||
|
||||
// box-query outside to the right
|
||||
assertRelation(GeoRelation.QUERY_DISJOINT, reader, getExtentFromBox(maxX + randomIntBetween(1, 180 - maxX), minY,
|
||||
maxX + randomIntBetween(1, 180 - maxX), maxY));
|
||||
// box-query outside to the left
|
||||
assertRelation(GeoRelation.QUERY_DISJOINT, reader, getExtentFromBox(maxX - randomIntBetween(1, 180 - maxX), minY,
|
||||
minX - randomIntBetween(1, 180 + minX), maxY));
|
||||
// box-query outside to the top
|
||||
assertRelation(GeoRelation.QUERY_DISJOINT, reader, getExtentFromBox(minX, maxY + randomIntBetween(1, 90 - maxY), maxX,
|
||||
maxY + randomIntBetween(1, 90 - maxY)));
|
||||
// box-query outside to the bottom
|
||||
assertRelation(GeoRelation.QUERY_DISJOINT, reader, getExtentFromBox(minX, minY - randomIntBetween(1, 90 + minY), maxX,
|
||||
minY - randomIntBetween(1, 90 + minY)));
|
||||
}
|
||||
}
|
||||
|
||||
public void testPacManPolygon() throws Exception {
|
||||
// pacman
|
||||
double[] px = {0, 10, 10, 0, -8, -10, -8, 0, 10, 10, 0};
|
||||
double[] py = {0, -5, -9, -10, -9, 0, 9, 10, 9, 5, 0};
|
||||
|
||||
// test cell crossing poly
|
||||
Polygon pacMan = new Polygon(new LinearRing(py, px), Collections.emptyList());
|
||||
TriangleTreeReader reader = triangleTreeReader(pacMan, TestCoordinateEncoder.INSTANCE);
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, getExtentFromBox(2, -1, 11, 1));
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, getExtentFromBox(-12, -12, 12, 12));
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, getExtentFromBox(-2, -1, 2, 0));
|
||||
assertRelation(GeoRelation.QUERY_INSIDE, reader, getExtentFromBox(-5, -6, 2, -2));
|
||||
}
|
||||
|
||||
// adapted from org.apache.lucene.geo.TestPolygon2D#testMultiPolygon
|
||||
public void testPolygonWithHole() throws Exception {
|
||||
Polygon polyWithHole = new Polygon(new LinearRing(new double[]{-50, 50, 50, -50, -50}, new double[]{-50, -50, 50, 50, -50}),
|
||||
Collections.singletonList(new LinearRing(new double[]{-10, 10, 10, -10, -10}, new double[]{-10, -10, 10, 10, -10})));
|
||||
|
||||
TriangleTreeReader reader = triangleTreeReader(polyWithHole, GeoShapeCoordinateEncoder.INSTANCE);
|
||||
|
||||
assertRelation(GeoRelation.QUERY_DISJOINT, reader, getExtentFromBox(6, -6, 6, -6)); // in the hole
|
||||
assertRelation(GeoRelation.QUERY_INSIDE, reader, getExtentFromBox(25, -25, 25, -25)); // on the mainland
|
||||
assertRelation(GeoRelation.QUERY_DISJOINT, reader, getExtentFromBox(51, 51, 52, 52)); // outside of mainland
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, getExtentFromBox(-60, -60, 60, 60)); // enclosing us completely
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, getExtentFromBox(49, 49, 51, 51)); // overlapping the mainland
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, getExtentFromBox(9, 9, 11, 11)); // overlapping the hole
|
||||
}
|
||||
|
||||
public void testCombPolygon() throws Exception {
|
||||
double[] px = {0, 10, 10, 20, 20, 30, 30, 40, 40, 50, 50, 0, 0};
|
||||
double[] py = {0, 0, 20, 20, 0, 0, 20, 20, 0, 0, 30, 30, 0};
|
||||
|
||||
double[] hx = {21, 21, 29, 29, 21};
|
||||
double[] hy = {1, 20, 20, 1, 1};
|
||||
|
||||
Polygon polyWithHole = new Polygon(new LinearRing(px, py), Collections.singletonList(new LinearRing(hx, hy)));
|
||||
TriangleTreeReader reader = triangleTreeReader(polyWithHole, GeoShapeCoordinateEncoder.INSTANCE);
|
||||
// test cell crossing poly
|
||||
assertRelation(GeoRelation.QUERY_INSIDE, reader, getExtentFromBox(5, 10, 5, 10));
|
||||
assertRelation(GeoRelation.QUERY_DISJOINT, reader, getExtentFromBox(15, 10, 15, 10));
|
||||
assertRelation(GeoRelation.QUERY_DISJOINT, reader, getExtentFromBox(25, 10, 25, 10));
|
||||
}
|
||||
|
||||
public void testPacManClosedLineString() throws Exception {
|
||||
// pacman
|
||||
double[] px = {0, 10, 10, 0, -8, -10, -8, 0, 10, 10, 0};
|
||||
double[] py = {0, 5, 9, 10, 9, 0, -9, -10, -9, -5, 0};
|
||||
|
||||
// test cell crossing poly
|
||||
TriangleTreeReader reader = triangleTreeReader(new Line(px, py), GeoShapeCoordinateEncoder.INSTANCE);
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, getExtentFromBox(2, -1, 11, 1));
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, getExtentFromBox(-12, -12, 12, 12));
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, getExtentFromBox(-2, -1, 2, 0));
|
||||
assertRelation(GeoRelation.QUERY_DISJOINT, reader, getExtentFromBox(-5, -6, 2, -2));
|
||||
}
|
||||
|
||||
public void testPacManLineString() throws Exception {
|
||||
// pacman
|
||||
double[] px = {0, 10, 10, 0, -8, -10, -8, 0, 10, 10};
|
||||
double[] py = {0, 5, 9, 10, 9, 0, -9, -10, -9, -5};
|
||||
|
||||
// test cell crossing poly
|
||||
TriangleTreeReader reader = triangleTreeReader(new Line(px, py), GeoShapeCoordinateEncoder.INSTANCE);
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, getExtentFromBox(2, -1, 11, 1));
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, getExtentFromBox(-12, -12, 12, 12));
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, getExtentFromBox(-2, -1, 2, 0));
|
||||
assertRelation(GeoRelation.QUERY_DISJOINT, reader, getExtentFromBox(-5, -6, 2, -2));
|
||||
}
|
||||
|
||||
public void testPacManPoints() throws Exception {
|
||||
// pacman
|
||||
java.util.List<Point> points = Arrays.asList(
|
||||
new Point(0, 0),
|
||||
new Point(5, 10),
|
||||
new Point(9, 10),
|
||||
new Point(10, 0),
|
||||
new Point(9, -8),
|
||||
new Point(0, -10),
|
||||
new Point(-9, -8),
|
||||
new Point(-10, 0),
|
||||
new Point(-9, 10),
|
||||
new Point(-5, 10)
|
||||
);
|
||||
|
||||
|
||||
// candidate intersects cell
|
||||
int xMin = 0;
|
||||
int xMax = 11;
|
||||
int yMin = -10;
|
||||
int yMax = 9;
|
||||
|
||||
// test cell crossing poly
|
||||
TriangleTreeReader reader = triangleTreeReader(new MultiPoint(points), GeoShapeCoordinateEncoder.INSTANCE);
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, getExtentFromBox(xMin, yMin, xMax, yMax));
|
||||
}
|
||||
|
||||
public void testRandomMultiLineIntersections() throws IOException {
|
||||
GeoShapeIndexer indexer = new GeoShapeIndexer(true, "test");
|
||||
MultiLine geometry = randomMultiLine(false);
|
||||
geometry = (MultiLine) indexer.prepareForIndexing(geometry);
|
||||
TriangleTreeReader reader = triangleTreeReader(geometry, GeoShapeCoordinateEncoder.INSTANCE);
|
||||
Extent readerExtent = reader.getExtent();
|
||||
|
||||
for (Line line : geometry) {
|
||||
Extent lineExtent = triangleTreeReader(line, GeoShapeCoordinateEncoder.INSTANCE).getExtent();
|
||||
if (lineExtent.minX() != Integer.MIN_VALUE && lineExtent.maxX() != Integer.MAX_VALUE
|
||||
&& lineExtent.minY() != Integer.MIN_VALUE && lineExtent.maxY() != Integer.MAX_VALUE) {
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, Extent.fromPoints(lineExtent.minX() - 1, lineExtent.minY() - 1,
|
||||
lineExtent.maxX() + 1, lineExtent.maxY() + 1));
|
||||
}
|
||||
}
|
||||
|
||||
// extent that fully encloses the MultiLine
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, reader.getExtent());
|
||||
if (readerExtent.minX() != Integer.MIN_VALUE && readerExtent.maxX() != Integer.MAX_VALUE
|
||||
&& readerExtent.minY() != Integer.MIN_VALUE && readerExtent.maxY() != Integer.MAX_VALUE) {
|
||||
assertRelation(GeoRelation.QUERY_CROSSES, reader, Extent.fromPoints(readerExtent.minX() - 1, readerExtent.minY() - 1,
|
||||
readerExtent.maxX() + 1, readerExtent.maxY() + 1));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public void testRandomPolygonIntersection() throws IOException {
|
||||
int testPointCount = randomIntBetween(50, 100);
|
||||
Point[] testPoints = new Point[testPointCount];
|
||||
double extentSize = randomDoubleBetween(1, 10, true);
|
||||
boolean[] intersects = new boolean[testPointCount];
|
||||
for (int i = 0; i < testPoints.length; i++) {
|
||||
testPoints[i] = randomPoint(false);
|
||||
}
|
||||
|
||||
Geometry geometry = randomMultiPolygon(false);
|
||||
GeoShapeIndexer indexer = new GeoShapeIndexer(true, "test");
|
||||
Geometry preparedGeometry = indexer.prepareForIndexing(geometry);
|
||||
|
||||
for (int i = 0; i < testPointCount; i++) {
|
||||
int cur = i;
|
||||
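            // expected value: OR of the per-primitive relations, compared below against relating the whole geometry at once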
intersects[cur] = fold(preparedGeometry, false, (g, s) -> s || intersects(g, testPoints[cur], extentSize));
|
||||
}
|
||||
|
||||
for (int i = 0; i < testPointCount; i++) {
|
||||
assertEquals(intersects[i], intersects(preparedGeometry, testPoints[i], extentSize));
|
||||
}
|
||||
}
|
||||
|
||||
private Extent bufferedExtentFromGeoPoint(double x, double y, double extentSize) {
|
||||
int xMin = GeoShapeCoordinateEncoder.INSTANCE.encodeX(Math.max(x - extentSize, -180.0));
|
||||
int xMax = GeoShapeCoordinateEncoder.INSTANCE.encodeX(Math.min(x + extentSize, 180.0));
|
||||
int yMin = GeoShapeCoordinateEncoder.INSTANCE.encodeY(Math.max(y - extentSize, -90));
|
||||
int yMax = GeoShapeCoordinateEncoder.INSTANCE.encodeY(Math.min(y + extentSize, 90));
|
||||
return Extent.fromPoints(xMin, yMin, xMax, yMax);
|
||||
}
|
||||
|
||||
private static Extent getExtentFromBox(double bottomLeftX, double bottomLeftY, double topRightX, double topRightY) {
|
||||
return Extent.fromPoints(GeoShapeCoordinateEncoder.INSTANCE.encodeX(bottomLeftX),
|
||||
GeoShapeCoordinateEncoder.INSTANCE.encodeY(bottomLeftY),
|
||||
GeoShapeCoordinateEncoder.INSTANCE.encodeX(topRightX),
|
||||
GeoShapeCoordinateEncoder.INSTANCE.encodeY(topRightY));
|
||||
|
||||
}
|
||||
|
||||
private boolean intersects(Geometry g, Point p, double extentSize) throws IOException {
|
||||
|
||||
Extent bufferBounds = bufferedExtentFromGeoPoint(p.getX(), p.getY(), extentSize);
|
||||
GeoRelation relation = triangleTreeReader(g, GeoShapeCoordinateEncoder.INSTANCE)
|
||||
.relateTile(bufferBounds.minX(), bufferBounds.minY(), bufferBounds.maxX(), bufferBounds.maxY());
|
||||
return relation == GeoRelation.QUERY_CROSSES || relation == GeoRelation.QUERY_INSIDE;
|
||||
}
|
||||
|
||||
private static Geometry randomGeometryTreeGeometry() {
|
||||
return randomGeometryTreeGeometry(0);
|
||||
}
|
||||
|
||||
private static Geometry randomGeometryTreeGeometry(int level) {
|
||||
@SuppressWarnings("unchecked") Function<Boolean, Geometry> geometry = ESTestCase.randomFrom(
|
||||
GeometryTestUtils::randomLine,
|
||||
GeometryTestUtils::randomPoint,
|
||||
GeometryTestUtils::randomPolygon,
|
||||
GeometryTestUtils::randomMultiLine,
|
||||
GeometryTestUtils::randomMultiPoint,
|
||||
level < 3 ? (b) -> randomGeometryTreeCollection(level + 1) : GeometryTestUtils::randomPoint // don't build too deep
|
||||
);
|
||||
return geometry.apply(false);
|
||||
}
|
||||
|
||||
private static Geometry randomGeometryTreeCollection(int level) {
|
||||
int size = ESTestCase.randomIntBetween(1, 10);
|
||||
java.util.List<Geometry> shapes = new ArrayList<>();
|
||||
for (int i = 0; i < size; i++) {
|
||||
shapes.add(randomGeometryTreeGeometry(level));
|
||||
}
|
||||
return new GeometryCollection<>(shapes);
|
||||
}
|
||||
|
||||
private static void assertDimensionalShapeType(Geometry geometry, DimensionalShapeType expected) throws IOException {
|
||||
TriangleTreeReader reader = triangleTreeReader(geometry, GeoShapeCoordinateEncoder.INSTANCE);
|
||||
assertThat(reader.getDimensionalShapeType(), equalTo(expected));
|
||||
}
|
||||
|
||||
/**
|
||||
 * Performs a left fold operation on all primitive geometries (points, lines, polygons, circles and rectangles).
|
||||
* All collection geometries are iterated depth first.
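 * For example (illustrative), {@code fold(geometry, 0, (g, n) -> g instanceof Point ? n + 1 : n)}
 * counts the point primitives in a geometry tree.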
|
||||
*/
|
||||
public static <R, E extends Exception> R fold(Geometry geometry, R state, CheckedBiFunction<Geometry, R, R, E> operation) throws E {
|
||||
return geometry.visit(new GeometryVisitor<R, E>() {
|
||||
@Override
|
||||
public R visit(Circle circle) throws E {
|
||||
return operation.apply(geometry, state);
|
||||
}
|
||||
|
||||
@Override
|
||||
public R visit(GeometryCollection<?> collection) throws E {
|
||||
R ret = state;
|
||||
for (Geometry g : collection) {
|
||||
ret = fold(g, ret, operation);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
@Override
|
||||
public R visit(Line line) throws E {
|
||||
return operation.apply(line, state);
|
||||
}
|
||||
|
||||
@Override
|
||||
public R visit(LinearRing ring) throws E {
|
||||
return operation.apply(ring, state);
|
||||
}
|
||||
|
||||
@Override
|
||||
public R visit(MultiLine multiLine) throws E {
|
||||
return visit((GeometryCollection<?>) multiLine);
|
||||
}
|
||||
|
||||
@Override
|
||||
public R visit(MultiPoint multiPoint) throws E {
|
||||
                return visit((GeometryCollection<?>) multiPoint);
            }
|
||||
|
||||
@Override
|
||||
public R visit(MultiPolygon multiPolygon) throws E {
|
||||
return visit((GeometryCollection<?>) multiPolygon);
|
||||
}
|
||||
|
||||
@Override
|
||||
public R visit(Point point) throws E {
|
||||
return operation.apply(point, state);
|
||||
}
|
||||
|
||||
@Override
|
||||
public R visit(Polygon polygon) throws E {
|
||||
return operation.apply(polygon, state);
|
||||
}
|
||||
|
||||
@Override
|
||||
public R visit(Rectangle rectangle) throws E {
|
||||
return operation.apply(rectangle, state);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
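
A minimal usage sketch of the fold helper above (illustrative only, not part of the change; it assumes a caller inside this test class so that fold and randomGeometryTreeGeometry are in scope):

    // Fold a random geometry tree into a count of its primitive parts. Collections
    // are recursed into depth first and are never passed to the operation themselves,
    // so only points, lines, polygons, circles and rectangles are counted.
    Geometry geometry = randomGeometryTreeGeometry();
    int primitiveCount = fold(geometry, 0, (g, count) -> count + 1);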

@@ -21,7 +21,6 @@ import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.geo.GeoPlugin;
import org.elasticsearch.geometry.Geometry;
import org.elasticsearch.geometry.ShapeType;
import org.elasticsearch.index.get.GetResult;

@@ -65,7 +64,7 @@ public abstract class ShapeQueryBuilderTests extends AbstractQueryTestCase<Shape

    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        return Arrays.asList(SpatialPlugin.class, GeoPlugin.class);
        return Arrays.asList(SpatialPlugin.class);
    }

    protected String fieldName() {

@@ -27,7 +27,7 @@ import org.elasticsearch.geometry.Point;
import org.elasticsearch.geometry.Polygon;
import org.elasticsearch.geometry.utils.StandardValidator;
import org.elasticsearch.geometry.utils.WellKnownText;
import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
import org.elasticsearch.xpack.spatial.index.mapper.GeoShapeWithDocValuesFieldMapper;
import org.elasticsearch.index.mapper.GeoShapeIndexer;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryShardContext;

@@ -213,7 +213,7 @@ public class CircleProcessorTests extends ESTestCase {
        int numSides = randomIntBetween(4, 1000);
        Geometry geometry = SpatialUtils.createRegularGeoShapePolygon(circle, numSides);

        MappedFieldType shapeType = new GeoShapeFieldMapper.GeoShapeFieldType();
        MappedFieldType shapeType = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType();
        shapeType.setHasDocValues(false);
        shapeType.setName(fieldName);

@@ -0,0 +1,95 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.spatial.util;

import org.apache.lucene.document.ShapeField;
import org.apache.lucene.geo.GeoEncodingUtils;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.store.ByteBuffersDataOutput;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoJson;
import org.elasticsearch.common.geo.GeometryParser;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.geometry.Geometry;
import org.elasticsearch.index.mapper.GeoShapeIndexer;
import org.elasticsearch.xpack.spatial.index.mapper.CentroidCalculator;
import org.elasticsearch.xpack.spatial.index.mapper.CoordinateEncoder;
import org.elasticsearch.xpack.spatial.index.mapper.Extent;
import org.elasticsearch.xpack.spatial.index.mapper.GeoRelation;
import org.elasticsearch.xpack.spatial.index.mapper.TriangleTreeReader;
import org.elasticsearch.xpack.spatial.index.mapper.TriangleTreeWriter;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;

public class GeoTestUtils {

    public static void assertRelation(GeoRelation expectedRelation, TriangleTreeReader reader, Extent extent) throws IOException {
        GeoRelation actualRelation = reader.relateTile(extent.minX(), extent.minY(), extent.maxX(), extent.maxY());
        assertThat(actualRelation, equalTo(expectedRelation));
    }

    public static ShapeField.DecodedTriangle[] toDecodedTriangles(Geometry geometry) throws IOException {
        GeoShapeIndexer indexer = new GeoShapeIndexer(true, "test");
        geometry = indexer.prepareForIndexing(geometry);
        List<IndexableField> fields = indexer.indexShape(null, geometry);
        ShapeField.DecodedTriangle[] triangles = new ShapeField.DecodedTriangle[fields.size()];
        final byte[] scratch = new byte[7 * Integer.BYTES];
        for (int i = 0; i < fields.size(); i++) {
            BytesRef bytesRef = fields.get(i).binaryValue();
            assert bytesRef.length == 7 * Integer.BYTES;
            System.arraycopy(bytesRef.bytes, bytesRef.offset, scratch, 0, 7 * Integer.BYTES);
            ShapeField.decodeTriangle(scratch, triangles[i] = new ShapeField.DecodedTriangle());
        }
        return triangles;
    }

    public static TriangleTreeReader triangleTreeReader(Geometry geometry, CoordinateEncoder encoder) throws IOException {
        ShapeField.DecodedTriangle[] triangles = toDecodedTriangles(geometry);
        TriangleTreeWriter writer = new TriangleTreeWriter(Arrays.asList(triangles), encoder, new CentroidCalculator(geometry));
        ByteBuffersDataOutput output = new ByteBuffersDataOutput();
        writer.writeTo(output);
        TriangleTreeReader reader = new TriangleTreeReader(encoder);
        reader.reset(new BytesRef(output.toArrayCopy(), 0, Math.toIntExact(output.size())));
        return reader;
    }

    public static double encodeDecodeLat(double lat) {
        return GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(lat));
    }

    public static double encodeDecodeLon(double lon) {
        return GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(lon));
    }

    public static String toGeoJsonString(Geometry geometry) throws IOException {
        XContentBuilder builder = XContentFactory.jsonBuilder();
        GeoJson.toXContent(geometry, builder, ToXContent.EMPTY_PARAMS);
        return XContentHelper.convertToJson(BytesReference.bytes(builder), true, false, XContentType.JSON);
    }

    public static Geometry fromGeoJsonString(String geoJson) throws Exception {
        XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
            new BytesArray(geoJson), XContentType.JSON);
        parser.nextToken();
        Geometry geometry = new GeometryParser(true, true, true).parse(parser);
        return new GeoShapeIndexer(true, "indexer").prepareForIndexing(geometry);
    }
}
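
A short sketch of how these helpers might be exercised from a test (illustrative only; the GeometryTestUtils and GeoShapeCoordinateEncoder references are borrowed from the test class above and are not part of this file):

    // Build a triangle tree for a random polygon, then check that a tile covering
    // the whole globe is not disjoint from it, mirroring the intersects() helper above.
    Geometry polygon = GeometryTestUtils.randomPolygon(false);
    TriangleTreeReader reader = GeoTestUtils.triangleTreeReader(polygon, GeoShapeCoordinateEncoder.INSTANCE);
    GeoRelation relation = reader.relateTile(
        GeoShapeCoordinateEncoder.INSTANCE.encodeX(-180), GeoShapeCoordinateEncoder.INSTANCE.encodeY(-90),
        GeoShapeCoordinateEncoder.INSTANCE.encodeX(180), GeoShapeCoordinateEncoder.INSTANCE.encodeY(90));
    assertThat(relation == GeoRelation.QUERY_CROSSES || relation == GeoRelation.QUERY_INSIDE, equalTo(true));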

@@ -104,6 +104,9 @@ subprojects {
        testRuntime "org.jline:jline-style:${jlineVersion}"

        testRuntime "org.elasticsearch:jna:${versions.jna}"

        // spatial dependency
        testRuntime project(path: xpackModule('spatial'))
    }

    if (project.name != 'security') {

@@ -275,7 +275,6 @@ public class SqlDataTypes {
            || dataType == SCALED_FLOAT // because of scaling_factor
            || dataType == CONSTANT_KEYWORD
            || dataType == GEO_POINT
            || dataType == GEO_SHAPE
            || dataType == SHAPE;
    }