Compressed geo-point field data.

This commit allows precision to be traded for memory when storing geo points. The new field data implementation accepts a `precision` parameter that controls the maximum expected error when storing coordinates. This option can be updated on a live index with the PUT mapping API.

The default precision is 1cm, which requires 8 bytes per geo point (a 50% memory saving compared to storing two doubles).

Close #4386
This commit is contained in:
parent 684affa5c7
commit 33599d9a34
@@ -156,6 +156,47 @@ is `true`)
|`normalize_lon` |Set to `true` to normalize longitude
|=======================================================================

[float]
==== Field data

By default, geo points use the `array` format, which loads geo points into two
parallel double arrays and guarantees that there is no precision loss. However,
this can require a non-negligible amount of memory (16 bytes per document),
which is why Elasticsearch also provides a field data implementation with
lossy compression called `compressed`:

[source,js]
--------------------------------------------------
{
    "pin" : {
        "properties" : {
            "location" : {
                "type" : "geo_point",
                "fielddata" : {
                    "format" : "compressed",
                    "precision" : "1cm"
                }
            }
        }
    }
}
--------------------------------------------------

This field data format comes with a `precision` option that lets you configure
how much precision can be traded for memory. The default value is `1cm`. The
following table shows the memory savings for various precisions:

|=============================================
| Precision | Bytes per point | Size reduction
| 1km       | 4               | 75%
| 3m        | 6               | 62.5%
| 1cm       | 8               | 50%
| 1mm       | 10              | 37.5%
|=============================================

Precision can be changed on a live index by using the update mapping API.
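As an aside, the arithmetic behind this table can be reproduced in a few lines. The sketch below is illustration only, not part of this commit: with `b` bits per coordinate, the encoding stores `round((value + 180) / factor)` where `factor = 2^(9 - b)` degrees, so the worst-case rounding error is half a step per coordinate.

[source,java]
--------------------------------------------------
// Illustration only (not part of the Elasticsearch sources): reproduce the
// "bytes per point" column above. Each coordinate is stored on b bits as
// round((value + 180) / factor) with factor = 2^(9 - b) degrees, so the
// worst-case rounding error is factor / 2 degrees per coordinate.
public class GeoPrecisionSketch {

    private static final double METERS_PER_DEGREE = 40_075_016.686 / 360; // equatorial circumference / 360

    /** Worst-case error in meters for a geo point stored on numBytes bytes. */
    static double worstCaseErrorMeters(int numBytes) {
        int bitsPerCoordinate = numBytes / 2 * 8;
        double halfStepDegrees = Math.pow(2, 9 - bitsPerCoordinate) / 2;
        // both latitude and longitude may be off by half a step (plane distance)
        return Math.sqrt(2) * halfStepDegrees * METERS_PER_DEGREE;
    }

    public static void main(String[] args) {
        for (int numBytes = 4; numBytes <= 10; numBytes += 2) {
            System.out.printf("%2d bytes/point -> ~%.6f m worst-case error%n",
                    numBytes, worstCaseErrorMeters(numBytes));
        }
        // ~614.9 m (under 1km), ~2.40 m (under 3m),
        // ~0.0094 m (under 1cm), ~0.000037 m (under 1mm)
    }
}
--------------------------------------------------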
[float]
==== Usage in Scripts
@@ -248,7 +248,7 @@ public enum DistanceUnit {
    public final double value;
    public final DistanceUnit unit;

    private Distance(double value, DistanceUnit unit) {
    public Distance(double value, DistanceUnit unit) {
        super();
        this.value = value;
        this.unit = unit;
@@ -170,7 +170,7 @@ public interface IndexFieldData<FD extends AtomicFieldData> extends IndexComponent {

    interface Builder {

        IndexFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, FieldDataType type, IndexFieldDataCache cache);
        IndexFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper<?> mapper, IndexFieldDataCache cache);
    }

    public interface WithOrdinals<FD extends AtomicFieldData.WithOrdinals> extends IndexFieldData<FD> {
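The signature change above is the enabling refactor for the rest of this commit: builders now receive the whole `FieldMapper` instead of just its `Names` and `FieldDataType`, which lets a builder read per-field settings such as the new `precision` option. Below is a toy model of the idea; every type in it is invented for illustration, only the shape of the change comes from the diff.

[source,java]
--------------------------------------------------
import java.util.Map;

// Invented stand-ins for FieldMapper and IndexFieldData.Builder, just to show
// why passing the mapper is more flexible than passing (names, type) pairs.
interface FieldMapperLike {
    String name();
    Map<String, String> fieldDataSettings();
}

interface FieldDataBuilderLike {
    Object build(FieldMapperLike mapper); // new-style contract: takes the whole mapper
}

public class BuilderContractSketch {
    public static void main(String[] args) {
        FieldMapperLike mapper = new FieldMapperLike() {
            public String name() { return "location"; }
            public Map<String, String> fieldDataSettings() {
                return Map.of("format", "compressed", "precision", "1cm"); // Java 9+
            }
        };
        FieldDataBuilderLike compressed = m -> "field data for [" + m.name()
                + "] at precision " + m.fieldDataSettings().getOrDefault("precision", "1cm");
        System.out.println(compressed.build(mapper));
    }
}
--------------------------------------------------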
@@ -49,6 +49,7 @@ public class IndexFieldDataService extends AbstractIndexComponent {
    private static final String ARRAY_FORMAT = "array";
    private static final String PAGED_BYTES_FORMAT = "paged_bytes";
    private static final String FST_FORMAT = "fst";
    private static final String COMPRESSED_FORMAT = "compressed";

    private final static ImmutableMap<String, IndexFieldData.Builder> buildersByType;
    private final static ImmutableMap<String, IndexFieldData.Builder> docValuesBuildersByType;
@@ -108,6 +109,8 @@ public class IndexFieldDataService extends AbstractIndexComponent {

                .put(Tuple.tuple("geo_point", ARRAY_FORMAT), new GeoPointDoubleArrayIndexFieldData.Builder())
                .put(Tuple.tuple("geo_point", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
                .put(Tuple.tuple("geo_point", COMPRESSED_FORMAT), new GeoPointCompressedIndexFieldData.Builder())

                .immutableMap();
    }
@@ -226,7 +229,7 @@ public class IndexFieldDataService extends AbstractIndexComponent {
                fieldDataCaches.put(fieldNames.indexName(), cache);
            }

            fieldData = builder.build(index, indexSettings, fieldNames, type, cache);
            fieldData = builder.build(index, indexSettings, mapper, cache);
            loadedFieldData.put(fieldNames.indexName(), fieldData);
        }
    }
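For context, the service resolves builders by a (field type, field data format) pair; this commit registers the new `compressed` format for `geo_point` next to the existing `array` and `disabled` entries. A stripped-down model follows, using plain strings instead of `Tuple` keys and real builder instances.

[source,java]
--------------------------------------------------
import java.util.HashMap;
import java.util.Map;

// Toy model (invented names) of the (type, format) -> builder registry.
public class FormatRegistrySketch {
    public static void main(String[] args) {
        Map<String, String> buildersByTypeAndFormat = new HashMap<>();
        buildersByTypeAndFormat.put("geo_point|array", "GeoPointDoubleArrayIndexFieldData.Builder");
        buildersByTypeAndFormat.put("geo_point|disabled", "DisabledIndexFieldData.Builder");
        buildersByTypeAndFormat.put("geo_point|compressed", "GeoPointCompressedIndexFieldData.Builder"); // new in this commit

        String type = "geo_point", format = "compressed";
        // exact (type, format) lookup, as the service does with Tuple keys
        String builder = buildersByTypeAndFormat.get(type + "|" + format);
        System.out.println(builder); // GeoPointCompressedIndexFieldData.Builder
    }
}
--------------------------------------------------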
@@ -0,0 +1,145 @@
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.fielddata.plain;

import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.UnicodeUtil;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.fieldcomparator.SortMode;
import org.elasticsearch.index.mapper.FieldMapper.Names;

import java.io.IOException;

abstract class AbstractGeoPointIndexFieldData extends AbstractIndexFieldData<AtomicGeoPointFieldData<ScriptDocValues>> implements IndexGeoPointFieldData<AtomicGeoPointFieldData<ScriptDocValues>> {

    protected static class Empty extends AtomicGeoPointFieldData<ScriptDocValues> {

        private final int numDocs;

        Empty(int numDocs) {
            this.numDocs = numDocs;
        }

        @Override
        public boolean isMultiValued() {
            return false;
        }

        @Override
        public boolean isValuesOrdered() {
            return false;
        }

        @Override
        public long getNumberUniqueValues() {
            return 0;
        }

        @Override
        public long getMemorySizeInBytes() {
            return 0;
        }

        @Override
        public BytesValues getBytesValues(boolean needsHashes) {
            return BytesValues.EMPTY;
        }

        @Override
        public GeoPointValues getGeoPointValues() {
            return GeoPointValues.EMPTY;
        }

        @Override
        public ScriptDocValues getScriptValues() {
            return ScriptDocValues.EMPTY;
        }

        @Override
        public int getNumDocs() {
            return numDocs;
        }

        @Override
        public void close() {
            // no-op
        }
    }

    protected static class GeoPointEnum {

        private final BytesRefIterator termsEnum;
        private final GeoPoint next;
        private final CharsRef spare;

        protected GeoPointEnum(BytesRefIterator termsEnum) {
            this.termsEnum = termsEnum;
            next = new GeoPoint();
            spare = new CharsRef();
        }

        public GeoPoint next() throws IOException {
            final BytesRef term = termsEnum.next();
            if (term == null) {
                return null;
            }
            UnicodeUtil.UTF8toUTF16(term, spare);
            int commaIndex = -1;
            for (int i = 0; i < spare.length; i++) {
                if (spare.chars[spare.offset + i] == ',') { // saves a string creation
                    commaIndex = i;
                    break;
                }
            }
            if (commaIndex == -1) {
                assert false;
                return next.reset(0, 0);
            }
            final double lat = Double.parseDouble(new String(spare.chars, spare.offset, (commaIndex - spare.offset)));
            final double lon = Double.parseDouble(new String(spare.chars, (spare.offset + (commaIndex + 1)), spare.length - ((commaIndex + 1) - spare.offset)));
            return next.reset(lat, lon);
        }

    }

    public AbstractGeoPointIndexFieldData(Index index, Settings indexSettings, Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache) {
        super(index, indexSettings, fieldNames, fieldDataType, cache);
    }

    @Override
    public boolean valuesOrdered() {
        // because we might have single values? we can dynamically update a flag to reflect that
        // based on the atomic field data loaded
        return false;
    }

    @Override
    public final XFieldComparatorSource comparatorSource(@Nullable Object missingValue, SortMode sortMode) {
        throw new ElasticSearchIllegalArgumentException("can't sort on geo_point field without using specific sorting feature, like geo_distance");
    }

}
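GeoPointEnum's index arithmetic is easy to misread because it mixes the CharsRef offset into both the comma position and the substring lengths. Here is a standalone walk-through of the same expressions on a concrete term (illustration only; with a zero offset, as produced by `UTF8toUTF16` into a fresh `CharsRef`, they reduce to a plain split on the comma).

[source,java]
--------------------------------------------------
// Standalone illustration of the "lat,lon" term parsing in GeoPointEnum above.
// Offsets are written out explicitly because the class works on a CharsRef
// (chars + offset + length) rather than a String.
public class LatLonTermParseSketch {
    public static void main(String[] args) {
        char[] chars = "41.12,-71.34".toCharArray();
        int offset = 0, length = chars.length;

        int commaIndex = -1;
        for (int i = 0; i < length; i++) {
            if (chars[offset + i] == ',') { // scan instead of String.indexOf: no allocation
                commaIndex = i;
                break;
            }
        }
        // with offset == 0 these are the same expressions GeoPointEnum uses
        double lat = Double.parseDouble(new String(chars, offset, commaIndex - offset));
        double lon = Double.parseDouble(new String(chars, offset + commaIndex + 1,
                length - (commaIndex + 1 - offset)));
        System.out.println(lat + " / " + lon); // 41.12 / -71.34
    }
}
--------------------------------------------------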
@@ -37,8 +37,8 @@ public final class DisabledIndexFieldData extends AbstractIndexFieldData<AtomicFieldData<?>> {

    public static class Builder implements IndexFieldData.Builder {
        @Override
        public IndexFieldData<AtomicFieldData<?>> build(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, FieldDataType type, IndexFieldDataCache cache) {
            return new DisabledIndexFieldData(index, indexSettings, fieldNames, type, cache);
        public IndexFieldData<AtomicFieldData<?>> build(Index index, @IndexSettings Settings indexSettings, FieldMapper<?> mapper, IndexFieldDataCache cache) {
            return new DisabledIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache);
        }
    }
@@ -24,10 +24,10 @@ import org.apache.lucene.index.IndexReader;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMapper.Names;
import org.elasticsearch.index.mapper.internal.IdFieldMapper;
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
@@ -93,8 +93,9 @@ public abstract class DocValuesIndexFieldData {
        }

        @Override
        public IndexFieldData<?> build(Index index, Settings indexSettings, Names fieldNames, FieldDataType type, IndexFieldDataCache cache) {
            final Settings fdSettings = type.getSettings();
        public IndexFieldData<?> build(Index index, Settings indexSettings, FieldMapper<?> mapper, IndexFieldDataCache cache) {
            final FieldMapper.Names fieldNames = mapper.names();
            final Settings fdSettings = mapper.fieldDataType().getSettings();
            final Map<String, Settings> filter = fdSettings.getGroups("filter");
            if (filter != null && !filter.isEmpty()) {
                throw new ElasticSearchIllegalArgumentException("Doc values field data doesn't support filters [" + fieldNames.name() + "]");
@@ -43,8 +43,8 @@ public class DoubleArrayIndexFieldData extends AbstractIndexFieldData<DoubleArrayAtomicFieldData> {
    public static class Builder implements IndexFieldData.Builder {

        @Override
        public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, FieldDataType type, IndexFieldDataCache cache) {
            return new DoubleArrayIndexFieldData(index, indexSettings, fieldNames, type, cache);
        public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper<?> mapper, IndexFieldDataCache cache) {
            return new DoubleArrayIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache);
        }
    }
@@ -43,8 +43,8 @@ public class FSTBytesIndexFieldData extends AbstractBytesIndexFieldData<FSTBytesAtomicFieldData> {
    public static class Builder implements IndexFieldData.Builder {

        @Override
        public IndexFieldData<FSTBytesAtomicFieldData> build(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, FieldDataType type, IndexFieldDataCache cache) {
            return new FSTBytesIndexFieldData(index, indexSettings, fieldNames, type, cache);
        public IndexFieldData<FSTBytesAtomicFieldData> build(Index index, @IndexSettings Settings indexSettings, FieldMapper<?> mapper, IndexFieldDataCache cache) {
            return new FSTBytesIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache);
        }
    }
@@ -43,8 +43,8 @@ public class FloatArrayIndexFieldData extends AbstractIndexFieldData<FloatArrayAtomicFieldData> {
    public static class Builder implements IndexFieldData.Builder {

        @Override
        public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, FieldDataType type, IndexFieldDataCache cache) {
            return new FloatArrayIndexFieldData(index, indexSettings, fieldNames, type, cache);
        public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper<?> mapper, IndexFieldDataCache cache) {
            return new FloatArrayIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache);
        }
    }
@@ -0,0 +1,281 @@
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.fielddata.plain;

import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.PagedMutable;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
import org.elasticsearch.index.fielddata.GeoPointValues;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;

/**
 * Field data atomic impl for geo points with lossy compression.
 */
public abstract class GeoPointCompressedAtomicFieldData extends AtomicGeoPointFieldData<ScriptDocValues> {

    private final int numDocs;

    protected long size = -1;

    public GeoPointCompressedAtomicFieldData(int numDocs) {
        this.numDocs = numDocs;
    }

    @Override
    public void close() {
    }

    @Override
    public int getNumDocs() {
        return numDocs;
    }

    @Override
    public ScriptDocValues getScriptValues() {
        return new ScriptDocValues.GeoPoints(getGeoPointValues());
    }

    static class WithOrdinals extends GeoPointCompressedAtomicFieldData {

        private final GeoPointFieldMapper.Encoding encoding;
        private final PagedMutable lon, lat;
        private final Ordinals ordinals;

        public WithOrdinals(GeoPointFieldMapper.Encoding encoding, PagedMutable lon, PagedMutable lat, int numDocs, Ordinals ordinals) {
            super(numDocs);
            this.encoding = encoding;
            this.lon = lon;
            this.lat = lat;
            this.ordinals = ordinals;
        }

        @Override
        public boolean isMultiValued() {
            return ordinals.isMultiValued();
        }

        @Override
        public boolean isValuesOrdered() {
            return true;
        }

        @Override
        public long getNumberUniqueValues() {
            return ordinals.getNumOrds();
        }

        @Override
        public long getMemorySizeInBytes() {
            if (size == -1) {
                size = RamUsageEstimator.NUM_BYTES_INT/*size*/ + RamUsageEstimator.NUM_BYTES_INT/*numDocs*/ + lon.ramBytesUsed() + lat.ramBytesUsed();
            }
            return size;
        }

        @Override
        public GeoPointValues getGeoPointValues() {
            return new GeoPointValuesWithOrdinals(encoding, lon, lat, ordinals.ordinals());
        }

        public static class GeoPointValuesWithOrdinals extends GeoPointValues {

            private final GeoPointFieldMapper.Encoding encoding;
            private final PagedMutable lon, lat;
            private final Ordinals.Docs ordinals;

            private final GeoPoint scratch = new GeoPoint();

            GeoPointValuesWithOrdinals(GeoPointFieldMapper.Encoding encoding, PagedMutable lon, PagedMutable lat, Ordinals.Docs ordinals) {
                super(ordinals.isMultiValued());
                this.encoding = encoding;
                this.lon = lon;
                this.lat = lat;
                this.ordinals = ordinals;
            }

            @Override
            public GeoPoint nextValue() {
                final long ord = ordinals.nextOrd();
                assert ord > 0;
                return encoding.decode(lat.get(ord), lon.get(ord), scratch);
            }

            @Override
            public int setDocument(int docId) {
                this.docId = docId;
                return ordinals.setDocument(docId);
            }
        }
    }

    /**
     * Assumes unset values are marked in bitset, and docId is used as the index to the value array.
     */
    public static class SingleFixedSet extends GeoPointCompressedAtomicFieldData {

        private final GeoPointFieldMapper.Encoding encoding;
        private final PagedMutable lon, lat;
        private final FixedBitSet set;
        private final long numOrds;

        public SingleFixedSet(GeoPointFieldMapper.Encoding encoding, PagedMutable lon, PagedMutable lat, int numDocs, FixedBitSet set, long numOrds) {
            super(numDocs);
            this.encoding = encoding;
            this.lon = lon;
            this.lat = lat;
            this.set = set;
            this.numOrds = numOrds;
        }

        @Override
        public boolean isMultiValued() {
            return false;
        }

        @Override
        public boolean isValuesOrdered() {
            return false;
        }

        @Override
        public long getNumberUniqueValues() {
            return numOrds;
        }

        @Override
        public long getMemorySizeInBytes() {
            if (size == -1) {
                size = RamUsageEstimator.NUM_BYTES_INT/*size*/ + RamUsageEstimator.NUM_BYTES_INT/*numDocs*/ + lon.ramBytesUsed() + lat.ramBytesUsed() + RamUsageEstimator.sizeOf(set.getBits());
            }
            return size;
        }

        @Override
        public GeoPointValues getGeoPointValues() {
            return new GeoPointValuesSingleFixedSet(encoding, lon, lat, set);
        }


        static class GeoPointValuesSingleFixedSet extends GeoPointValues {

            private final GeoPointFieldMapper.Encoding encoding;
            private final PagedMutable lat, lon;
            private final FixedBitSet set;
            private final GeoPoint scratch = new GeoPoint();


            GeoPointValuesSingleFixedSet(GeoPointFieldMapper.Encoding encoding, PagedMutable lon, PagedMutable lat, FixedBitSet set) {
                super(false);
                this.encoding = encoding;
                this.lon = lon;
                this.lat = lat;
                this.set = set;
            }

            @Override
            public int setDocument(int docId) {
                this.docId = docId;
                return set.get(docId) ? 1 : 0;
            }

            @Override
            public GeoPoint nextValue() {
                return encoding.decode(lat.get(docId), lon.get(docId), scratch);
            }
        }
    }

    /**
     * Assumes all the values are "set", and docId is used as the index to the value array.
     */
    public static class Single extends GeoPointCompressedAtomicFieldData {

        private final GeoPointFieldMapper.Encoding encoding;
        private final PagedMutable lon, lat;
        private final long numOrds;

        public Single(GeoPointFieldMapper.Encoding encoding, PagedMutable lon, PagedMutable lat, int numDocs, long numOrds) {
            super(numDocs);
            this.encoding = encoding;
            this.lon = lon;
            this.lat = lat;
            this.numOrds = numOrds;
        }

        @Override
        public boolean isMultiValued() {
            return false;
        }

        @Override
        public boolean isValuesOrdered() {
            return false;
        }

        @Override
        public long getNumberUniqueValues() {
            return numOrds;
        }

        @Override
        public long getMemorySizeInBytes() {
            if (size == -1) {
                size = RamUsageEstimator.NUM_BYTES_INT/*size*/ + RamUsageEstimator.NUM_BYTES_INT/*numDocs*/ + (lon.ramBytesUsed() + lat.ramBytesUsed());
            }
            return size;
        }

        @Override
        public GeoPointValues getGeoPointValues() {
            return new GeoPointValuesSingle(encoding, lon, lat);
        }

        static class GeoPointValuesSingle extends GeoPointValues {

            private final GeoPointFieldMapper.Encoding encoding;
            private final PagedMutable lon, lat;

            private final GeoPoint scratch = new GeoPoint();


            GeoPointValuesSingle(GeoPointFieldMapper.Encoding encoding, PagedMutable lon, PagedMutable lat) {
                super(false);
                this.encoding = encoding;
                this.lon = lon;
                this.lat = lat;
            }

            @Override
            public int setDocument(int docId) {
                this.docId = docId;
                return 1;
            }

            @Override
            public GeoPoint nextValue() {
                return encoding.decode(lat.get(docId), lon.get(docId), scratch);
            }
        }
    }
}
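A back-of-the-envelope sketch of what the packed `lat`/`lon` arrays in these classes cost for a single-valued field (independent of the ES classes; the numbers match the documentation table at the top of this commit):

[source,java]
--------------------------------------------------
// bytesPerPoint = 2 coordinates * bitsPerCoordinate / 8, since PagedMutable
// packs values at exactly the requested bit width.
public class GeoMemorySketch {
    public static void main(String[] args) {
        long numDocs = 10_000_000L;
        int[] bitsPerCoordinate = {16, 24, 32, 40}; // 1km, 3m, 1cm, 1mm precisions
        for (int bits : bitsPerCoordinate) {
            long bytes = numDocs * 2 * bits / 8;
            System.out.printf("%d bits/coordinate -> %.1f MB (vs %.1f MB for two doubles)%n",
                    bits, bytes / 1e6, numDocs * 16 / 1e6);
        }
        // 32 bits/coordinate -> 80.0 MB vs 160.0 MB, i.e. the 50% saving at 1cm
    }
}
--------------------------------------------------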
@@ -0,0 +1,137 @@
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.fielddata.plain;

import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.packed.PackedInts;
import org.apache.lucene.util.packed.PagedMutable;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.unit.DistanceUnit.Distance;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs;
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.settings.IndexSettings;

/**
 */
public class GeoPointCompressedIndexFieldData extends AbstractGeoPointIndexFieldData {

    private static final String PRECISION_KEY = "precision";
    private static final Distance DEFAULT_PRECISION_VALUE = new Distance(1, DistanceUnit.CENTIMETERS);

    public static class Builder implements IndexFieldData.Builder {

        @Override
        public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper<?> mapper, IndexFieldDataCache cache) {
            FieldDataType type = mapper.fieldDataType();
            final String precisionAsString = type.getSettings().get(PRECISION_KEY);
            final Distance precision;
            if (precisionAsString != null) {
                precision = Distance.parseDistance(precisionAsString, DistanceUnit.METERS);
            } else {
                precision = DEFAULT_PRECISION_VALUE;
            }
            return new GeoPointCompressedIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, precision);
        }
    }

    private final GeoPointFieldMapper.Encoding encoding;

    public GeoPointCompressedIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache, Distance precision) {
        super(index, indexSettings, fieldNames, fieldDataType, cache);
        this.encoding = GeoPointFieldMapper.Encoding.of(precision);
    }

    @Override
    public AtomicGeoPointFieldData<ScriptDocValues> loadDirect(AtomicReaderContext context) throws Exception {
        AtomicReader reader = context.reader();

        Terms terms = reader.terms(getFieldNames().indexName());
        if (terms == null) {
            return new Empty(reader.maxDoc());
        }
        final long initialSize;
        if (terms.size() >= 0) {
            initialSize = 1 + terms.size();
        } else { // codec doesn't expose size
            initialSize = 1 + Math.min(1 << 12, reader.maxDoc());
        }
        final int pageSize = Integer.highestOneBit(BigArrays.PAGE_SIZE_IN_BYTES * 8 / encoding.numBitsPerCoordinate() - 1) << 1;
        PagedMutable lat = new PagedMutable(initialSize, pageSize, encoding.numBitsPerCoordinate(), PackedInts.COMPACT);
        PagedMutable lon = new PagedMutable(initialSize, pageSize, encoding.numBitsPerCoordinate(), PackedInts.COMPACT);
        final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
        OrdinalsBuilder builder = new OrdinalsBuilder(terms.size(), reader.maxDoc(), acceptableTransientOverheadRatio);
        try {
            final GeoPointEnum iter = new GeoPointEnum(builder.buildFromTerms(terms.iterator(null)));
            GeoPoint point;
            long ord = 0;
            while ((point = iter.next()) != null) {
                ++ord;
                if (lat.size() <= ord) {
                    final long newSize = BigArrays.overSize(ord + 1);
                    lat = lat.resize(newSize);
                    lon = lon.resize(newSize);
                }
                lat.set(ord, encoding.encodeCoordinate(point.getLat()));
                lon.set(ord, encoding.encodeCoordinate(point.getLon()));
            }

            Ordinals build = builder.build(fieldDataType.getSettings());
            if (!build.isMultiValued() && CommonSettings.removeOrdsOnSingleValue(fieldDataType)) {
                Docs ordinals = build.ordinals();
                int maxDoc = reader.maxDoc();
                PagedMutable sLat = new PagedMutable(reader.maxDoc(), pageSize, encoding.numBitsPerCoordinate(), PackedInts.COMPACT);
                PagedMutable sLon = new PagedMutable(reader.maxDoc(), pageSize, encoding.numBitsPerCoordinate(), PackedInts.COMPACT);
                for (int i = 0; i < maxDoc; i++) {
                    final long nativeOrdinal = ordinals.getOrd(i);
                    sLat.set(i, lat.get(nativeOrdinal));
                    sLon.set(i, lon.get(nativeOrdinal));
                }
                FixedBitSet set = builder.buildDocsWithValuesSet();
                if (set == null) {
                    return new GeoPointCompressedAtomicFieldData.Single(encoding, sLon, sLat, reader.maxDoc(), ordinals.getNumOrds());
                } else {
                    return new GeoPointCompressedAtomicFieldData.SingleFixedSet(encoding, sLon, sLat, reader.maxDoc(), set, ordinals.getNumOrds());
                }
            } else {
                if (lat.size() != build.getMaxOrd()) {
                    lat = lat.resize(build.getMaxOrd());
                    lon = lon.resize(build.getMaxOrd());
                }
                return new GeoPointCompressedAtomicFieldData.WithOrdinals(encoding, lon, lat, reader.maxDoc(), build);
            }
        } finally {
            builder.close();
        }

    }

}
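The `pageSize` expression in `loadDirect` above is dense; it computes the smallest power of two that is at least `PAGE_SIZE_IN_BYTES * 8 / bitsPerValue`, since `PagedMutable` pages are sized in values. A standalone check follows (the 16 KB constant is an assumption about `BigArrays`, not taken from this diff):

[source,java]
--------------------------------------------------
public class PageSizeSketch {
    static final int PAGE_SIZE_IN_BYTES = 1 << 14; // assumed 16 KB, mirroring BigArrays

    static int pageSize(int bitsPerValue) {
        // highestOneBit(n - 1) << 1 rounds n up to the next power of two
        // (and leaves n unchanged when it already is one)
        return Integer.highestOneBit(PAGE_SIZE_IN_BYTES * 8 / bitsPerValue - 1) << 1;
    }

    public static void main(String[] args) {
        for (int bits : new int[]{16, 24, 32, 40}) {
            System.out.println(bits + " bits/value -> page of " + pageSize(bits) + " values");
        }
        // 16 -> 8192, 24 -> 8192 (rounded up from 5461), 32 -> 4096, 40 -> 4096
    }
}
--------------------------------------------------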
@@ -24,7 +24,6 @@ import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.util.BigDoubleArrayList;
import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.fielddata.GeoPointValues;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
@@ -33,10 +32,6 @@ import org.elasticsearch.index.fielddata.ordinals.Ordinals;
 */
public abstract class GeoPointDoubleArrayAtomicFieldData extends AtomicGeoPointFieldData<ScriptDocValues> {

    public static GeoPointDoubleArrayAtomicFieldData empty(int numDocs) {
        return new Empty(numDocs);
    }

    private final int numDocs;

    protected long size = -1;
@@ -59,48 +54,6 @@ public abstract class GeoPointDoubleArrayAtomicFieldData extends AtomicGeoPointFieldData<ScriptDocValues> {
        return new ScriptDocValues.GeoPoints(getGeoPointValues());
    }

    static class Empty extends GeoPointDoubleArrayAtomicFieldData {

        Empty(int numDocs) {
            super(numDocs);
        }

        @Override
        public boolean isMultiValued() {
            return false;
        }

        @Override
        public boolean isValuesOrdered() {
            return false;
        }

        @Override
        public long getNumberUniqueValues() {
            return 0;
        }

        @Override
        public long getMemorySizeInBytes() {
            return 0;
        }

        @Override
        public BytesValues getBytesValues(boolean needsHashes) {
            return BytesValues.EMPTY;
        }

        @Override
        public GeoPointValues getGeoPointValues() {
            return GeoPointValues.EMPTY;
        }

        @Override
        public ScriptDocValues getScriptValues() {
            return ScriptDocValues.EMPTY;
        }
    }

    static class WithOrdinals extends GeoPointDoubleArrayAtomicFieldData {

        private final BigDoubleArrayList lon, lat;
@@ -22,14 +22,12 @@ package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.*;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.common.Nullable;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigDoubleArrayList;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.fieldcomparator.SortMode;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs;
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
@@ -38,13 +36,13 @@ import org.elasticsearch.index.settings.IndexSettings;

/**
 */
public class GeoPointDoubleArrayIndexFieldData extends AbstractIndexFieldData<GeoPointDoubleArrayAtomicFieldData> implements IndexGeoPointFieldData<GeoPointDoubleArrayAtomicFieldData> {
public class GeoPointDoubleArrayIndexFieldData extends AbstractGeoPointIndexFieldData {

    public static class Builder implements IndexFieldData.Builder {

        @Override
        public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, FieldDataType type, IndexFieldDataCache cache) {
            return new GeoPointDoubleArrayIndexFieldData(index, indexSettings, fieldNames, type, cache);
        public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper<?> mapper, IndexFieldDataCache cache) {
            return new GeoPointDoubleArrayIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache);
        }
    }
@@ -53,43 +51,25 @@ public class GeoPointDoubleArrayIndexFieldData extends AbstractIndexFieldData<GeoPointDoubleArrayAtomicFieldData> {
    }

    @Override
    public boolean valuesOrdered() {
        // because we might have single values? we can dynamically update a flag to reflect that
        // based on the atomic field data loaded
        return false;
    }

    @Override
    public GeoPointDoubleArrayAtomicFieldData loadDirect(AtomicReaderContext context) throws Exception {
    public AtomicGeoPointFieldData<ScriptDocValues> loadDirect(AtomicReaderContext context) throws Exception {
        AtomicReader reader = context.reader();

        Terms terms = reader.terms(getFieldNames().indexName());
        if (terms == null) {
            return GeoPointDoubleArrayAtomicFieldData.empty(reader.maxDoc());
            return new Empty(reader.maxDoc());
        }
        // TODO: how can we guess the number of terms? numerics end up creating more terms per value...
        final BigDoubleArrayList lat = new BigDoubleArrayList();
        final BigDoubleArrayList lon = new BigDoubleArrayList();
        lat.add(0); // first "t" indicates null value
        lon.add(0); // first "t" indicates null value
        final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
        OrdinalsBuilder builder = new OrdinalsBuilder(terms.size(), reader.maxDoc(), acceptableTransientOverheadRatio);
        final CharsRef spare = new CharsRef();
        try {
            BytesRefIterator iter = builder.buildFromTerms(terms.iterator(null));
            BytesRef term;
            while ((term = iter.next()) != null) {
                UnicodeUtil.UTF8toUTF16(term, spare);
                boolean parsed = false;
                for (int i = spare.offset; i < spare.length; i++) {
                    if (spare.chars[i] == ',') { // saves a string creation
                        lat.add(Double.parseDouble(new String(spare.chars, spare.offset, (i - spare.offset))));
                        lon.add(Double.parseDouble(new String(spare.chars, (spare.offset + (i + 1)), spare.length - ((i + 1) - spare.offset))));
                        parsed = true;
                        break;
                    }
                }
                assert parsed;
            final GeoPointEnum iter = new GeoPointEnum(builder.buildFromTerms(terms.iterator(null)));
            GeoPoint point;
            while ((point = iter.next()) != null) {
                lat.add(point.getLat());
                lon.add(point.getLon());
            }

            Ordinals build = builder.build(fieldDataType.getSettings());
@@ -119,9 +99,4 @@ public class GeoPointDoubleArrayIndexFieldData extends AbstractIndexFieldData<GeoPointDoubleArrayAtomicFieldData> {
        }

    }

    @Override
    public XFieldComparatorSource comparatorSource(@Nullable Object missingValue, SortMode sortMode) {
        throw new ElasticSearchIllegalArgumentException("can't sort on geo_point field without using specific sorting feature, like geo_distance");
    }
}
@@ -58,8 +58,8 @@ public class PackedArrayIndexFieldData extends AbstractIndexFieldData<AtomicNumericFieldData> {
        }

        @Override
        public IndexFieldData<AtomicNumericFieldData> build(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, FieldDataType type, IndexFieldDataCache cache) {
            return new PackedArrayIndexFieldData(index, indexSettings, fieldNames, type, cache, numericType);
        public IndexFieldData<AtomicNumericFieldData> build(Index index, @IndexSettings Settings indexSettings, FieldMapper<?> mapper, IndexFieldDataCache cache) {
            return new PackedArrayIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, numericType);
        }
    }
@@ -40,8 +40,8 @@ public class PagedBytesIndexFieldData extends AbstractBytesIndexFieldData<PagedBytesAtomicFieldData> {
    public static class Builder implements IndexFieldData.Builder {

        @Override
        public IndexFieldData<PagedBytesAtomicFieldData> build(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, FieldDataType type, IndexFieldDataCache cache) {
            return new PagedBytesIndexFieldData(index, indexSettings, fieldNames, type, cache);
        public IndexFieldData<PagedBytesAtomicFieldData> build(Index index, @IndexSettings Settings indexSettings, FieldMapper<?> mapper, IndexFieldDataCache cache) {
            return new PagedBytesIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache);
        }
    }
@@ -19,15 +19,19 @@

package org.elasticsearch.index.mapper.geo;

import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
@@ -41,14 +45,16 @@ import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
import org.elasticsearch.index.similarity.SimilarityProvider;

import java.io.IOException;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parsePathType;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseStore;

/**
 * Parsing: We handle:
@@ -60,7 +66,7 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseStore;
 * "lon" : 2.1
 * }
 */
public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
public class GeoPointFieldMapper extends AbstractFieldMapper<GeoPoint> implements ArrayValueMapperParser {

    public static final String CONTENT_TYPE = "geo_point";
@@ -79,7 +85,7 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
        public static final boolean ENABLE_LATLON = false;
        public static final boolean ENABLE_GEOHASH = false;
        public static final boolean ENABLE_GEOHASH_PREFIX = false;
        public static final int PRECISION = GeoHashUtils.PRECISION;
        public static final int GEO_HASH_PRECISION = GeoHashUtils.PRECISION;
        public static final boolean NORMALIZE_LAT = true;
        public static final boolean NORMALIZE_LON = true;
        public static final boolean VALIDATE_LAT = true;
@@ -96,7 +102,7 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
        }
    }

    public static class Builder extends Mapper.Builder<Builder, GeoPointFieldMapper> {
    public static class Builder extends AbstractFieldMapper.Builder<Builder, GeoPointFieldMapper> {

        private ContentPath.Type pathType = Defaults.PATH_TYPE;
@@ -108,9 +114,7 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {

        private Integer precisionStep;

        private int precision = Defaults.PRECISION;

        private boolean store = Defaults.STORE;
        private int geoHashPrecision = Defaults.GEO_HASH_PRECISION;

        boolean validateLat = Defaults.VALIDATE_LAT;
        boolean validateLon = Defaults.VALIDATE_LON;
@@ -118,7 +122,7 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
        boolean normalizeLon = Defaults.NORMALIZE_LON;

        public Builder(String name) {
            super(name);
            super(name, new FieldType(Defaults.FIELD_TYPE));
            this.builder = this;
        }
@@ -147,14 +151,14 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
            return this;
        }

        public Builder precision(int precision) {
            this.precision = precision;
        public Builder geoHashPrecision(int precision) {
            this.geoHashPrecision = precision;
            return this;
        }

        public Builder store(boolean store) {
            this.store = store;
            return this;
        public Builder fieldDataSettings(Settings settings) {
            this.fieldDataSettings = settings;
            return builder;
        }

        @Override
@@ -162,9 +166,6 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
            ContentPath.Type origPathType = context.path().pathType();
            context.path().pathType(pathType);

            GeoStringFieldMapper geoStringMapper = new GeoStringFieldMapper.Builder(name)
                    .includeInAll(false).store(store).build(context);

            DoubleFieldMapper latMapper = null;
            DoubleFieldMapper lonMapper = null;
@@ -176,8 +177,8 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
                latMapperBuilder.precisionStep(precisionStep);
                lonMapperBuilder.precisionStep(precisionStep);
            }
            latMapper = (DoubleFieldMapper) latMapperBuilder.includeInAll(false).store(store).build(context);
            lonMapper = (DoubleFieldMapper) lonMapperBuilder.includeInAll(false).store(store).build(context);
            latMapper = (DoubleFieldMapper) latMapperBuilder.includeInAll(false).store(fieldType.stored()).build(context);
            lonMapper = (DoubleFieldMapper) lonMapperBuilder.includeInAll(false).store(fieldType.stored()).build(context);
            }
            StringFieldMapper geohashMapper = null;
            if (enableGeoHash) {
@@ -187,9 +188,11 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {

            context.path().pathType(origPathType);

            return new GeoPointFieldMapper(name, pathType, enableLatLon, enableGeoHash, enableGeohashPrefix, precisionStep, precision,
                    latMapper, lonMapper, geohashMapper, geoStringMapper,
                    validateLon, validateLat, normalizeLon, normalizeLat);
            // this is important: even if geo points feel like they need to be tokenized to distinguish lat from lon, we actually want to
            // store them as a single token.
            fieldType.setTokenized(false);

            return new GeoPointFieldMapper(buildNames(context), fieldType, indexAnalyzer, searchAnalyzer, postingsProvider, docValuesProvider, similarity, fieldDataSettings, context.indexSettings(), origPathType, enableLatLon, enableGeoHash, enableGeohashPrefix, precisionStep, geoHashPrecision, latMapper, lonMapper, geohashMapper, validateLon, validateLat, normalizeLon, normalizeLat);
        }
    }
@@ -197,14 +200,12 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
        @Override
        public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            Builder builder = geoPointField(name);

            parseField(builder, name, node, parserContext);
            for (Map.Entry<String, Object> entry : node.entrySet()) {
                String fieldName = Strings.toUnderscoreCase(entry.getKey());
                Object fieldNode = entry.getValue();
                if (fieldName.equals("path")) {
                    builder.pathType(parsePathType(name, fieldNode.toString()));
                } else if (fieldName.equals("store")) {
                    builder.store(parseStore(name, fieldNode.toString()));
                } else if (fieldName.equals("lat_lon")) {
                    builder.enableLatLon(XContentMapValues.nodeBooleanValue(fieldNode));
                } else if (fieldName.equals("geohash")) {
@@ -217,7 +218,7 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
                } else if (fieldName.equals("precision_step")) {
                    builder.precisionStep(XContentMapValues.nodeIntegerValue(fieldNode));
                } else if (fieldName.equals("geohash_precision")) {
                    builder.precision(XContentMapValues.nodeIntegerValue(fieldNode));
                    builder.geoHashPrecision(XContentMapValues.nodeIntegerValue(fieldNode));
                } else if (fieldName.equals("validate")) {
                    builder.validateLat = XContentMapValues.nodeBooleanValue(fieldNode);
                    builder.validateLon = XContentMapValues.nodeBooleanValue(fieldNode);
@@ -238,7 +239,130 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
        }
    }

    private final String name;
    /**
     * A byte-aligned fixed-length encoding for latitudes and longitudes.
     */
    public static final class Encoding {

        // With 14 bytes we already have better precision than a double since a double has 11 bits of exponent
        private static final int MAX_NUM_BYTES = 14;

        private static final Encoding[] INSTANCES;
        static {
            INSTANCES = new Encoding[MAX_NUM_BYTES + 1];
            for (int numBytes = 2; numBytes <= MAX_NUM_BYTES; numBytes += 2) {
                INSTANCES[numBytes] = new Encoding(numBytes);
            }
        }

        /** Get an instance based on the number of bytes that has been used to encode values. */
        public static final Encoding of(int numBytesPerValue) {
            final Encoding instance = INSTANCES[numBytesPerValue];
            if (instance == null) {
                throw new ElasticSearchIllegalStateException("No encoding for " + numBytesPerValue + " bytes per value");
            }
            return instance;
        }

        /** Get an instance based on the expected precision. Here are examples of the number of required bytes per value depending on the
         * expected precision:<ul>
         * <li>1km: 4 bytes</li>
         * <li>3m: 6 bytes</li>
         * <li>1m: 8 bytes</li>
         * <li>1cm: 8 bytes</li>
         * <li>1mm: 10 bytes</li></ul> */
        public static final Encoding of(DistanceUnit.Distance precision) {
            for (Encoding encoding : INSTANCES) {
                if (encoding != null && encoding.precision().compareTo(precision) <= 0) {
                    return encoding;
                }
            }
            return INSTANCES[MAX_NUM_BYTES];
        }

        private final DistanceUnit.Distance precision;
        private final int numBytes;
        private final int numBytesPerCoordinate;
        private final double factor;

        private Encoding(int numBytes) {
            assert numBytes >= 1 && numBytes <= MAX_NUM_BYTES;
            assert (numBytes & 1) == 0; // we don't support odd numBytes for the moment
            this.numBytes = numBytes;
            this.numBytesPerCoordinate = numBytes / 2;
            this.factor = Math.pow(2, - numBytesPerCoordinate * 8 + 9);
            assert (1L << (numBytesPerCoordinate * 8 - 1)) * factor > 180 && (1L << (numBytesPerCoordinate * 8 - 2)) * factor < 180 : numBytesPerCoordinate + " " + factor;
            if (numBytes == MAX_NUM_BYTES) {
                // no precision loss compared to a double
                precision = new DistanceUnit.Distance(0, DistanceUnit.METERS);
            } else {
                precision = new DistanceUnit.Distance(
                        GeoDistance.PLANE.calculate(0, 0, factor / 2, factor / 2, DistanceUnit.METERS), // factor/2 because we use Math.round instead of a cast to convert the double to a long
                        DistanceUnit.METERS);
            }
        }

        public DistanceUnit.Distance precision() {
            return precision;
        }

        /** The number of bytes required to encode a single geo point. */
        public final int numBytes() {
            return numBytes;
        }

        /** The number of bits required to encode a single coordinate of a geo point. */
        public int numBitsPerCoordinate() {
            return numBytesPerCoordinate << 3;
        }

        /** Return the bits that encode a latitude/longitude. */
        public long encodeCoordinate(double lat) {
            return Math.round((lat + 180) / factor);
        }

        /** Decode a sequence of bits into the original coordinate. */
        public double decodeCoordinate(long bits) {
            return bits * factor - 180;
        }

        private void encodeBits(long bits, byte[] out, int offset) {
            for (int i = 0; i < numBytesPerCoordinate; ++i) {
                out[offset++] = (byte) bits;
                bits >>>= 8;
            }
            assert bits == 0;
        }

        private long decodeBits(byte[] in, int offset) {
            long r = in[offset++] & 0xFFL;
            for (int i = 1; i < numBytesPerCoordinate; ++i) {
                r |= (in[offset++] & 0xFFL) << (i * 8); // accumulate with OR; a plain assignment would drop the low bytes
            }
            return r;
        }

        /** Encode a geo point into a byte-array, over {@link #numBytes()} bytes. */
        public void encode(double lat, double lon, byte[] out, int offset) {
            encodeBits(encodeCoordinate(lat), out, offset);
            encodeBits(encodeCoordinate(lon), out, offset + numBytesPerCoordinate);
        }

        /** Decode a geo point from a byte-array, reading {@link #numBytes()} bytes. */
        public GeoPoint decode(byte[] in, int offset, GeoPoint out) {
            final long latBits = decodeBits(in, offset);
            final long lonBits = decodeBits(in, offset + numBytesPerCoordinate);
            return decode(latBits, lonBits, out);
        }

        /** Decode a geo point from the bits of the encoded latitude and longitudes. */
        public GeoPoint decode(long latBits, long lonBits, GeoPoint out) {
            final double lat = decodeCoordinate(latBits);
            final double lon = decodeCoordinate(lonBits);
            return out.reset(lat, lon);
        }

    }
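To make the `factor` arithmetic concrete, here is a self-contained round trip through the same encode/decode formulas as `Encoding` above (names invented; only the math is copied from the class). With 4 bytes per point, i.e. 16 bits per coordinate, `factor = 2^(9 - 16) = 1/128` degree, so coordinates come back within half a step (`1/256` degree).

[source,java]
--------------------------------------------------
public class EncodingRoundTripSketch {

    static long encodeCoordinate(double degrees, double factor) {
        return Math.round((degrees + 180) / factor);
    }

    static double decodeCoordinate(long bits, double factor) {
        return bits * factor - 180;
    }

    public static void main(String[] args) {
        int bitsPerCoordinate = 16; // 4 bytes per geo point
        double factor = Math.pow(2, 9 - bitsPerCoordinate);
        double lat = 41.12, lon = -71.34;
        double lat2 = decodeCoordinate(encodeCoordinate(lat, factor), factor);
        double lon2 = decodeCoordinate(encodeCoordinate(lon, factor), factor);
        // errors stay below factor / 2 = 1/256 degree
        System.out.println(lat2 + " " + lon2); // 41.1171875 -71.34375
    }
}
--------------------------------------------------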

    private final ContentPath.Type pathType;
@@ -250,7 +374,7 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {

    private final Integer precisionStep;

    private final int precision;
    private final int geoHashPrecision;

    private final DoubleFieldMapper latMapper;
@@ -258,43 +382,56 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {

    private final StringFieldMapper geohashMapper;

    private final GeoStringFieldMapper geoStringMapper;

    private final boolean validateLon;
    private final boolean validateLat;

    private final boolean normalizeLon;
    private final boolean normalizeLat;

    public GeoPointFieldMapper(String name, ContentPath.Type pathType, boolean enableLatLon, boolean enableGeoHash, boolean enableGeohashPrefix, Integer precisionStep, int precision,
                               DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper, GeoStringFieldMapper geoStringMapper,
                               boolean validateLon, boolean validateLat,
                               boolean normalizeLon, boolean normalizeLat) {
        this.name = name;
    public GeoPointFieldMapper(FieldMapper.Names names, FieldType fieldType,
                               NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer,
                               PostingsFormatProvider postingsFormat, DocValuesFormatProvider docValuesFormat,
                               SimilarityProvider similarity, @Nullable Settings fieldDataSettings, Settings indexSettings,
                               ContentPath.Type pathType, boolean enableLatLon, boolean enableGeoHash, boolean enableGeohashPrefix, Integer precisionStep, int geoHashPrecision,
                               DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper,
                               boolean validateLon, boolean validateLat,
                               boolean normalizeLon, boolean normalizeLat) {
        super(names, 1f, fieldType, null, indexAnalyzer, postingsFormat, docValuesFormat, similarity, fieldDataSettings, indexSettings);
        this.pathType = pathType;
        this.enableLatLon = enableLatLon;
        this.enableGeoHash = enableGeoHash || enableGeohashPrefix; // implicitly enable geohashes if geohash_prefix is set
        this.enableGeohashPrefix = enableGeohashPrefix;
        this.precisionStep = precisionStep;
        this.precision = precision;
        this.geoHashPrecision = geoHashPrecision;

        this.latMapper = latMapper;
        this.lonMapper = lonMapper;
        this.geoStringMapper = geoStringMapper;
        this.geohashMapper = geohashMapper;

        this.geoStringMapper.geoMapper = this;

        this.validateLat = validateLat;
        this.validateLon = validateLon;

        this.normalizeLat = normalizeLat;
        this.normalizeLon = normalizeLon;

        if (hasDocValues()) {
            throw new ElasticSearchIllegalStateException("Geo points don't support doc values"); // yet
        }
    }

    @Override
    public String name() {
        return this.name;
    protected String contentType() {
        return CONTENT_TYPE;
    }

    @Override
    public FieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }

    @Override
    public FieldDataType defaultFieldDataType() {
        return new FieldDataType("geo_point");
    }

    public DoubleFieldMapper latMapper() {
@@ -305,10 +442,6 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
        return lonMapper;
    }

    public GeoStringFieldMapper stringMapper() {
        return this.geoStringMapper;
    }

    public StringFieldMapper geoHashStringMapper() {
        return this.geohashMapper;
    }
@@ -321,11 +454,25 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
        return enableGeohashPrefix;
    }

    @Override
    public GeoPoint value(Object value) {
        if (value instanceof GeoPoint) {
            return (GeoPoint) value;
        } else {
            return GeoPoint.parseFromLatLon(value.toString());
        }
    }

    @Override
    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
        throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called");
    }

    @Override
    public void parse(ParseContext context) throws IOException {
        ContentPath.Type origPathType = context.path().pathType();
        context.path().pathType(pathType);
        context.path().add(name);
        context.path().add(name());

        XContentParser.Token token = context.parser().currentToken();
        if (token == XContentParser.Token.START_ARRAY) {
@ -413,7 +560,7 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
|
||||
}
|
||||
|
||||
private void parseGeohashField(ParseContext context, String geohash) throws IOException {
|
||||
int len = Math.min(precision, geohash.length());
|
||||
int len = Math.min(geoHashPrecision, geohash.length());
|
||||
int min = enableGeohashPrefix ? 1 : geohash.length();
|
||||
|
||||
for (int i = len; i >= min; i--) {
|
||||
@ -433,19 +580,21 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
|
||||
|
||||
if (validateLat) {
|
||||
if (lat > 90.0 || lat < -90.0) {
|
||||
throw new ElasticSearchIllegalArgumentException("illegal latitude value [" + lat + "] for " + name);
|
||||
throw new ElasticSearchIllegalArgumentException("illegal latitude value [" + lat + "] for " + name());
|
||||
}
|
||||
}
|
||||
if (validateLon) {
|
||||
if (lon > 180.0 || lon < -180) {
|
||||
throw new ElasticSearchIllegalArgumentException("illegal longitude value [" + lon + "] for " + name);
|
||||
throw new ElasticSearchIllegalArgumentException("illegal longitude value [" + lon + "] for " + name());
|
||||
}
|
||||
}
|
||||
|
||||
context.externalValue(Double.toString(lat) + ',' + Double.toString(lon));
|
||||
geoStringMapper.parse(context);
|
||||
if (fieldType.indexed() || fieldType.stored()) {
|
||||
Field field = new Field(names.indexName(), Double.toString(lat) + ',' + Double.toString(lon), fieldType);
|
||||
context.doc().add(field);
|
||||
}
|
||||
if (enableGeoHash) {
|
||||
parseGeohashField(context, GeoHashUtils.encode(lat, lon, precision));
|
||||
parseGeohashField(context, GeoHashUtils.encode(lat, lon, geoHashPrecision));
|
||||
}
|
||||
if (enableLatLon) {
|
||||
context.externalValue(lat);
|
||||
@ -464,17 +613,19 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
|
||||
|
||||
if (validateLat) {
|
||||
if (point.lat() > 90.0 || point.lat() < -90.0) {
|
||||
throw new ElasticSearchIllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name);
|
||||
throw new ElasticSearchIllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name());
|
||||
}
|
||||
}
|
||||
if (validateLon) {
|
||||
if (point.lon() > 180.0 || point.lon() < -180) {
|
||||
throw new ElasticSearchIllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name);
|
||||
throw new ElasticSearchIllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name());
|
||||
}
|
||||
}
|
||||
|
||||
context.externalValue(Double.toString(point.lat()) + ',' + Double.toString(point.lon()));
|
||||
geoStringMapper.parse(context);
|
||||
if (fieldType.indexed() || fieldType.stored()) {
|
||||
Field field = new Field(names.indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType);
|
||||
context.doc().add(field);
|
||||
}
|
||||
if (enableGeoHash) {
|
||||
parseGeohashField(context, geohash);
|
||||
}
|
||||
@ -497,19 +648,17 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
|
||||
if (geohashMapper != null) {
|
||||
geohashMapper.close();
|
||||
}
|
||||
if (geoStringMapper != null) {
|
||||
geoStringMapper.close();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
|
||||
// TODO
|
||||
super.merge(mergeWith, mergeContext);
|
||||
// TODO: geo-specific properties
|
||||
}
|
||||
|
||||
@Override
|
||||
public void traverse(FieldMapperListener fieldMapperListener) {
|
||||
geoStringMapper.traverse(fieldMapperListener);
|
||||
super.traverse(fieldMapperListener);
|
||||
if (enableGeoHash) {
|
||||
geohashMapper.traverse(fieldMapperListener);
|
||||
}
|
||||
@ -524,8 +673,8 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(name);
|
||||
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
|
||||
super.doXContentBody(builder, includeDefaults, params);
|
||||
builder.field("type", CONTENT_TYPE);
|
||||
if (pathType != Defaults.PATH_TYPE) {
|
||||
builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
|
||||
@ -539,11 +688,8 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
|
||||
if (enableGeohashPrefix != Defaults.ENABLE_GEOHASH_PREFIX) {
|
||||
builder.field("geohash_prefix", enableGeohashPrefix);
|
||||
}
|
||||
if (geoStringMapper.fieldType().stored() != Defaults.STORE) {
|
||||
builder.field("store", geoStringMapper.fieldType().stored());
|
||||
}
|
||||
if (precision != Defaults.PRECISION) {
|
||||
builder.field("geohash_precision", precision);
|
||||
if (geoHashPrecision != Defaults.GEO_HASH_PRECISION) {
|
||||
builder.field("geohash_precision", geoHashPrecision);
|
||||
}
|
||||
if (precisionStep != null) {
|
||||
builder.field("precision_step", precisionStep);
|
||||
@ -568,66 +714,6 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
|
||||
builder.field("normalize_lon", normalizeLon);
|
||||
}
|
||||
}
|
||||
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
public static class GeoStringFieldMapper extends StringFieldMapper {
|
||||
|
||||
public static class Builder extends AbstractFieldMapper.Builder<Builder, StringFieldMapper> {
|
||||
|
||||
protected String nullValue = Defaults.NULL_VALUE;
|
||||
|
||||
public Builder(String name) {
|
||||
super(name, new FieldType(GeoPointFieldMapper.Defaults.FIELD_TYPE));
|
||||
builder = this;
|
||||
}
|
||||
|
||||
public Builder nullValue(String nullValue) {
|
||||
this.nullValue = nullValue;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoStringFieldMapper build(BuilderContext context) {
|
||||
GeoStringFieldMapper fieldMapper = new GeoStringFieldMapper(buildNames(context), boost, fieldType, nullValue,
|
||||
indexAnalyzer, searchAnalyzer, postingsProvider, docValuesProvider, fieldDataSettings, context.indexSettings());
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
return fieldMapper;
|
||||
}
|
||||
}
|
||||
|
||||
GeoPointFieldMapper geoMapper;
|
||||
|
||||
public GeoStringFieldMapper(Names names, float boost, FieldType fieldType, String nullValue,
|
||||
NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer,
|
||||
PostingsFormatProvider postingsProvider, DocValuesFormatProvider docValuesProvider,
|
||||
@Nullable Settings fieldDataSettings, Settings indexSettings) {
|
||||
super(names, boost, fieldType, nullValue, indexAnalyzer, searchAnalyzer, searchAnalyzer, Defaults.POSITION_OFFSET_GAP, Defaults.IGNORE_ABOVE,
|
||||
postingsProvider, docValuesProvider, null, fieldDataSettings, indexSettings);
|
||||
if (hasDocValues()) {
|
||||
throw new MapperParsingException("Field [" + names.fullName() + "] cannot have doc values");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldType defaultFieldType() {
|
||||
return GeoPointFieldMapper.Defaults.FIELD_TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasDocValues() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldDataType defaultFieldDataType() {
|
||||
return new FieldDataType("geo_point");
|
||||
}
|
||||
|
||||
public GeoPointFieldMapper geoMapper() {
|
||||
return geoMapper;
|
||||
}
|
||||
}
|
||||
}
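
The `compressed` field data format that this mapper change enables is, at its core, fixed-point quantization of latitude and longitude. The sketch below is illustrative only (the real logic lives in GeoPointFieldMapper.Encoding; the helper names and the use of the equatorial circumference are assumptions): it shows how a target precision in meters translates into a quantization step in degrees, and why decoding then stays within the requested error.

public class FixedPointGeoSketch {

    // Approximate equatorial circumference of the Earth, in meters (assumption).
    static final double EARTH_CIRCUMFERENCE = 40075017d;
    static final double METERS_PER_DEGREE = EARTH_CIRCUMFERENCE / 360d;

    // A step of (2 * precision) meters means the nearest step center is
    // never more than precisionMeters away from the original coordinate.
    static double stepInDegrees(double precisionMeters) {
        return 2 * precisionMeters / METERS_PER_DEGREE;
    }

    static long encode(double degrees, double precisionMeters) {
        return Math.round(degrees / stepInDegrees(precisionMeters));
    }

    static double decode(long encoded, double precisionMeters) {
        return encoded * stepInDegrees(precisionMeters);
    }
}

At the default 1cm precision this yields roughly 360 / 1.8e-7, about 2e9 distinct values per coordinate, i.e. around 31 bits each, which is consistent with the 8 bytes per point quoted for `1cm` in the documentation change above.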

@ -19,9 +19,8 @@

package org.elasticsearch.index.query;

import org.elasticsearch.ElasticSearchParseException;

import org.apache.lucene.search.Filter;
import org.elasticsearch.ElasticSearchParseException;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.inject.Inject;
@ -126,10 +125,10 @@ public class GeoBoundingBoxFilterParser implements FilterParser {
throw new QueryParsingException(parseContext.index(), "failed to find geo_point field [" + fieldName + "]");
}
FieldMapper<?> mapper = smartMappers.mapper();
if (!(mapper instanceof GeoPointFieldMapper.GeoStringFieldMapper)) {
if (!(mapper instanceof GeoPointFieldMapper)) {
throw new QueryParsingException(parseContext.index(), "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper.GeoStringFieldMapper) mapper).geoMapper();
GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper);

Filter filter;
if ("indexed".equals(type)) {

@ -156,14 +156,14 @@ public class GeoDistanceFilterParser implements FilterParser {
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new QueryParsingException(parseContext.index(), "failed to find geo_point field [" + fieldName + "]");
}
FieldMapper mapper = smartMappers.mapper();
if (!(mapper instanceof GeoPointFieldMapper.GeoStringFieldMapper)) {
FieldMapper<?> mapper = smartMappers.mapper();
if (!(mapper instanceof GeoPointFieldMapper)) {
throw new QueryParsingException(parseContext.index(), "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper.GeoStringFieldMapper) mapper).geoMapper();
GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper);


IndexGeoPointFieldData indexFieldData = parseContext.fieldData().getForField(mapper);
IndexGeoPointFieldData<?> indexFieldData = parseContext.fieldData().getForField(mapper);
Filter filter = new GeoDistanceFilter(point.lat(), point.lon(), distance, geoDistance, indexFieldData, geoMapper, optimizeBbox);
if (cache) {
filter = parseContext.cacheFilter(filter, cacheKey);

@ -199,13 +199,13 @@ public class GeoDistanceRangeFilterParser implements FilterParser {
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new QueryParsingException(parseContext.index(), "failed to find geo_point field [" + fieldName + "]");
}
FieldMapper mapper = smartMappers.mapper();
if (!(mapper instanceof GeoPointFieldMapper.GeoStringFieldMapper)) {
FieldMapper<?> mapper = smartMappers.mapper();
if (!(mapper instanceof GeoPointFieldMapper)) {
throw new QueryParsingException(parseContext.index(), "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper.GeoStringFieldMapper) mapper).geoMapper();
GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper);

IndexGeoPointFieldData indexFieldData = parseContext.fieldData().getForField(mapper);
IndexGeoPointFieldData<?> indexFieldData = parseContext.fieldData().getForField(mapper);
Filter filter = new GeoDistanceRangeFilter(point, from, to, includeLower, includeUpper, geoDistance, geoMapper, indexFieldData, optimizeBbox);
if (cache) {
filter = parseContext.cacheFilter(filter, cacheKey);

@ -21,7 +21,6 @@ package org.elasticsearch.index.query;

import com.google.common.collect.Lists;
import org.apache.lucene.search.Filter;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.inject.Inject;
@ -128,12 +127,12 @@ public class GeoPolygonFilterParser implements FilterParser {
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new QueryParsingException(parseContext.index(), "failed to find geo_point field [" + fieldName + "]");
}
FieldMapper mapper = smartMappers.mapper();
if (!(mapper instanceof GeoPointFieldMapper.GeoStringFieldMapper)) {
FieldMapper<?> mapper = smartMappers.mapper();
if (!(mapper instanceof GeoPointFieldMapper)) {
throw new QueryParsingException(parseContext.index(), "field [" + fieldName + "] is not a geo_point field");
}

IndexGeoPointFieldData indexFieldData = parseContext.fieldData().getForField(mapper);
IndexGeoPointFieldData<?> indexFieldData = parseContext.fieldData().getForField(mapper);
Filter filter = new GeoPolygonFilter(points.toArray(new GeoPoint[points.size()]), indexFieldData);
if (cache) {
filter = parseContext.cacheFilter(filter, cacheKey);

@ -239,11 +239,11 @@ public class GeohashCellFilter {
}

FieldMapper<?> mapper = smartMappers.mapper();
if (!(mapper instanceof GeoPointFieldMapper.GeoStringFieldMapper)) {
if (!(mapper instanceof GeoPointFieldMapper)) {
throw new QueryParsingException(parseContext.index(), "field [" + fieldName + "] is not a geo_point field");
}

GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper.GeoStringFieldMapper) mapper).geoMapper();
GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper);
if (!geoMapper.isEnableGeohashPrefix()) {
throw new QueryParsingException(parseContext.index(), "can't execute geohash_cell on field [" + fieldName + "], geohash_prefix is not enabled");
}
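
All of the geo filter parsers touched above converge on the same resolution pattern after this change: resolve the field mapper, require that it is the GeoPointFieldMapper itself (previously its nested GeoStringFieldMapper), and fetch field data through it. Condensed, with the surrounding parser context elided:

FieldMapper<?> mapper = smartMappers.mapper();
if (!(mapper instanceof GeoPointFieldMapper)) {
    throw new QueryParsingException(parseContext.index(), "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper geoMapper = (GeoPointFieldMapper) mapper;
// field data is now resolved against the geo_point mapper itself, which
// carries the fielddata settings (array or compressed) from the mapping
IndexGeoPointFieldData<?> indexFieldData = parseContext.fieldData().getForField(mapper);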

@ -39,7 +39,7 @@ import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper.GeoStringFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionBuilder;
@ -147,8 +147,8 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
// dates and time need special handling
if (mapper instanceof DateFieldMapper) {
return parseDateVariable(fieldName, parser, parseContext, (DateFieldMapper) mapper);
} else if (mapper instanceof GeoStringFieldMapper) {
return parseGeoVariable(fieldName, parser, parseContext, (GeoStringFieldMapper) mapper);
} else if (mapper instanceof GeoPointFieldMapper) {
return parseGeoVariable(fieldName, parser, parseContext, (GeoPointFieldMapper) mapper);
} else if (mapper instanceof NumberFieldMapper<?>) {
return parseNumberVariable(fieldName, parser, parseContext, (NumberFieldMapper<?>) mapper);
} else {
@ -193,7 +193,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
}

private ScoreFunction parseGeoVariable(String fieldName, XContentParser parser, QueryParseContext parseContext,
GeoStringFieldMapper mapper) throws IOException {
GeoPointFieldMapper mapper) throws IOException {
XContentParser.Token token;
String parameterName = null;
GeoPoint origin = new GeoPoint();

@ -66,6 +66,8 @@ public abstract class AbstractFieldDataTests extends ElasticsearchTestCase {
mapper = MapperBuilders.shortField(fieldName).fieldDataSettings(type.getSettings()).build(context);
} else if (type.getType().equals("byte")) {
mapper = MapperBuilders.byteField(fieldName).fieldDataSettings(type.getSettings()).build(context);
} else if (type.getType().equals("geo_point")) {
mapper = MapperBuilders.geoPointField(fieldName).fieldDataSettings(type.getSettings()).build(context);
} else {
throw new UnsupportedOperationException(type.getType());
}

@ -18,6 +18,7 @@
*/
package org.elasticsearch.index.fielddata;

import com.google.common.collect.Lists;
import org.apache.lucene.document.*;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.CompositeReaderContext;
@ -26,7 +27,11 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.English;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.unit.DistanceUnit.Distance;
import org.junit.Test;

import java.util.*;
@ -337,6 +342,59 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {

}

public void testDuelGeoPoints() throws Exception {
Random random = getRandom();
int atLeast = atLeast(random, 1000);
int maxValuesPerDoc = randomIntBetween(1, 3);
for (int i = 0; i < atLeast; i++) {
Document d = new Document();
d.add(new StringField("_id", "" + i, Field.Store.NO));
final int numValues = randomInt(maxValuesPerDoc);
for (int j = 0; j < numValues; ++j) {
final double lat = randomDouble() * 180 - 90;
final double lon = randomDouble() * 360 - 180;
d.add(new StringField("geopoint", lat + "," + lon, Field.Store.NO));
}
writer.addDocument(d);
if (random.nextInt(10) == 0) {
refreshReader();
}
}
AtomicReaderContext context = refreshReader();
Map<FieldDataType, Type> typeMap = new HashMap<FieldDataType, DuelFieldDataTests.Type>();
final Distance precision = new Distance(1, randomFrom(DistanceUnit.values()));
typeMap.put(new FieldDataType("geo_point", ImmutableSettings.builder().put("format", "array")), Type.GeoPoint);
typeMap.put(new FieldDataType("geo_point", ImmutableSettings.builder().put("format", "compressed").put("precision", precision)), Type.GeoPoint);

ArrayList<Entry<FieldDataType, Type>> list = new ArrayList<Entry<FieldDataType, Type>>(typeMap.entrySet());
while (!list.isEmpty()) {
Entry<FieldDataType, Type> left;
Entry<FieldDataType, Type> right;
if (list.size() > 1) {
left = list.remove(random.nextInt(list.size()));
right = list.remove(random.nextInt(list.size()));
} else {
right = left = list.remove(0);
}
ifdService.clear();
IndexGeoPointFieldData<?> leftFieldData = getForField(left.getKey(), left.getValue().name().toLowerCase(Locale.ROOT));

ifdService.clear();
IndexGeoPointFieldData<?> rightFieldData = getForField(right.getKey(), right.getValue().name().toLowerCase(Locale.ROOT));

duelFieldDataGeoPoint(random, context, leftFieldData, rightFieldData, precision);
duelFieldDataGeoPoint(random, context, rightFieldData, leftFieldData, precision);

DirectoryReader perSegment = DirectoryReader.open(writer, true);
CompositeReaderContext composite = perSegment.getContext();
List<AtomicReaderContext> leaves = composite.leaves();
for (AtomicReaderContext atomicReaderContext : leaves) {
duelFieldDataGeoPoint(random, atomicReaderContext, leftFieldData, rightFieldData, precision);
}
perSegment.close();
}
}

private void assertOrder(AtomicFieldData.Order order, IndexFieldData<?> data, AtomicReaderContext context) throws Exception {
AtomicFieldData<?> leftData = randomBoolean() ? data.load(context) : data.loadDirect(context);
assertThat(leftData.getBytesValues(randomBoolean()).getOrder(), is(order));
@ -438,6 +496,44 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
}
}

private static void duelFieldDataGeoPoint(Random random, AtomicReaderContext context, IndexGeoPointFieldData<?> left, IndexGeoPointFieldData<?> right, Distance precision) throws Exception {
AtomicGeoPointFieldData<?> leftData = random.nextBoolean() ? left.load(context) : left.loadDirect(context);
AtomicGeoPointFieldData<?> rightData = random.nextBoolean() ? right.load(context) : right.loadDirect(context);

assertThat(leftData.getNumDocs(), equalTo(rightData.getNumDocs()));

int numDocs = leftData.getNumDocs();
GeoPointValues leftValues = leftData.getGeoPointValues();
GeoPointValues rightValues = rightData.getGeoPointValues();
for (int i = 0; i < numDocs; ++i) {
final int numValues = leftValues.setDocument(i);
assertEquals(numValues, rightValues.setDocument(i));
List<GeoPoint> leftPoints = Lists.newArrayList();
List<GeoPoint> rightPoints = Lists.newArrayList();
for (int j = 0; j < numValues; ++j) {
GeoPoint l = leftValues.nextValue();
leftPoints.add(new GeoPoint(l.getLat(), l.getLon()));
GeoPoint r = rightValues.nextValue();
rightPoints.add(new GeoPoint(r.getLat(), r.getLon()));
}
for (GeoPoint l : leftPoints) {
assertTrue("Couldn't find " + l + " among " + rightPoints, contains(l, rightPoints, precision));
}
for (GeoPoint r : rightPoints) {
assertTrue("Couldn't find " + r + " among " + leftPoints, contains(r, leftPoints, precision));
}
}
}

private static boolean contains(GeoPoint point, List<GeoPoint> set, Distance precision) {
for (GeoPoint r : set) {
final double distance = GeoDistance.PLANE.calculate(point.getLat(), point.getLon(), r.getLat(), r.getLon(), DistanceUnit.METERS);
if (new Distance(distance, DistanceUnit.METERS).compareTo(precision) <= 0) {
return true;
}
}
return false;
}
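
To make the tolerance concrete: the compressed format may move a point by up to the configured precision, so contains() accepts any counterpart within that distance. For instance (illustrative values), about 0.0000045 degrees of latitude is roughly 0.5m, comfortably within a 1m precision:

GeoPoint exact = new GeoPoint(40.7143528, -74.0059731);
GeoPoint decoded = new GeoPoint(40.7143573, -74.0059731); // ~0.5m further north
assertTrue(contains(decoded, Lists.newArrayList(exact), new Distance(1, DistanceUnit.METERS)));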

private static class Preprocessor {

@ -467,7 +563,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {


private static enum Type {
Float(AtomicFieldData.Order.NUMERIC), Double(AtomicFieldData.Order.NUMERIC), Integer(AtomicFieldData.Order.NUMERIC), Long(AtomicFieldData.Order.NUMERIC), Bytes(AtomicFieldData.Order.BYTES);
Float(AtomicFieldData.Order.NUMERIC), Double(AtomicFieldData.Order.NUMERIC), Integer(AtomicFieldData.Order.NUMERIC), Long(AtomicFieldData.Order.NUMERIC), Bytes(AtomicFieldData.Order.BYTES), GeoPoint(AtomicFieldData.Order.NONE);

private final AtomicFieldData.Order order;
Type(AtomicFieldData.Order order) {

@ -0,0 +1,48 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.mapper.geo;

import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.unit.DistanceUnit.Distance;
import org.elasticsearch.test.ElasticsearchTestCase;

import java.util.Arrays;

import static org.hamcrest.Matchers.lessThanOrEqualTo;


public class GeoEncodingTests extends ElasticsearchTestCase {

public void test() {
for (int i = 0; i < 10000; ++i) {
final double lat = randomDouble() * 180 - 90;
final double lon = randomDouble() * 360 - 180;
final Distance precision = new Distance(randomDouble() * 10, randomFrom(Arrays.asList(DistanceUnit.MILLIMETERS, DistanceUnit.METERS, DistanceUnit.KILOMETERS)));
final GeoPointFieldMapper.Encoding encoding = GeoPointFieldMapper.Encoding.of(precision);
assertThat(encoding.precision().convert(DistanceUnit.METERS).value, lessThanOrEqualTo(precision.convert(DistanceUnit.METERS).value));
final GeoPoint geoPoint = encoding.decode(encoding.encodeCoordinate(lat), encoding.encodeCoordinate(lon), new GeoPoint());
final double error = GeoDistance.PLANE.calculate(lat, lon, geoPoint.lat(), geoPoint.lon(), DistanceUnit.METERS);
assertThat(error, lessThanOrEqualTo(precision.convert(DistanceUnit.METERS).value));
}
}

}
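
The test above also pins down the contract of the new encoding: Encoding.of(precision) picks a representation whose own precision() is at most the requested one, and decode(encodeCoordinate(lat), encodeCoordinate(lon), ...) round-trips within that error. A minimal usage sketch, assuming only the calls exercised by the test (10 millimeters being the 1cm default):

Distance precision = new Distance(10, DistanceUnit.MILLIMETERS); // i.e. 1cm
GeoPointFieldMapper.Encoding encoding = GeoPointFieldMapper.Encoding.of(precision);
GeoPoint decoded = encoding.decode(encoding.encodeCoordinate(48.858222), encoding.encodeCoordinate(2.2945), new GeoPoint());
// decoded.lat()/decoded.lon() differ from the inputs by at most ~1cm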

@ -0,0 +1,78 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.mapper.geo;

import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.unit.DistanceUnit.Distance;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.test.ElasticsearchIntegrationTest;

import java.util.Map;

public class GeoMappingTests extends ElasticsearchIntegrationTest {

public void testUpdatePrecision() throws Exception {
prepareCreate("test").addMapping("type1", XContentFactory.jsonBuilder().startObject()
.startObject("type1")
.startObject("properties")
.startObject("pin")
.field("type", "geo_point")
.startObject("fielddata")
.field("format", "compressed")
.field("precision", "2mm")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()).execute().actionGet();

assertPrecision(new Distance(2, DistanceUnit.MILLIMETERS));

client().admin().indices().preparePutMapping("test").setType("type1").setSource(XContentFactory.jsonBuilder().startObject()
.startObject("type1")
.startObject("properties")
.startObject("pin")
.field("type", "geo_point")
.startObject("fielddata")
.field("format", "compressed")
.field("precision", "11m")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()).execute().actionGet();

assertPrecision(new Distance(11, DistanceUnit.METERS));
}

private void assertPrecision(Distance expected) throws Exception {
ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> mappings = client().admin().indices().getMappings(new GetMappingsRequest().indices("test").types("type1")).actionGet().getMappings();
assertNotNull(mappings);
Map<String, ?> properties = (Map<String, ?>) mappings.get("test").get("type1").getSourceAsMap().get("properties");
Map<String, ?> pinProperties = (Map<String, ?>) properties.get("pin");
Map<String, ?> pinFieldData = (Map<String, ?>) pinProperties.get("fielddata");
Distance precision = Distance.parseDistance(pinFieldData.get("precision").toString(), DistanceUnit.METERS);
assertEquals(expected, precision);
}

}

@ -38,6 +38,7 @@ import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;

import java.io.IOException;
import java.util.Arrays;

import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
@ -54,7 +55,8 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
@Test
public void simpleDistanceTests() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.startObject("properties").startObject("location").field("type", "geo_point").field("lat_lon", true)
.startObject("fielddata").field("format", randomFrom(Arrays.asList("array", "compressed"))).endObject().endObject().endObject()
.endObject().endObject().string();
client().admin().indices().prepareCreate("test").addMapping("type1", mapping).execute().actionGet();
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
@ -205,7 +207,8 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
@Test
public void testDistanceSortingMVFields() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("locations").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.startObject("properties").startObject("locations").field("type", "geo_point").field("lat_lon", true)
.startObject("fielddata").field("format", randomFrom(Arrays.asList("array", "compressed"))).endObject().endObject().endObject()
.endObject().endObject().string();

client().admin().indices().prepareCreate("test")
@ -259,7 +262,7 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {

assertHitCount(searchResponse, 4);
assertOrderedSearchHits(searchResponse, "1", "2", "3", "4");
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), equalTo(0d));
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(0.4621d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1.055d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(2.029d, 0.01d));
@ -271,7 +274,7 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {

assertHitCount(searchResponse, 4);
assertOrderedSearchHits(searchResponse, "1", "3", "2", "4");
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), equalTo(0d));
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1.258d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(5.286d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(8.572d, 0.01d));
@ -286,7 +289,7 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(8.572d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(5.286d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1.258d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), equalTo(0d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 0.01d));

// Order: Desc, Mode: min
searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
@ -298,7 +301,7 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(2.029d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1.055d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(0.4621d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), equalTo(0d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 0.01d));

searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.ASC))
@ -306,7 +309,7 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {

assertHitCount(searchResponse, 4);
assertOrderedSearchHits(searchResponse, "1", "3", "2", "4");
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), equalTo(0d));
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1.157d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(2.874d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(5.301d, 0.01d));
@ -320,7 +323,7 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(5.301d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(2.874d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1.157d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), equalTo(0d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 0.01d));

try {
client().prepareSearch("test").setQuery(matchAllQuery())
@ -336,7 +339,8 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
// Regression bug: https://github.com/elasticsearch/elasticsearch/issues/2851
public void testDistanceSortingWithMissingGeoPoint() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("locations").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.startObject("properties").startObject("locations").field("type", "geo_point").field("lat_lon", true)
.startObject("fielddata").field("format", randomFrom(Arrays.asList("array", "compressed"))).endObject().endObject().endObject()
.endObject().endObject().string();

client().admin().indices().prepareCreate("test")
@ -405,27 +409,27 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {

SearchResponse searchResponse1 = client().prepareSearch().addField("_source").addScriptField("distance", "doc['location'].arcDistance(" + target_lat + "," + target_long + ")").execute().actionGet();
Double resultDistance1 = searchResponse1.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultDistance1, equalTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.MILES)));
assertThat(resultDistance1, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.MILES), 0.0001d));

SearchResponse searchResponse2 = client().prepareSearch().addField("_source").addScriptField("distance", "doc['location'].distance(" + target_lat + "," + target_long + ")").execute().actionGet();
Double resultDistance2 = searchResponse2.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultDistance2, equalTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.MILES)));
assertThat(resultDistance2, closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.MILES), 0.0001d));

SearchResponse searchResponse3 = client().prepareSearch().addField("_source").addScriptField("distance", "doc['location'].arcDistanceInKm(" + target_lat + "," + target_long + ")").execute().actionGet();
Double resultArcDistance3 = searchResponse3.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultArcDistance3, equalTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS)));
assertThat(resultArcDistance3, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.0001d));

SearchResponse searchResponse4 = client().prepareSearch().addField("_source").addScriptField("distance", "doc['location'].distanceInKm(" + target_lat + "," + target_long + ")").execute().actionGet();
Double resultDistance4 = searchResponse4.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultDistance4, equalTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS)));
assertThat(resultDistance4, closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.0001d));

SearchResponse searchResponse5 = client().prepareSearch().addField("_source").addScriptField("distance", "doc['location'].arcDistanceInKm(" + (target_lat) + "," + (target_long + 360) + ")").execute().actionGet();
Double resultArcDistance5 = searchResponse5.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultArcDistance5, equalTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS)));
assertThat(resultArcDistance5, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.0001d));

SearchResponse searchResponse6 = client().prepareSearch().addField("_source").addScriptField("distance", "doc['location'].arcDistanceInKm(" + (target_lat + 360) + "," + (target_long) + ")").execute().actionGet();
Double resultArcDistance6 = searchResponse6.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultArcDistance6, equalTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS)));
assertThat(resultArcDistance6, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.0001d));
}

@Test
@ -437,7 +441,8 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
.field("type", "nested")
.startObject("properties")
.startObject("name").field("type", "string").endObject()
.startObject("location").field("type", "geo_point").field("lat_lon", true).endObject()
.startObject("location").field("type", "geo_point").field("lat_lon", true)
.startObject("fielddata").field("format", randomFrom(Arrays.asList("array", "compressed"))).endObject().endObject()
.endObject()
.endObject()
.endObject()
@ -504,7 +509,7 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {

assertHitCount(searchResponse, 4);
assertOrderedSearchHits(searchResponse, "1", "2", "3", "4");
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), equalTo(0d));
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(0.4621d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1.055d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(2.029d, 0.01d));
@ -516,7 +521,7 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {

assertHitCount(searchResponse, 4);
assertOrderedSearchHits(searchResponse, "1", "3", "2", "4");
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), equalTo(0d));
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1.258d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(5.286d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(8.572d, 0.01d));
@ -531,7 +536,7 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(8.572d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(5.286d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1.258d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), equalTo(0d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 0.01d));

// Order: Desc, Mode: min
searchResponse = client().prepareSearch("companies").setQuery(matchAllQuery())
@ -543,7 +548,7 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(2.029d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1.055d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(0.4621d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), equalTo(0d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 0.01d));

searchResponse = client().prepareSearch("companies").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("branches.location").point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.ASC))
@ -551,7 +556,7 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {

assertHitCount(searchResponse, 4);
assertOrderedSearchHits(searchResponse, "1", "3", "2", "4");
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), equalTo(0d));
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1.157d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(2.874d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(5.301d, 0.01d));
@ -568,7 +573,7 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(5.301d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(2.874d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1.157d, 0.01d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), equalTo(0d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 0.01d));

searchResponse = client().prepareSearch("companies").setQuery(matchAllQuery())
.addSort(
@ -611,6 +616,9 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
.field("geohash", true)
.field("geohash_precision", 24)
.field("lat_lon", true)
.startObject("fielddata")
.field("format", randomFrom(Arrays.asList("array", "compressed")))
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()