Add RangeFieldMapper for numeric and date range types

Lucene 6.2 added index and query support for numeric ranges. This commit adds a new RangeFieldMapper for indexing numeric (int, long, float, double) and date ranges and creating appropriate range and term queries. The design is similar to NumericFieldMapper in that it uses a RangeType enumerator for implementing the logic specific to each type. The following range types are supported by this field mapper: int_range, float_range, long_range, double_range, date_range.

Lucene does not provide a DocValue field specific to RangeField types so the RangeFieldMapper implements a CustomRangeDocValuesField for handling doc value support.

When executing a Range query over a Range field, the RangeQueryBuilder has been enhanced to accept a new relation parameter for defining the type of query as one of: WITHIN, CONTAINS, INTERSECTS. This provides support for finding all ranges that are related to a specific range in a desired way. As with other spatial queries, DISJOINT can be achieved as a MUST_NOT of an INTERSECTS query.
This commit is contained in:
Nicholas Knize 2016-09-16 08:50:56 -05:00
parent c3a95a6666
commit af1ab68b64
59 changed files with 1843 additions and 181 deletions

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
@ -208,7 +209,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
if (!enabledState.enabled) {
return;
}

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.apache.lucene.util.LegacyNumericUtils;
@ -466,7 +467,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called");
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper;
import com.carrotsearch.hppc.ObjectArrayList;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.util.BytesRef;
@ -137,7 +138,7 @@ public class BinaryFieldMapper extends FieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
if (!fieldType().stored() && !fieldType().hasDocValues()) {
return;
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.BytesRef;
@ -217,7 +218,7 @@ public class BooleanFieldMapper extends FieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored() && !fieldType().hasDocValues()) {
return;
}

View File

@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.suggest.document.Completion50PostingsFormat;
import org.apache.lucene.search.suggest.document.CompletionAnalyzer;
@ -590,7 +591,7 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
// no-op
}

View File

@ -22,6 +22,7 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.suggest.analyzing.XAnalyzingSuggester;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchParseException;
@ -563,7 +564,7 @@ public class CompletionFieldMapper2x extends FieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
}
@Override

View File

@ -19,13 +19,13 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
@ -314,8 +314,7 @@ public class DateFieldMapper extends FieldMapper {
@Override
public Relation isFieldWithinQuery(IndexReader reader,
Object from, Object to,
boolean includeLower, boolean includeUpper,
Object from, Object to, boolean includeLower, boolean includeUpper,
DateTimeZone timeZone, DateMathParser dateParser, QueryRewriteContext context) throws IOException {
if (dateParser == null) {
dateParser = this.dateMathParser;
@ -425,7 +424,7 @@ public class DateFieldMapper extends FieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
String dateAsString;
if (context.externalValueSet()) {
Object dateAsObject = context.externalValue();

View File

@ -24,6 +24,7 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.lucene.Lucene;
@ -281,15 +282,15 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
* mappings were not modified.
*/
public Mapper parse(ParseContext context) throws IOException {
final List<Field> fields = new ArrayList<>(2);
final List<IndexableField> fields = new ArrayList<>(2);
try {
parseCreateField(context, fields);
for (Field field : fields) {
for (IndexableField field : fields) {
if (!customBoost()
// don't set boosts eg. on dv fields
&& field.fieldType().indexOptions() != IndexOptions.NONE
&& indexCreatedVersion.before(Version.V_5_0_0_alpha1)) {
field.setBoost(fieldType().boost());
((Field)(field)).setBoost(fieldType().boost());
}
context.doc().add(field);
}
@ -303,7 +304,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
/**
* Parse the field value and populate <code>fields</code>.
*/
protected abstract void parseCreateField(ParseContext context, List<Field> fields) throws IOException;
protected abstract void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException;
/**
* Derived classes can override it to specify that boost value is set by derived classes.

View File

@ -248,7 +248,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
if (fieldType().isEnabled() == false) {
return;
}

View File

@ -18,6 +18,7 @@
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.IndexableField;
import org.locationtech.spatial4j.shape.Point;
import org.locationtech.spatial4j.shape.Shape;
import org.locationtech.spatial4j.shape.jts.JtsGeometry;
@ -462,7 +463,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
}
@Override

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.BooleanClause;
@ -135,7 +136,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
public void postParse(ParseContext context) throws IOException {}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {}
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {}
@Override
protected String contentType() {

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
@ -175,7 +176,7 @@ public class IndexFieldMapper extends MetadataFieldMapper {
public void postParse(ParseContext context) throws IOException {}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {}
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {}
@Override
protected String contentType() {

View File

@ -25,6 +25,7 @@ import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
@ -285,7 +286,7 @@ public class IpFieldMapper extends FieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
Object addressAsObject;
if (context.externalValueSet()) {
addressAsObject = context.externalValue();

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
@ -235,7 +236,7 @@ public final class KeywordFieldMapper extends FieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
final String value;
if (context.externalValueSet()) {
value = context.externalValue().toString();

View File

@ -23,6 +23,7 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
@ -199,7 +200,7 @@ public class LegacyByteFieldMapper extends LegacyNumberFieldMapper {
}
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void innerParseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
byte value;
float boost = fieldType().boost();
if (context.externalValueSet()) {

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
@ -426,7 +427,7 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
}
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void innerParseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
String dateAsString = null;
float boost = fieldType().boost();
if (context.externalValueSet()) {

View File

@ -24,6 +24,7 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
@ -199,7 +200,7 @@ public class LegacyDoubleFieldMapper extends LegacyNumberFieldMapper {
}
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void innerParseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
double value;
float boost = fieldType().boost();
if (context.externalValueSet()) {

View File

@ -24,6 +24,7 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
@ -194,7 +195,7 @@ public class LegacyFloatFieldMapper extends LegacyNumberFieldMapper {
}
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void innerParseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
float value;
float boost = fieldType().boost();
if (context.externalValueSet()) {

View File

@ -24,6 +24,7 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
@ -198,7 +199,7 @@ public class LegacyIntegerFieldMapper extends LegacyNumberFieldMapper {
}
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void innerParseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
int value;
float boost = fieldType().boost();
if (context.externalValueSet()) {
@ -270,7 +271,7 @@ public class LegacyIntegerFieldMapper extends LegacyNumberFieldMapper {
addIntegerFields(context, fields, value, boost);
}
protected void addIntegerFields(ParseContext context, List<Field> fields, int value, float boost) {
protected void addIntegerFields(ParseContext context, List<IndexableField> fields, int value, float boost) {
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
CustomIntegerNumericField field = new CustomIntegerNumericField(value, fieldType());
if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
@ -279,7 +280,7 @@ public class LegacyIpFieldMapper extends LegacyNumberFieldMapper {
}
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void innerParseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
String ipAsString;
if (context.externalValueSet()) {
ipAsString = (String) context.externalValue();

View File

@ -24,6 +24,7 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
@ -189,7 +190,7 @@ public class LegacyLongFieldMapper extends LegacyNumberFieldMapper {
}
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void innerParseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
long value;
float boost = fieldType().boost();
if (context.externalValueSet()) {

View File

@ -27,6 +27,7 @@ import org.apache.lucene.analysis.LegacyNumericTokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
@ -170,7 +171,7 @@ public abstract class LegacyNumberFieldMapper extends FieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
RuntimeException e = null;
try {
innerParseCreateField(context, fields);
@ -185,9 +186,9 @@ public abstract class LegacyNumberFieldMapper extends FieldMapper {
}
}
protected abstract void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException;
protected abstract void innerParseCreateField(ParseContext context, List<IndexableField> fields) throws IOException;
protected final void addDocValue(ParseContext context, List<Field> fields, long value) {
protected final void addDocValue(ParseContext context, List<IndexableField> fields, long value) {
fields.add(new SortedNumericDocValuesField(fieldType().name(), value));
}

View File

@ -24,6 +24,7 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
@ -203,7 +204,7 @@ public class LegacyShortFieldMapper extends LegacyNumberFieldMapper {
}
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void innerParseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
short value;
float boost = fieldType().boost();
if (context.externalValueSet()) {

View File

@ -23,6 +23,7 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.settings.Settings;
@ -122,7 +123,7 @@ public class LegacyTokenCountFieldMapper extends LegacyIntegerFieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
ValueAndBoost valueAndBoost = StringFieldMapper.parseCreateFieldForString(context, null /* Out null value is an int so we convert*/, fieldType().boost());
if (valueAndBoost.value() == null && fieldType().nullValue() == null) {
return;

View File

@ -29,6 +29,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
@ -895,7 +896,7 @@ public class NumberFieldMapper extends FieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
final boolean includeInAll = context.includeInAll(this.includeInAll, this);
XContentParser parser = context.parser();

View File

@ -22,6 +22,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
@ -227,7 +228,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
boolean parent = context.docMapper().isParent(context.sourceToParse().type());
if (parent) {
fields.add(new SortedDocValuesField(parentJoinField.fieldType().name(), new BytesRef(context.sourceToParse().id())));

View File

@ -0,0 +1,772 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.DoubleRangeField;
import org.apache.lucene.document.FloatRangeField;
import org.apache.lucene.document.IntRangeField;
import org.apache.lucene.document.LongRangeField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.joda.DateMathParser;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.LocaleUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
import org.elasticsearch.index.query.QueryShardContext;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.index.mapper.TypeParsers.parseDateTimeFormatter;
import static org.elasticsearch.index.query.RangeQueryBuilder.GT_FIELD;
import static org.elasticsearch.index.query.RangeQueryBuilder.GTE_FIELD;
import static org.elasticsearch.index.query.RangeQueryBuilder.LT_FIELD;
import static org.elasticsearch.index.query.RangeQueryBuilder.LTE_FIELD;
/** A {@link FieldMapper} for indexing numeric and date ranges, and creating queries */
public class RangeFieldMapper extends FieldMapper {
public static final boolean DEFAULT_INCLUDE_UPPER = true;
public static final boolean DEFAULT_INCLUDE_LOWER = true;
public static class Defaults {
public static final Explicit<Boolean> COERCE = new Explicit<>(true, false);
}
// this is private since it has a different default
static final Setting<Boolean> COERCE_SETTING =
Setting.boolSetting("index.mapping.coerce", true, Setting.Property.IndexScope);
/**
 * Builder for {@link RangeFieldMapper}. Configures coercion, locale and, for
 * {@code RangeType.DATE} fields only, the date format.
 */
public static class Builder extends FieldMapper.Builder<Builder, RangeFieldMapper> {
// Tri-state: null means "not explicitly set", letting coerce(BuilderContext) fall back
// to the index-level setting and finally to the hard default.
private Boolean coerce;
// Locale used when parsing date ranges; defaults to Locale.ROOT. Ignored for numeric types.
private Locale locale;
public Builder(String name, RangeType type) {
super(name, new RangeFieldType(type), new RangeFieldType(type));
builder = this;
locale = Locale.ROOT;
}
@Override
public RangeFieldType fieldType() {
return (RangeFieldType)fieldType;
}
// Range fields have no Lucene doc-values representation here, so an explicit
// doc_values=true in the mapping is rejected rather than silently ignored.
@Override
public Builder docValues(boolean docValues) {
if (docValues == true) {
throw new IllegalArgumentException("field [" + name + "] does not currently support " + TypeParsers.DOC_VALUES);
}
return super.docValues(docValues);
}
public Builder coerce(boolean coerce) {
this.coerce = coerce;
return builder;
}
// Resolve the effective coerce flag: an explicit builder value wins, then the
// "index.mapping.coerce" index setting, then Defaults.COERCE (true).
protected Explicit<Boolean> coerce(BuilderContext context) {
if (coerce != null) {
return new Explicit<>(coerce, true);
}
if (context.indexSettings() != null) {
return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false);
}
return Defaults.COERCE;
}
public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) {
fieldType().setDateTimeFormatter(dateTimeFormatter);
return this;
}
// null_value is unsupported for range fields (TypeParser also rejects the mapping property).
@Override
public Builder nullValue(Object nullValue) {
throw new IllegalArgumentException("Field [" + name() + "] does not support null value.");
}
public void locale(Locale locale) {
this.locale = locale;
}
@Override
protected void setupFieldType(BuilderContext context) {
super.setupFieldType(context);
FormatDateTimeFormatter dateTimeFormatter = fieldType().dateTimeFormatter;
if (fieldType().rangeType == RangeType.DATE) {
// Only rebuild the formatter when the requested locale differs from the one it already has.
if (!locale.equals(dateTimeFormatter.locale())) {
fieldType().setDateTimeFormatter(new FormatDateTimeFormatter(dateTimeFormatter.format(),
dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale));
}
} else if (dateTimeFormatter != null) {
// A "format" was supplied for a non-date range type — that is a mapping error.
throw new IllegalArgumentException("field [" + name() + "] of type [" + fieldType().rangeType
+ "] should not define a dateTimeFormatter unless it is a " + RangeType.DATE + " type");
}
}
@Override
public RangeFieldMapper build(BuilderContext context) {
setupFieldType(context);
return new RangeFieldMapper(name, fieldType, defaultFieldType, coerce(context), includeInAll,
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
}
}
/**
 * Mapping-node parser for range fields. Produces a {@link Builder} for the single
 * {@link RangeType} this parser instance was registered for, consuming the
 * "coerce", "locale", "format" and multi-field properties and rejecting "null_value".
 */
public static class TypeParser implements Mapper.TypeParser {
    final RangeType type;

    public TypeParser(RangeType type) {
        this.type = type;
    }

    @Override
    public Mapper.Builder<?,?> parse(String name, Map<String, Object> node,
                                     ParserContext parserContext) throws MapperParsingException {
        Builder builder = new Builder(name, type);
        TypeParsers.parseField(builder, name, node, parserContext);
        // Iterate with an explicit Iterator so recognized properties can be removed;
        // anything left behind is reported as unknown by the caller.
        Iterator<Map.Entry<String, Object>> entries = node.entrySet().iterator();
        while (entries.hasNext()) {
            Map.Entry<String, Object> entry = entries.next();
            String key = entry.getKey();
            Object value = entry.getValue();
            if ("null_value".equals(key)) {
                // Deliberately left in the node: the exception aborts parsing anyway.
                throw new MapperParsingException("Property [null_value] is not supported for [" + this.type.name
                    + "] field types.");
            } else if ("coerce".equals(key)) {
                builder.coerce(TypeParsers.nodeBooleanValue("coerce", value, parserContext));
                entries.remove();
            } else if ("locale".equals(key)) {
                builder.locale(LocaleUtils.parse(value.toString()));
                entries.remove();
            } else if ("format".equals(key)) {
                builder.dateTimeFormatter(parseDateTimeFormatter(value));
                entries.remove();
            } else if (TypeParsers.parseMultiField(builder, name, parserContext, key, value)) {
                entries.remove();
            }
        }
        return builder;
    }
}
public static final class RangeFieldType extends MappedFieldType {
protected RangeType rangeType;
protected FormatDateTimeFormatter dateTimeFormatter;
protected DateMathParser dateMathParser;
public RangeFieldType(RangeType type) {
super();
this.rangeType = Objects.requireNonNull(type);
setTokenized(false);
setHasDocValues(false);
setOmitNorms(true);
if (rangeType == RangeType.DATE) {
setDateTimeFormatter(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER);
}
}
public RangeFieldType(RangeFieldType other) {
super(other);
this.rangeType = other.rangeType;
if (other.dateTimeFormatter() != null) {
setDateTimeFormatter(other.dateTimeFormatter);
}
}
@Override
public MappedFieldType clone() {
return new RangeFieldType(this);
}
@Override
public boolean equals(Object o) {
if (!super.equals(o)) return false;
RangeFieldType that = (RangeFieldType) o;
return Objects.equals(rangeType, that.rangeType) &&
(rangeType == RangeType.DATE) ?
Objects.equals(dateTimeFormatter.format(), that.dateTimeFormatter.format())
&& Objects.equals(dateTimeFormatter.locale(), that.dateTimeFormatter.locale())
: dateTimeFormatter == null && that.dateTimeFormatter == null;
}
@Override
public int hashCode() {
return (dateTimeFormatter == null) ? Objects.hash(super.hashCode(), rangeType)
: Objects.hash(super.hashCode(), rangeType, dateTimeFormatter.format(), dateTimeFormatter.locale());
}
@Override
public String typeName() {
return rangeType.name;
}
@Override
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
super.checkCompatibility(fieldType, conflicts, strict);
if (strict) {
RangeFieldType other = (RangeFieldType)fieldType;
if (this.rangeType != other.rangeType) {
conflicts.add("mapper [" + name()
+ "] is attempting to update from type [" + rangeType.name
+ "] to incompatible type [" + other.rangeType.name + "].");
}
if (this.rangeType == RangeType.DATE) {
if (Objects.equals(dateTimeFormatter().format(), other.dateTimeFormatter().format()) == false) {
conflicts.add("mapper [" + name()
+ "] is used by multiple types. Set update_all_types to true to update [format] across all types.");
}
if (Objects.equals(dateTimeFormatter().locale(), other.dateTimeFormatter().locale()) == false) {
conflicts.add("mapper [" + name()
+ "] is used by multiple types. Set update_all_types to true to update [locale] across all types.");
}
}
}
}
/** Returns the configured date formatter; null unless this is a date_range field. */
public FormatDateTimeFormatter dateTimeFormatter() {
    return dateTimeFormatter;
}
/**
 * Sets the date formatter and rebuilds the derived date math parser.
 * Only legal while the field type is still mutable (pre-freeze).
 */
public void setDateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) {
    checkIfFrozen();
    this.dateTimeFormatter = dateTimeFormatter;
    this.dateMathParser = new DateMathParser(dateTimeFormatter);
}
/** Parser for date math expressions; derived from the formatter, null for non-date ranges. */
protected DateMathParser dateMathParser() {
    return dateMathParser;
}
@Override
public Query termQuery(Object value, QueryShardContext context) {
    // A "term" on a range field is the degenerate range [value, value] intersected with the index.
    final Query rangeQuery = rangeQuery(value, value, true, true, context);
    return boost() == 1f ? rangeQuery : new BoostQuery(rangeQuery, boost());
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
        QueryShardContext context) {
    // Default relation is INTERSECTS, matching the behavior of range queries on scalar fields.
    return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, ShapeRelation.INTERSECTS, context);
}
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
        ShapeRelation relation, QueryShardContext context) {
    // Range fields have no doc-value fallback query, so the field must be indexed.
    failIfNotIndexed();
    return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, relation, null, dateMathParser, context);
}
/** Full-fat variant: delegates to the type-specific query logic in {@link RangeType}. */
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
        ShapeRelation relation, DateTimeZone timeZone, DateMathParser parser, QueryShardContext context) {
    return rangeType.rangeQuery(name(), lowerTerm, upperTerm, includeLower, includeUpper, relation, timeZone, parser, context);
}
}
// Tri-state: null = inherit the context/default include_in_all behavior.
private Boolean includeInAll;
// Whether to coerce out-of-format numeric input; Explicit tracks whether the user set it.
private Explicit<Boolean> coerce;
/**
 * Private constructor — instances are created through the mapper's Builder.
 *
 * @param coerce       whether to coerce malformed numeric input (explicit vs. defaulted)
 * @param includeInAll tri-state include_in_all override; null means "not set"
 */
private RangeFieldMapper(
        String simpleName,
        MappedFieldType fieldType,
        MappedFieldType defaultFieldType,
        Explicit<Boolean> coerce,
        Boolean includeInAll,
        Settings indexSettings,
        MultiFields multiFields,
        CopyTo copyTo) {
    super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
    this.coerce = coerce;
    this.includeInAll = includeInAll;
}
@Override
public RangeFieldType fieldType() {
    // Covariant narrowing: the mapper is always built with a RangeFieldType.
    return (RangeFieldType) super.fieldType();
}
@Override
protected String contentType() {
    // Delegates to the range type's name (e.g. "long_range").
    return fieldType.typeName();
}
@Override
protected RangeFieldMapper clone() {
    return (RangeFieldMapper) super.clone();
}
@Override
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
    // Resolve include_in_all: the field-level setting (this.includeInAll) wins over the context default.
    final boolean includeInAll = context.includeInAll(this.includeInAll, this);
    Range range;
    if (context.externalValueSet()) {
        // Value supplied programmatically (e.g. by a wrapping mapper) rather than parsed from source.
        range = context.parseExternalValue(Range.class);
    } else {
        XContentParser parser = context.parser();
        if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
            RangeFieldType fieldType = fieldType();
            RangeType rangeType = fieldType.rangeType;
            String fieldName = null;
            // Bounds default to the type's full extent; gt/gte/lt/lte narrow them below.
            // A null value for any bound leaves it at the unbounded default.
            Number from = rangeType.minValue();
            Number to = rangeType.maxValue();
            boolean includeFrom = DEFAULT_INCLUDE_LOWER;
            boolean includeTo = DEFAULT_INCLUDE_UPPER;
            XContentParser.Token token;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    fieldName = parser.currentName();
                } else {
                    // NOTE(review): assumes every value token is preceded by a FIELD_NAME token;
                    // a malformed object whose first token is a value would NPE on fieldName here.
                    if (fieldName.equals(GT_FIELD.getPreferredName())) {
                        includeFrom = false;
                        if (parser.currentToken() != XContentParser.Token.VALUE_NULL) {
                            // parseFrom rounds an exclusive bound up to the next representable value
                            from = rangeType.parseFrom(fieldType, parser, coerce.value(), includeFrom);
                        }
                    } else if (fieldName.equals(GTE_FIELD.getPreferredName())) {
                        includeFrom = true;
                        if (parser.currentToken() != XContentParser.Token.VALUE_NULL) {
                            from = rangeType.parseFrom(fieldType, parser, coerce.value(), includeFrom);
                        }
                    } else if (fieldName.equals(LT_FIELD.getPreferredName())) {
                        includeTo = false;
                        if (parser.currentToken() != XContentParser.Token.VALUE_NULL) {
                            // parseTo rounds an exclusive bound down to the previous representable value
                            to = rangeType.parseTo(fieldType, parser, coerce.value(), includeTo);
                        }
                    } else if (fieldName.equals(LTE_FIELD.getPreferredName())) {
                        includeTo = true;
                        if (parser.currentToken() != XContentParser.Token.VALUE_NULL) {
                            to = rangeType.parseTo(fieldType, parser, coerce.value(), includeTo);
                        }
                    } else {
                        throw new MapperParsingException("error parsing field [" +
                            name() + "], with unknown parameter [" + fieldName + "]");
                    }
                }
            }
            range = new Range(rangeType, from, to, includeFrom, includeTo);
        } else {
            throw new MapperParsingException("error parsing field ["
                + name() + "], expected an object but got " + parser.currentName());
        }
    }
    if (includeInAll) {
        // Contribute the human-readable interval notation (e.g. "[1:5)") to _all.
        context.allEntries().addText(fieldType.name(), range.toString(), fieldType.boost());
    }
    boolean indexed = fieldType.indexOptions() != IndexOptions.NONE;
    boolean docValued = fieldType.hasDocValues();
    boolean stored = fieldType.stored();
    fields.addAll(fieldType().rangeType.createFields(name(), range, indexed, docValued, stored));
}
@Override
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
    super.doMerge(mergeWith, updateAllTypes);
    final RangeFieldMapper incoming = (RangeFieldMapper) mergeWith;
    // include_in_all always follows the incoming mapping; coerce is only taken over
    // when the incoming mapping set it explicitly.
    this.includeInAll = incoming.includeInAll;
    if (incoming.coerce.explicit()) {
        this.coerce = incoming.coerce;
    }
}
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
    super.doXContentBody(builder, includeDefaults, params);
    // BUG FIX: format/locale only exist for date ranges (dateTimeFormatter() is null for
    // numeric ranges). Previously `includeDefaults || (formatter != null && ...)` short-circuited
    // past the null check when includeDefaults was true, NPE-ing on .format()/.locale() below.
    // Gate the whole section on the range type.
    if (fieldType().rangeType == RangeType.DATE) {
        if (includeDefaults || (fieldType().dateTimeFormatter() != null
                && fieldType().dateTimeFormatter().format().equals(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format()) == false)) {
            builder.field("format", fieldType().dateTimeFormatter().format());
        }
        if (includeDefaults || (fieldType().dateTimeFormatter() != null
                && fieldType().dateTimeFormatter().locale() != Locale.ROOT)) {
            builder.field("locale", fieldType().dateTimeFormatter().locale());
        }
    }
    if (includeDefaults || coerce.explicit()) {
        builder.field("coerce", coerce.value());
    }
    // Tri-state: emit the explicit value when set; otherwise the default (false) only on request.
    if (includeInAll != null) {
        builder.field("include_in_all", includeInAll);
    } else if (includeDefaults) {
        builder.field("include_in_all", false);
    }
}
/**
 * Enum defining the supported range types. Each constant supplies the type-specific logic for
 * parsing bounds, building the Lucene {@code *RangeField} index structure, encoding bounds as
 * sortable bytes, and creating WITHIN / CONTAINS / INTERSECTS queries.
 */
public enum RangeType {
    /** Date ranges: bounds are parsed via date math and handled as epoch-millis longs. */
    DATE("date_range", NumberType.LONG) {
        @Override
        public Field getRangeField(String name, Range r) {
            return new LongRangeField(name, new long[] {r.from.longValue()}, new long[] {r.to.longValue()});
        }
        // Parses a date string; "now" is rejected because indexed values must be deterministic.
        private Number parse(DateMathParser dateMathParser, String dateStr) {
            return dateMathParser.parse(dateStr, () -> {throw new IllegalArgumentException("now is not used at indexing time");});
        }
        @Override
        public Number parseFrom(RangeFieldType fieldType, XContentParser parser, boolean coerce, boolean included)
                throws IOException {
            Number value = parse(fieldType.dateMathParser, parser.text());
            // exclusive lower bound is rounded up to the next millisecond
            return included ? value : nextUp(value);
        }
        @Override
        public Number parseTo(RangeFieldType fieldType, XContentParser parser, boolean coerce, boolean included)
                throws IOException {
            Number value = parse(fieldType.dateMathParser, parser.text());
            // exclusive upper bound is rounded down to the previous millisecond
            return included ? value : nextDown(value);
        }
        @Override
        public Long minValue() {
            return Long.MIN_VALUE;
        }
        @Override
        public Long maxValue() {
            return Long.MAX_VALUE;
        }
        @Override
        public Number nextUp(Number value) {
            return LONG.nextUp(value);
        }
        @Override
        public Number nextDown(Number value) {
            return LONG.nextDown(value);
        }
        @Override
        public byte[] getBytes(Range r) {
            return LONG.getBytes(r);
        }
        @Override
        public Query rangeQuery(String field, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
                ShapeRelation relation, @Nullable DateTimeZone timeZone, @Nullable DateMathParser parser,
                QueryShardContext context) {
            // Resolve date math (here "now" IS allowed, via context::nowInMillis) up front,
            // then delegate to the generic long-based relation dispatch. Null bounds mean unbounded.
            DateTimeZone zone = (timeZone == null) ? DateTimeZone.UTC : timeZone;
            DateMathParser dateMathParser = (parser == null) ?
                new DateMathParser(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER) : parser;
            Long low = lowerTerm == null ? Long.MIN_VALUE :
                dateMathParser.parse(lowerTerm instanceof BytesRef ? ((BytesRef) lowerTerm).utf8ToString() : lowerTerm.toString(),
                    context::nowInMillis, false, zone);
            Long high = upperTerm == null ? Long.MAX_VALUE :
                dateMathParser.parse(upperTerm instanceof BytesRef ? ((BytesRef) upperTerm).utf8ToString() : upperTerm.toString(),
                    context::nowInMillis, false, zone);
            return super.rangeQuery(field, low, high, includeLower, includeUpper, relation, zone, dateMathParser, context);
        }
        @Override
        public Query withinQuery(String field, Number from, Number to, boolean includeLower, boolean includeUpper) {
            return LONG.withinQuery(field, from, to, includeLower, includeUpper);
        }
        @Override
        public Query containsQuery(String field, Number from, Number to, boolean includeLower, boolean includeUpper) {
            return LONG.containsQuery(field, from, to, includeLower, includeUpper);
        }
        @Override
        public Query intersectsQuery(String field, Number from, Number to, boolean includeLower, boolean includeUpper) {
            return LONG.intersectsQuery(field, from, to, includeLower, includeUpper);
        }
    },
    // todo support half_float
    /** Single-precision float ranges; exclusive bounds are adjusted by one ulp. */
    FLOAT("float_range", NumberType.FLOAT) {
        @Override
        public Float minValue() {
            return Float.NEGATIVE_INFINITY;
        }
        @Override
        public Float maxValue() {
            return Float.POSITIVE_INFINITY;
        }
        @Override
        public Float nextUp(Number value) {
            return Math.nextUp(value.floatValue());
        }
        @Override
        public Float nextDown(Number value) {
            return Math.nextDown(value.floatValue());
        }
        @Override
        public Field getRangeField(String name, Range r) {
            return new FloatRangeField(name, new float[] {r.from.floatValue()}, new float[] {r.to.floatValue()});
        }
        @Override
        public byte[] getBytes(Range r) {
            // sortable-bytes layout: [from][to], each Float.BYTES wide
            byte[] b = new byte[Float.BYTES*2];
            NumericUtils.intToSortableBytes(NumericUtils.floatToSortableInt(r.from.floatValue()), b, 0);
            NumericUtils.intToSortableBytes(NumericUtils.floatToSortableInt(r.to.floatValue()), b, Float.BYTES);
            return b;
        }
        @Override
        public Query withinQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
            return FloatRangeField.newWithinQuery(field,
                new float[] {includeFrom ? (Float)from : Math.nextUp((Float)from)},
                new float[] {includeTo ? (Float)to : Math.nextDown((Float)to)});
        }
        @Override
        public Query containsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
            return FloatRangeField.newContainsQuery(field,
                new float[] {includeFrom ? (Float)from : Math.nextUp((Float)from)},
                new float[] {includeTo ? (Float)to : Math.nextDown((Float)to)});
        }
        @Override
        public Query intersectsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
            return FloatRangeField.newIntersectsQuery(field,
                new float[] {includeFrom ? (Float)from : Math.nextUp((Float)from)},
                new float[] {includeTo ? (Float)to : Math.nextDown((Float)to)});
        }
    },
    /** Double-precision float ranges; exclusive bounds are adjusted by one ulp. */
    DOUBLE("double_range", NumberType.DOUBLE) {
        @Override
        public Double minValue() {
            return Double.NEGATIVE_INFINITY;
        }
        @Override
        public Double maxValue() {
            return Double.POSITIVE_INFINITY;
        }
        @Override
        public Double nextUp(Number value) {
            return Math.nextUp(value.doubleValue());
        }
        @Override
        public Double nextDown(Number value) {
            return Math.nextDown(value.doubleValue());
        }
        @Override
        public Field getRangeField(String name, Range r) {
            return new DoubleRangeField(name, new double[] {r.from.doubleValue()}, new double[] {r.to.doubleValue()});
        }
        @Override
        public byte[] getBytes(Range r) {
            // sortable-bytes layout: [from][to], each Double.BYTES wide
            byte[] b = new byte[Double.BYTES*2];
            NumericUtils.longToSortableBytes(NumericUtils.doubleToSortableLong(r.from.doubleValue()), b, 0);
            NumericUtils.longToSortableBytes(NumericUtils.doubleToSortableLong(r.to.doubleValue()), b, Double.BYTES);
            return b;
        }
        @Override
        public Query withinQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
            return DoubleRangeField.newWithinQuery(field,
                new double[] {includeFrom ? (Double)from : Math.nextUp((Double)from)},
                new double[] {includeTo ? (Double)to : Math.nextDown((Double)to)});
        }
        @Override
        public Query containsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
            return DoubleRangeField.newContainsQuery(field,
                new double[] {includeFrom ? (Double)from : Math.nextUp((Double)from)},
                new double[] {includeTo ? (Double)to : Math.nextDown((Double)to)});
        }
        @Override
        public Query intersectsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
            return DoubleRangeField.newIntersectsQuery(field,
                new double[] {includeFrom ? (Double)from : Math.nextUp((Double)from)},
                new double[] {includeTo ? (Double)to : Math.nextDown((Double)to)});
        }
    },
    // todo add BYTE support
    // todo add SHORT support
    /** 32-bit integer ranges; exclusive bounds are adjusted by one. */
    INTEGER("integer_range", NumberType.INTEGER) {
        @Override
        public Integer minValue() {
            return Integer.MIN_VALUE;
        }
        @Override
        public Integer maxValue() {
            return Integer.MAX_VALUE;
        }
        @Override
        public Integer nextUp(Number value) {
            return value.intValue() + 1;
        }
        @Override
        public Integer nextDown(Number value) {
            return value.intValue() - 1;
        }
        @Override
        public Field getRangeField(String name, Range r) {
            return new IntRangeField(name, new int[] {r.from.intValue()}, new int[] {r.to.intValue()});
        }
        @Override
        public byte[] getBytes(Range r) {
            // sortable-bytes layout: [from][to], each Integer.BYTES wide
            byte[] b = new byte[Integer.BYTES*2];
            NumericUtils.intToSortableBytes(r.from.intValue(), b, 0);
            NumericUtils.intToSortableBytes(r.to.intValue(), b, Integer.BYTES);
            return b;
        }
        @Override
        public Query withinQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
            return IntRangeField.newWithinQuery(field, new int[] {(Integer)from + (includeFrom ? 0 : 1)},
                new int[] {(Integer)to - (includeTo ? 0 : 1)});
        }
        @Override
        public Query containsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
            return IntRangeField.newContainsQuery(field, new int[] {(Integer)from + (includeFrom ? 0 : 1)},
                new int[] {(Integer)to - (includeTo ? 0 : 1)});
        }
        @Override
        public Query intersectsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
            return IntRangeField.newIntersectsQuery(field, new int[] {(Integer)from + (includeFrom ? 0 : 1)},
                new int[] {(Integer)to - (includeTo ? 0 : 1)});
        }
    },
    /** 64-bit integer ranges; exclusive bounds are adjusted by one. Also backs DATE. */
    LONG("long_range", NumberType.LONG) {
        @Override
        public Long minValue() {
            return Long.MIN_VALUE;
        }
        @Override
        public Long maxValue() {
            return Long.MAX_VALUE;
        }
        @Override
        public Long nextUp(Number value) {
            return value.longValue() + 1;
        }
        @Override
        public Long nextDown(Number value) {
            return value.longValue() - 1;
        }
        @Override
        public Field getRangeField(String name, Range r) {
            return new LongRangeField(name, new long[] {r.from.longValue()}, new long[] {r.to.longValue()});
        }
        @Override
        public byte[] getBytes(Range r) {
            // sortable-bytes layout: [from][to]; null bounds fall back to the full extent.
            // NOTE(review): the other types do not null-check r.from/r.to here — confirm
            // whether null bounds can actually reach getBytes for them.
            byte[] b = new byte[Long.BYTES*2];
            long from = r.from == null ? Long.MIN_VALUE : r.from.longValue();
            long to = r.to == null ? Long.MAX_VALUE : r.to.longValue();
            NumericUtils.longToSortableBytes(from, b, 0);
            NumericUtils.longToSortableBytes(to, b, Long.BYTES);
            return b;
        }
        @Override
        public Query withinQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
            return LongRangeField.newWithinQuery(field, new long[] {(Long)from + (includeFrom ? 0 : 1)},
                new long[] {(Long)to - (includeTo ? 0 : 1)});
        }
        @Override
        public Query containsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
            return LongRangeField.newContainsQuery(field, new long[] {(Long)from + (includeFrom ? 0 : 1)},
                new long[] {(Long)to - (includeTo ? 0 : 1)});
        }
        @Override
        public Query intersectsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) {
            return LongRangeField.newIntersectsQuery(field, new long[] {(Long)from + (includeFrom ? 0 : 1)},
                new long[] {(Long)to - (includeTo ? 0 : 1)});
        }
    };

    RangeType(String name, NumberType type) {
        this.name = name;
        this.numberType = type;
    }

    /** Get the associated type name. */
    public final String typeName() {
        return name;
    }

    /** Encodes the range bounds as fixed-width sortable bytes ([from][to]). */
    protected abstract byte[] getBytes(Range range);

    /** Creates the Lucene range field used to index a parsed range. */
    public abstract Field getRangeField(String name, Range range);

    /** Builds the index/store fields for one parsed range value. */
    public List<IndexableField> createFields(String name, Range range, boolean indexed, boolean docValued, boolean stored) {
        assert range != null : "range cannot be null when creating fields";
        List<IndexableField> fields = new ArrayList<>();
        if (indexed) {
            fields.add(getRangeField(name, range));
        }
        // todo add docValues ranges once aggregations are supported
        if (stored) {
            // stored form is the human-readable interval notation from Range#toString
            fields.add(new StoredField(name, range.toString()));
        }
        return fields;
    }

    /** parses from value. rounds according to included flag */
    public Number parseFrom(RangeFieldType fieldType, XContentParser parser, boolean coerce, boolean included) throws IOException {
        Number value = numberType.parse(parser, coerce);
        return included ? value : nextUp(value);
    }

    /** parses to value. rounds according to included flag */
    public Number parseTo(RangeFieldType fieldType, XContentParser parser, boolean coerce, boolean included) throws IOException {
        Number value = numberType.parse(parser, coerce);
        return included ? value : nextDown(value);
    }

    /** Lower bound of the type's representable domain (used for unbounded "from"). */
    public abstract Number minValue();
    /** Upper bound of the type's representable domain (used for unbounded "to"). */
    public abstract Number maxValue();
    /** Smallest representable value strictly greater than {@code value} for this type. */
    public abstract Number nextUp(Number value);
    /** Largest representable value strictly less than {@code value} for this type. */
    public abstract Number nextDown(Number value);
    public abstract Query withinQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo);
    public abstract Query containsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo);
    public abstract Query intersectsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo);

    /** Dispatches to the query matching the requested spatial relation; defaults to INTERSECTS. */
    public Query rangeQuery(String field, Object from, Object to, boolean includeFrom, boolean includeTo,
            ShapeRelation relation, @Nullable DateTimeZone timeZone, @Nullable DateMathParser dateMathParser,
            QueryShardContext context) {
        Number lower = from == null ? minValue() : numberType.parse(from);
        Number upper = to == null ? maxValue() : numberType.parse(to);
        if (relation == ShapeRelation.WITHIN) {
            return withinQuery(field, lower, upper, includeFrom, includeTo);
        } else if (relation == ShapeRelation.CONTAINS) {
            return containsQuery(field, lower, upper, includeFrom, includeTo);
        }
        return intersectsQuery(field, lower, upper, includeFrom, includeTo);
    }

    // the mapping type name (e.g. "date_range")
    public final String name;
    // the scalar number type used to parse individual bound values
    private final NumberType numberType;
}
/** Value holder for a single parsed range: lower/upper bound plus inclusiveness flags. */
public static class Range {
    RangeType type;
    private Number from;
    private Number to;
    private boolean includeFrom;
    private boolean includeTo;

    /**
     * @param type        concrete range type (INTEGER, LONG, FLOAT, DOUBLE, DATE)
     * @param from        lower bound, already shifted for exclusivity by the parse methods
     * @param to          upper bound, already shifted for exclusivity by the parse methods
     * @param includeFrom whether the lower bound is inclusive
     * @param includeTo   whether the upper bound is inclusive
     */
    public Range(RangeType type, Number from, Number to, boolean includeFrom, boolean includeTo) {
        this.type = type;
        this.from = from;
        this.to = to;
        this.includeFrom = includeFrom;
        this.includeTo = includeTo;
    }

    @Override
    public String toString() {
        // Bounds were shifted at parse time for exclusive endpoints; undo that shift (unless the
        // bound is the type's full extent) so the rendered interval notation, e.g. "[1:5)",
        // shows the user-supplied values.
        Number lower = (includeFrom || from.equals(type.minValue())) ? from : type.nextDown(from);
        Number upper = (includeTo || to.equals(type.maxValue())) ? to : type.nextUp(to);
        return (includeFrom ? "[" : "(") + String.valueOf(lower) + ':' + String.valueOf(upper)
            + (includeTo ? "]" : ")");
    }
}
}

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -154,7 +155,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
String routing = context.sourceToParse().routing();
if (routing != null) {
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {

View File

@ -19,8 +19,8 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
@ -364,7 +364,7 @@ public class ScaledFloatFieldMapper extends FieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
final boolean includeInAll = context.includeInAll(this.includeInAll, this);
XContentParser parser = context.parser();

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
@ -227,7 +228,7 @@ public class SourceFieldMapper extends MetadataFieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
if (!enabled) {
return;
}

View File

@ -23,6 +23,7 @@ import org.apache.logging.log4j.Logger;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
@ -527,7 +528,7 @@ public class StringFieldMapper extends FieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
ValueAndBoost valueAndBoost = parseCreateFieldForString(context, fieldType().nullValueAsString(), fieldType().boost());
if (valueAndBoost.value() == null) {
return;

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
@ -197,7 +198,7 @@ public class TTLFieldMapper extends MetadataFieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException, AlreadyExpiredException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException, AlreadyExpiredException {
if (enabledState.enabled) {
long ttl = context.sourceToParse().ttl();
if (ttl <= 0 && defaultTTL > 0) { // no ttl provided so we use the default value

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
@ -371,7 +372,7 @@ public class TextFieldMapper extends FieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
final String value;
if (context.externalValueSet()) {
value = context.externalValue().toString();

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.action.TimestampParsingException;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
@ -237,7 +238,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
if (enabledState.enabled) {
long timestamp = context.sourceToParse().timestamp();
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {

View File

@ -24,6 +24,7 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -114,7 +115,7 @@ public class TokenCountFieldMapper extends FieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
final String value;
if (context.externalValueSet()) {
value = context.externalValue().toString();

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
@ -282,7 +283,7 @@ public class TypeFieldMapper extends MetadataFieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored()) {
return;
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
@ -125,7 +126,7 @@ public class UidFieldMapper extends MetadataFieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
Field uid = new Field(NAME, Uid.createUid(context.sourceToParse().type(), context.sourceToParse().id()), Defaults.FIELD_TYPE);
fields.add(uid);
if (fieldType().hasDocValues()) {

View File

@ -23,6 +23,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -101,7 +102,7 @@ public class VersionFieldMapper extends MetadataFieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
// see InternalEngine.updateVersion to see where the real version value is set
final Field version = new NumericDocValuesField(NAME, -1L);
context.version(version);

View File

@ -23,6 +23,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
@ -155,7 +156,7 @@ public class SeqNoFieldMapper extends MetadataFieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
// see InternalEngine.updateVersion to see where the real version value is set
final Field seqNo = new NumericDocValuesField(NAME, SequenceNumbersService.UNASSIGNED_SEQ_NO);
context.seqNo(seqNo);

View File

@ -23,9 +23,11 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.joda.DateMathParser;
@ -38,6 +40,7 @@ import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.LegacyDateFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.RangeFieldMapper;
import org.joda.time.DateTimeZone;
import java.io.IOException;
@ -55,17 +58,18 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
private static final ParseField FIELDDATA_FIELD = new ParseField("fielddata").withAllDeprecated("[no replacement]");
private static final ParseField NAME_FIELD = new ParseField("_name")
.withAllDeprecated("query name is not supported in short version of range query");
private static final ParseField LTE_FIELD = new ParseField("lte", "le");
private static final ParseField GTE_FIELD = new ParseField("gte", "ge");
private static final ParseField FROM_FIELD = new ParseField("from");
private static final ParseField TO_FIELD = new ParseField("to");
.withAllDeprecated("query name is not supported in short version of range query");
public static final ParseField LTE_FIELD = new ParseField("lte", "le");
public static final ParseField GTE_FIELD = new ParseField("gte", "ge");
public static final ParseField FROM_FIELD = new ParseField("from");
public static final ParseField TO_FIELD = new ParseField("to");
private static final ParseField INCLUDE_LOWER_FIELD = new ParseField("include_lower");
private static final ParseField INCLUDE_UPPER_FIELD = new ParseField("include_upper");
private static final ParseField GT_FIELD = new ParseField("gt");
private static final ParseField LT_FIELD = new ParseField("lt");
public static final ParseField GT_FIELD = new ParseField("gt");
public static final ParseField LT_FIELD = new ParseField("lt");
private static final ParseField TIME_ZONE_FIELD = new ParseField("time_zone");
private static final ParseField FORMAT_FIELD = new ParseField("format");
private static final ParseField RELATION_FIELD = new ParseField("relation");
private final String fieldName;
@ -81,6 +85,8 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
private FormatDateTimeFormatter format;
private ShapeRelation relation;
/**
* A Query that matches documents within an range of terms.
*
@ -108,6 +114,12 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
if (formatString != null) {
format = Joda.forPattern(formatString);
}
if (in.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
String relationString = in.readOptionalString();
if (relationString != null) {
relation = ShapeRelation.getRelationByName(relationString);
}
}
}
@Override
@ -123,6 +135,13 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
formatString = this.format.format();
}
out.writeOptionalString(formatString);
if (out.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
String relationString = null;
if (this.relation != null) {
relationString = this.relation.getRelationName();
}
out.writeOptionalString(relationString);
}
}
/**
@ -283,12 +302,27 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
}
DateMathParser getForceDateParser() { // pkg private for testing
if (this.format != null) {
if (this.format != null) {
return new DateMathParser(this.format);
}
return null;
}
public ShapeRelation relation() {
return this.relation;
}
/**
 * Sets the relation used when querying a range field, given by its
 * {@link ShapeRelation} name (e.g. {@code "within"}, {@code "contains"},
 * {@code "intersects"}).
 *
 * @param relation the relation name; must be non-null and resolvable
 * @return this builder, for chaining
 * @throws IllegalArgumentException if {@code relation} is null or is not a valid relation name
 */
public RangeQueryBuilder relation(String relation) {
    if (relation == null) {
        throw new IllegalArgumentException("relation cannot be null");
    }
    // Resolve into a local first: validating before assignment keeps the
    // builder's current relation intact when an invalid name is passed
    // (previously this.relation was nulled out before the throw).
    ShapeRelation resolved = ShapeRelation.getRelationByName(relation);
    if (resolved == null) {
        throw new IllegalArgumentException(relation + " is not a valid relation");
    }
    this.relation = resolved;
    return this;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
@ -303,6 +337,9 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
if (format != null) {
builder.field(FORMAT_FIELD.getPreferredName(), format.format());
}
if (relation != null) {
builder.field(RELATION_FIELD.getPreferredName(), relation.getRelationName());
}
printBoostAndQueryName(builder);
builder.endObject();
builder.endObject();
@ -320,6 +357,7 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String queryName = null;
String format = null;
String relation = null;
String currentFieldName = null;
XContentParser.Token token;
@ -361,6 +399,8 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
timeZone = parser.text();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FORMAT_FIELD)) {
format = parser.text();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, RELATION_FIELD)) {
relation = parser.text();
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
@ -393,6 +433,9 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
if (format != null) {
rangeQuery.format(format);
}
if (relation != null) {
rangeQuery.relation(relation);
}
return Optional.of(rangeQuery);
}
@ -458,7 +501,14 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
query = ((DateFieldMapper.DateFieldType) mapper).rangeQuery(from, to, includeLower, includeUpper,
timeZone, getForceDateParser(), context);
} else {
} else if (mapper instanceof RangeFieldMapper.RangeFieldType && mapper.typeName() == RangeFieldMapper.RangeType.DATE.name) {
DateMathParser forcedDateParser = null;
if (this.format != null) {
forcedDateParser = new DateMathParser(this.format);
}
query = ((RangeFieldMapper.RangeFieldType) mapper).rangeQuery(from, to, includeLower, includeUpper,
relation, timeZone, forcedDateParser, context);
} else {
if (timeZone != null) {
throw new QueryShardException(context, "[range] time_zone can not be applied to non date field ["
+ fieldName + "]");

View File

@ -43,6 +43,7 @@ import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.ParentFieldMapper;
import org.elasticsearch.index.mapper.RangeFieldMapper;
import org.elasticsearch.index.mapper.RoutingFieldMapper;
import org.elasticsearch.index.mapper.ScaledFloatFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
@ -101,6 +102,9 @@ public class IndicesModule extends AbstractModule {
for (NumberFieldMapper.NumberType type : NumberFieldMapper.NumberType.values()) {
mappers.put(type.typeName(), new NumberFieldMapper.TypeParser(type));
}
for (RangeFieldMapper.RangeType type : RangeFieldMapper.RangeType.values()) {
mappers.put(type.typeName(), new RangeFieldMapper.TypeParser(type));
}
mappers.put(BooleanFieldMapper.CONTENT_TYPE, new BooleanFieldMapper.TypeParser());
mappers.put(BinaryFieldMapper.CONTENT_TYPE, new BinaryFieldMapper.TypeParser());
mappers.put(DateFieldMapper.CONTENT_TYPE, new DateFieldMapper.TypeParser());

View File

@ -0,0 +1,144 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
import org.junit.Before;
import java.io.IOException;
import java.util.Collection;
import java.util.Set;
import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom;
import static org.hamcrest.Matchers.containsString;
/**
 * Shared scaffolding for mapper tests that run the same scenarios across a
 * family of numeric-like field types (numbers, ranges). Subclasses declare the
 * concrete type names via {@link #setTypeList()} and implement one
 * {@code doTestXxx} hook per scenario; each public test method simply invokes
 * its hook once per declared type.
 */
public abstract class AbstractNumericFieldMapperTestCase extends ESSingleNodeTestCase {
    // Populated by setTypeList(); every test below iterates over these type names.
    protected Set<String> TYPES;
    protected IndexService indexService;
    protected DocumentMapperParser parser;

    @Before
    public void before() {
        indexService = createIndex("test");
        parser = indexService.mapperService().documentMapperParser();
        setTypeList();
    }

    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        return pluginList(InternalSettingsPlugin.class);
    }

    /** Fills {@link #TYPES} with the mapper type names this subclass covers. */
    protected abstract void setTypeList();

    // Per-type hooks supplied by subclasses.
    protected abstract void doTestDefaults(String type) throws Exception;
    protected abstract void doTestNotIndexed(String type) throws Exception;
    protected abstract void doTestNoDocValues(String type) throws Exception;
    protected abstract void doTestStore(String type) throws Exception;
    protected abstract void doTestCoerce(String type) throws IOException;
    protected abstract void doTestIncludeInAll(String type) throws Exception;
    protected abstract void doTestNullValue(String type) throws IOException;

    public void testDefaults() throws Exception {
        for (String typeName : TYPES) {
            doTestDefaults(typeName);
        }
    }

    public void testNotIndexed() throws Exception {
        for (String typeName : TYPES) {
            doTestNotIndexed(typeName);
        }
    }

    public void testNoDocValues() throws Exception {
        for (String typeName : TYPES) {
            doTestNoDocValues(typeName);
        }
    }

    public void testStore() throws Exception {
        for (String typeName : TYPES) {
            doTestStore(typeName);
        }
    }

    public void testCoerce() throws Exception {
        for (String typeName : TYPES) {
            doTestCoerce(typeName);
        }
    }

    public void testIncludeInAll() throws Exception {
        for (String typeName : TYPES) {
            doTestIncludeInAll(typeName);
        }
    }

    public void testNullValue() throws IOException {
        for (String typeName : TYPES) {
            doTestNullValue(typeName);
        }
    }

    public void testEmptyName() throws IOException {
        // On 5.x+ indices an empty field name must be rejected at mapping parse time.
        for (String typeName : TYPES) {
            String mapping = emptyNameMapping(typeName);
            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> parser.parse("type", new CompressedXContent(mapping))
            );
            assertThat(e.getMessage(), containsString("name cannot be empty string"));
        }
        // Indices created before 5.0 keep accepting empty names for backwards compatibility.
        Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5);
        Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build();
        indexService = createIndex("test_old", oldIndexSettings);
        parser = indexService.mapperService().documentMapperParser();
        for (String typeName : TYPES) {
            String mapping = emptyNameMapping(typeName);
            DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
            assertEquals(mapping, defaultMapper.mappingSource().string());
        }
    }

    /** Builds a one-field mapping whose field name is the empty string, for {@code type}. */
    private String emptyNameMapping(String type) throws IOException {
        return XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("").field("type", type).endObject().endObject()
            .endObject().endObject().string();
    }
}

View File

@ -24,6 +24,7 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@ -98,7 +99,7 @@ public class DocumentFieldMapperTests extends LuceneTestCase {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
}
@Override

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.IndexableField;
import org.locationtech.spatial4j.shape.Point;
import org.apache.lucene.document.Field;
import org.elasticsearch.Version;
@ -190,7 +191,7 @@ public class ExternalMapper extends FieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
throw new UnsupportedOperationException();
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.BooleanFieldMapper;
@ -54,7 +55,7 @@ public class ExternalMetadataMapper extends MetadataFieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
// handled in post parse
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.settings.Settings;
@ -133,7 +134,7 @@ public class FakeStringFieldMapper extends FieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
StringFieldMapper.ValueAndBoost valueAndBoost = parseCreateFieldForString(context, fieldType().boost());
if (valueAndBoost.value() == null) {
return;

View File

@ -216,7 +216,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
}
@Override

View File

@ -21,58 +21,23 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
import org.junit.Before;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom;
import static org.hamcrest.Matchers.containsString;
public class NumberFieldMapperTests extends ESSingleNodeTestCase {
public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
private static final Set<String> TYPES = new HashSet<>(Arrays.asList("byte", "short", "integer", "long", "float", "double"));
IndexService indexService;
DocumentMapperParser parser;
@Before
public void before() {
indexService = createIndex("test");
parser = indexService.mapperService().documentMapperParser();
}
public void testDefaults() throws Exception {
for (String type : TYPES) {
doTestDefaults(type);
}
@Override
protected void setTypeList() {
TYPES = new HashSet<>(Arrays.asList("byte", "short", "integer", "long", "float", "double"));
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(InternalSettingsPlugin.class);
}
public void doTestDefaults(String type) throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).endObject().endObject()
@ -99,12 +64,7 @@ public class NumberFieldMapperTests extends ESSingleNodeTestCase {
assertFalse(dvField.fieldType().stored());
}
public void testNotIndexed() throws Exception {
for (String type : TYPES) {
doTestNotIndexed(type);
}
}
@Override
public void doTestNotIndexed(String type) throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).field("index", false).endObject().endObject()
@ -126,12 +86,7 @@ public class NumberFieldMapperTests extends ESSingleNodeTestCase {
assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType());
}
public void testNoDocValues() throws Exception {
for (String type : TYPES) {
doTestNoDocValues(type);
}
}
@Override
public void doTestNoDocValues(String type) throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).field("doc_values", false).endObject().endObject()
@ -154,12 +109,7 @@ public class NumberFieldMapperTests extends ESSingleNodeTestCase {
assertEquals(123, pointField.numericValue().doubleValue(), 0d);
}
public void testStore() throws Exception {
for (String type : TYPES) {
doTestStore(type);
}
}
@Override
public void doTestStore(String type) throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).field("store", true).endObject().endObject()
@ -187,12 +137,7 @@ public class NumberFieldMapperTests extends ESSingleNodeTestCase {
assertEquals(123, storedField.numericValue().doubleValue(), 0d);
}
public void testCoerce() throws Exception {
for (String type : TYPES) {
doTestCoerce(type);
}
}
@Override
public void doTestCoerce(String type) throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).endObject().endObject()
@ -239,7 +184,7 @@ public class NumberFieldMapperTests extends ESSingleNodeTestCase {
}
}
public void doTestIgnoreMalformed(String type) throws IOException {
private void doTestIgnoreMalformed(String type) throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).endObject().endObject()
.endObject().endObject().string();
@ -272,12 +217,7 @@ public class NumberFieldMapperTests extends ESSingleNodeTestCase {
assertEquals(0, fields.length);
}
public void testIncludeInAll() throws Exception {
for (String type : TYPES) {
doTestIncludeInAll(type);
}
}
@Override
public void doTestIncludeInAll(String type) throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).endObject().endObject()
@ -333,13 +273,8 @@ public class NumberFieldMapperTests extends ESSingleNodeTestCase {
}
}
public void testNullValue() throws IOException {
for (String type : TYPES) {
doTestNullValue(type);
}
}
private void doTestNullValue(String type) throws IOException {
@Override
protected void doTestNullValue(String type) throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject()
.startObject("type")
.startObject("properties")
@ -393,32 +328,4 @@ public class NumberFieldMapperTests extends ESSingleNodeTestCase {
assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType());
assertFalse(dvField.fieldType().stored());
}
public void testEmptyName() throws IOException {
// after version 5
for (String type : TYPES) {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("").field("type", type).endObject().endObject()
.endObject().endObject().string();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping))
);
assertThat(e.getMessage(), containsString("name cannot be empty string"));
}
// before 5.x
Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5);
Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build();
indexService = createIndex("test_old", oldIndexSettings);
parser = indexService.mapperService().documentMapperParser();
for (String type : TYPES) {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("").field("type", type).endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, defaultMapper.mappingSource().string());
}
}
}

View File

@ -0,0 +1,370 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import static org.elasticsearch.index.query.RangeQueryBuilder.GT_FIELD;
import static org.elasticsearch.index.query.RangeQueryBuilder.GTE_FIELD;
import static org.elasticsearch.index.query.RangeQueryBuilder.LT_FIELD;
import static org.elasticsearch.index.query.RangeQueryBuilder.LTE_FIELD;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.containsString;
public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
private static String FROM_DATE = "2016-10-31";
private static String TO_DATE = "2016-11-01 20:00:00";
private static int FROM = 5;
private static String FROM_STR = FROM + "";
private static int TO = 10;
private static String TO_STR = TO + "";
private static String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis";
@Override
protected void setTypeList() {
TYPES = new HashSet<>(Arrays.asList("date_range", "float_range", "double_range", "integer_range", "long_range"));
}
private Object getFrom(String type) {
if (type.equals("date_range")) {
return FROM_DATE;
}
return random().nextBoolean() ? FROM : FROM_STR;
}
private String getFromField() {
return random().nextBoolean() ? GT_FIELD.getPreferredName() : GTE_FIELD.getPreferredName();
}
private String getToField() {
return random().nextBoolean() ? LT_FIELD.getPreferredName() : LTE_FIELD.getPreferredName();
}
private Object getTo(String type) {
if (type.equals("date_range")) {
return TO_DATE;
}
return random().nextBoolean() ? TO : TO_STR;
}
private Number getMax(String type) {
if (type.equals("date_range") || type.equals("long_range")) {
return Long.MAX_VALUE;
} else if (type.equals("integer_range")) {
return Integer.MAX_VALUE;
} else if (type.equals("float_range")) {
return Float.POSITIVE_INFINITY;
}
return Double.POSITIVE_INFINITY;
}
@Override
public void doTestDefaults(String type) throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type);
if (type.equals("date_range")) {
mapping = mapping.field("format", DATE_FORMAT);
}
mapping = mapping.endObject().endObject().endObject().endObject();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string()));
assertEquals(mapping.string(), mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field(getFromField(), getFrom(type))
.field(getToField(), getTo(type))
.endObject()
.endObject().bytes());
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
IndexableField pointField = fields[0];
assertEquals(2, pointField.fieldType().pointDimensionCount());
assertFalse(pointField.fieldType().stored());
}
@Override
protected void doTestNotIndexed(String type) throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).field("index", false);
if (type.equals("date_range")) {
mapping = mapping.field("format", DATE_FORMAT);
}
mapping = mapping.endObject().endObject().endObject().endObject();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string()));
assertEquals(mapping.string(), mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field(getFromField(), getFrom(type))
.field(getToField(), getTo(type))
.endObject()
.endObject().bytes());
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(0, fields.length);
}
@Override
protected void doTestNoDocValues(String type) throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).field("doc_values", false);
if (type.equals("date_range")) {
mapping = mapping.field("format", DATE_FORMAT);
}
mapping = mapping.endObject().endObject().endObject().endObject();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string()));
assertEquals(mapping.string(), mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field(getFromField(), getFrom(type))
.field(getToField(), getTo(type))
.endObject()
.endObject().bytes());
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
IndexableField pointField = fields[0];
assertEquals(2, pointField.fieldType().pointDimensionCount());
}
@Override
protected void doTestStore(String type) throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).field("store", true);
if (type.equals("date_range")) {
mapping = mapping.field("format", DATE_FORMAT);
}
mapping = mapping.endObject().endObject().endObject().endObject();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string()));
assertEquals(mapping.string(), mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field(getFromField(), getFrom(type))
.field(getToField(), getTo(type))
.endObject()
.endObject().bytes());
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
IndexableField pointField = fields[0];
assertEquals(2, pointField.fieldType().pointDimensionCount());
IndexableField storedField = fields[1];
assertTrue(storedField.fieldType().stored());
assertThat(storedField.stringValue(), containsString(type.equals("date_range") ? "1477872000000" : "5"));
}
@Override
public void doTestCoerce(String type) throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type);
if (type.equals("date_range")) {
mapping = mapping.field("format", DATE_FORMAT);
}
mapping = mapping.endObject().endObject().endObject().endObject();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string()));
assertEquals(mapping.string(), mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field(getFromField(), getFrom(type))
.field(getToField(), getTo(type))
.endObject()
.endObject().bytes());
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
IndexableField pointField = fields[0];
assertEquals(2, pointField.fieldType().pointDimensionCount());
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).field("coerce", false).endObject().endObject()
.endObject().endObject();
DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping.string()));
assertEquals(mapping.string(), mapper2.mappingSource().toString());
ThrowingRunnable runnable = () -> mapper2.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field(getFromField(), "5.2")
.field(getToField(), "10")
.endObject()
.endObject().bytes());
MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
assertThat(e.getCause().getMessage(), anyOf(containsString("passed as String"), containsString("failed to parse date")));
}
@Override
protected void doTestIncludeInAll(String type) throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type);
if (type.equals("date_range")) {
mapping = mapping.field("format", DATE_FORMAT);
}
mapping = mapping.endObject().endObject().endObject().endObject();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string()));
assertEquals(mapping.string(), mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field(getFromField(), getFrom(type))
.field(getToField(), getTo(type))
.endObject()
.endObject().bytes());
IndexableField[] fields = doc.rootDoc().getFields("_all");
assertEquals(1, fields.length);
assertThat(fields[0].stringValue(), containsString(type.equals("date_range") ? "1477872000000" : "5"));
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type);
if (type.equals("date_range")) {
mapping = mapping.field("format", DATE_FORMAT);
}
mapping = mapping.field("include_in_all", false).endObject().endObject()
.endObject().endObject();
mapper = parser.parse("type", new CompressedXContent(mapping.string()));
assertEquals(mapping.string(), mapper.mappingSource().toString());
doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field(getFromField(), getFrom(type))
.field(getToField(), getTo(type))
.endObject()
.endObject().bytes());
fields = doc.rootDoc().getFields("_all");
assertEquals(0, fields.length);
}
@Override
protected void doTestNullValue(String type) throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).field("store", true);
if (type.equals("date_range")) {
mapping = mapping.field("format", DATE_FORMAT);
}
mapping = mapping.endObject().endObject().endObject().endObject();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string()));
assertEquals(mapping.string(), mapper.mappingSource().toString());
// test null value for min and max
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.nullField(getFromField())
.nullField(getToField())
.endObject()
.endObject().bytes());
assertEquals(2, doc.rootDoc().getFields("field").length);
IndexableField[] fields = doc.rootDoc().getFields("field");
IndexableField storedField = fields[1];
assertThat(storedField.stringValue(), containsString(type.equals("date_range") ? Long.MAX_VALUE+"" : getMax(type)+""));
// test null max value
doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.field(getFromField(), getFrom(type))
.nullField(getToField())
.endObject()
.endObject().bytes());
fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
IndexableField pointField = fields[0];
assertEquals(2, pointField.fieldType().pointDimensionCount());
assertFalse(pointField.fieldType().stored());
storedField = fields[1];
assertTrue(storedField.fieldType().stored());
assertThat(storedField.stringValue(), containsString(type.equals("date_range") ? "1477872000000" : "5"));
assertThat(storedField.stringValue(), containsString(getMax(type) + ""));
}
public void testNoBounds() throws Exception {
for (String type : TYPES) {
doTestNoBounds(type);
}
}
public void doTestNoBounds(String type) throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", type).field("store", true);
if (type.equals("date_range")) {
mapping = mapping.field("format", DATE_FORMAT);
}
mapping = mapping.endObject().endObject().endObject().endObject();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string()));
assertEquals(mapping.string(), mapper.mappingSource().toString());
// test no bounds specified
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("field")
.endObject()
.endObject().bytes());
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
IndexableField pointField = fields[0];
assertEquals(2, pointField.fieldType().pointDimensionCount());
assertFalse(pointField.fieldType().stored());
IndexableField storedField = fields[1];
assertTrue(storedField.fieldType().stored());
assertThat(storedField.stringValue(), containsString(type.equals("date_range") ? Long.MAX_VALUE+"" : getMax(type)+""));
assertThat(storedField.stringValue(), containsString(getMax(type) + ""));
}
/**
 * An integer_range mapping must reject date-only parameters: defining a "format"
 * on a non-date range type should fail with an IllegalArgumentException.
 */
public void testIllegalArguments() throws Exception {
    final XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties").startObject("field").field("type", RangeFieldMapper.RangeType.INTEGER.name)
        .field("format", DATE_FORMAT).endObject().endObject().endObject().endObject();
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
        () -> parser.parse("type", new CompressedXContent(mapping.string())));
    assertThat(e.getMessage(), containsString("should not define a dateTimeFormatter"));
}
}

View File

@ -0,0 +1,185 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.document.DoubleRangeField;
import org.apache.lucene.document.FloatRangeField;
import org.apache.lucene.document.IntRangeField;
import org.apache.lucene.document.LongRangeField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.RangeFieldMapper.RangeType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.test.IndexSettingsModule;
import org.joda.time.DateTime;
import org.junit.Before;
import java.util.Locale;
/**
 * Tests for {@link RangeFieldMapper.RangeFieldType}, randomly exercising each
 * supported {@link RangeType} (integer, long, float, double, and date ranges)
 * across the WITHIN / CONTAINS / INTERSECTS query relations.
 */
public class RangeFieldTypeTests extends FieldTypeTestCase {
    RangeType type;
    /** Name of the mapped range field used by every generated query. */
    protected static final String FIELDNAME = "field";
    /** Gap between the randomly chosen lower bound and the derived upper bound. */
    protected static final int DISTANCE = 10;
    private static long nowInMillis;

    @Before
    public void setupProperties() {
        type = RandomPicks.randomFrom(random(), RangeType.values());
        nowInMillis = randomPositiveLong();
        if (type == RangeType.DATE) {
            // formatter and locale are only meaningful for date ranges, so the
            // corresponding incompatibility modifiers are registered for DATE only
            addModifier(new Modifier("format", true) {
                @Override
                public void modify(MappedFieldType ft) {
                    ((RangeFieldMapper.RangeFieldType) ft).setDateTimeFormatter(Joda.forPattern("basic_week_date", Locale.ROOT));
                }
            });
            addModifier(new Modifier("locale", true) {
                @Override
                public void modify(MappedFieldType ft) {
                    ((RangeFieldMapper.RangeFieldType) ft).setDateTimeFormatter(Joda.forPattern("date_optional_time", Locale.CANADA));
                }
            });
        }
    }

    @Override
    protected RangeFieldMapper.RangeFieldType createDefaultFieldType() {
        return new RangeFieldMapper.RangeFieldType(type);
    }

    /**
     * Builds a range query with random bounds, random bound inclusiveness, and a
     * random {@link ShapeRelation}, then verifies the field type produces exactly
     * the Lucene range query expected for the current range type.
     */
    public void testRangeQuery() throws Exception {
        Settings indexSettings = Settings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), indexSettings);
        QueryShardContext context = new QueryShardContext(0, idxSettings, null, null, null, null, null, null, null, null, null,
                () -> nowInMillis);
        RangeFieldMapper.RangeFieldType ft = new RangeFieldMapper.RangeFieldType(type);
        ft.setName(FIELDNAME);
        ft.setIndexOptions(IndexOptions.DOCS);
        ShapeRelation relation = RandomPicks.randomFrom(random(), ShapeRelation.values());
        boolean includeLower = random().nextBoolean();
        boolean includeUpper = random().nextBoolean();
        Object from = nextFrom();
        Object to = nextTo(from);
        assertEquals(getExpectedRangeQuery(relation, from, to, includeLower, includeUpper),
            ft.rangeQuery(from, to, includeLower, includeUpper, relation, context));
    }

    /** Dispatches to the type-specific expected-query builder (FLOAT is the default case). */
    private Query getExpectedRangeQuery(ShapeRelation relation, Object from, Object to, boolean includeLower, boolean includeUpper) {
        switch (type) {
            case DATE:
                return getDateRangeQuery(relation, (DateTime)from, (DateTime)to, includeLower, includeUpper);
            case INTEGER:
                return getIntRangeQuery(relation, (int)from, (int)to, includeLower, includeUpper);
            case LONG:
                return getLongRangeQuery(relation, (long)from, (long)to, includeLower, includeUpper);
            case DOUBLE:
                return getDoubleRangeQuery(relation, (double)from, (double)to, includeLower, includeUpper);
            default:
                return getFloatRangeQuery(relation, (float)from, (float)to, includeLower, includeUpper);
        }
    }

    /** Date ranges are queried as epoch milliseconds, so delegate to the long variant. */
    private Query getDateRangeQuery(ShapeRelation relation, DateTime from, DateTime to, boolean includeLower, boolean includeUpper) {
        return getLongRangeQuery(relation, from.getMillis(), to.getMillis(), includeLower, includeUpper);
    }

    /** Expected int query: exclusive bounds are tightened by one. */
    private Query getIntRangeQuery(ShapeRelation relation, int from, int to, boolean includeLower, boolean includeUpper) {
        int[] lower = new int[] {from + (includeLower ? 0 : 1)};
        int[] upper = new int[] {to - (includeUpper ? 0 : 1)};
        if (relation == ShapeRelation.WITHIN) {
            return IntRangeField.newWithinQuery(FIELDNAME, lower, upper);
        } else if (relation == ShapeRelation.CONTAINS) {
            return IntRangeField.newContainsQuery(FIELDNAME, lower, upper);
        }
        return IntRangeField.newIntersectsQuery(FIELDNAME, lower, upper);
    }

    /** Expected long query: exclusive bounds are tightened by one. */
    private Query getLongRangeQuery(ShapeRelation relation, long from, long to, boolean includeLower, boolean includeUpper) {
        long[] lower = new long[] {from + (includeLower ? 0 : 1)};
        long[] upper = new long[] {to - (includeUpper ? 0 : 1)};
        if (relation == ShapeRelation.WITHIN) {
            return LongRangeField.newWithinQuery(FIELDNAME, lower, upper);
        } else if (relation == ShapeRelation.CONTAINS) {
            return LongRangeField.newContainsQuery(FIELDNAME, lower, upper);
        }
        return LongRangeField.newIntersectsQuery(FIELDNAME, lower, upper);
    }

    /** Expected float query: exclusive bounds are tightened to the adjacent representable value. */
    private Query getFloatRangeQuery(ShapeRelation relation, float from, float to, boolean includeLower, boolean includeUpper) {
        float[] lower = new float[] {includeLower ? from : Math.nextUp(from)};
        float[] upper = new float[] {includeUpper ? to : Math.nextDown(to)};
        if (relation == ShapeRelation.WITHIN) {
            return FloatRangeField.newWithinQuery(FIELDNAME, lower, upper);
        } else if (relation == ShapeRelation.CONTAINS) {
            return FloatRangeField.newContainsQuery(FIELDNAME, lower, upper);
        }
        return FloatRangeField.newIntersectsQuery(FIELDNAME, lower, upper);
    }

    /** Expected double query: exclusive bounds are tightened to the adjacent representable value. */
    private Query getDoubleRangeQuery(ShapeRelation relation, double from, double to, boolean includeLower, boolean includeUpper) {
        double[] lower = new double[] {includeLower ? from : Math.nextUp(from)};
        double[] upper = new double[] {includeUpper ? to : Math.nextDown(to)};
        if (relation == ShapeRelation.WITHIN) {
            return DoubleRangeField.newWithinQuery(FIELDNAME, lower, upper);
        } else if (relation == ShapeRelation.CONTAINS) {
            return DoubleRangeField.newContainsQuery(FIELDNAME, lower, upper);
        }
        return DoubleRangeField.newIntersectsQuery(FIELDNAME, lower, upper);
    }

    /**
     * Picks a random lower bound for the current type, scaled down so that
     * adding DISTANCE in {@link #nextTo(Object)} cannot overflow.
     */
    private Object nextFrom() {
        switch (type) {
            case INTEGER:
                return (int)(random().nextInt() * 0.5 - DISTANCE);
            case DATE:
                return DateTime.now();
            case LONG:
                return (long)(random().nextLong() * 0.5 - DISTANCE);
            case FLOAT:
                return (float)(random().nextFloat() * 0.5 - DISTANCE);
            default:
                return random().nextDouble() * 0.5 - DISTANCE;
        }
    }

    /** Derives the upper bound as {@code from} advanced by DISTANCE (days for dates). */
    private Object nextTo(Object from) {
        switch (type) {
            case INTEGER:
                return (Integer)from + DISTANCE;
            case DATE:
                return DateTime.now().plusDays(DISTANCE);
            case LONG:
                return (Long)from + DISTANCE;
            case DOUBLE:
                return (Double)from + DISTANCE;
            default:
                return (Float)from + DISTANCE;
        }
    }
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.query;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.search.LegacyNumericRangeQuery;
@ -28,6 +29,7 @@ import org.apache.lucene.search.TermRangeQuery;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.LegacyDateFieldMapper;
@ -39,6 +41,7 @@ import org.elasticsearch.test.AbstractQueryTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.chrono.ISOChronology;
import org.locationtech.spatial4j.shape.SpatialRelation;
import java.io.IOException;
import java.util.HashMap;
@ -62,13 +65,13 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
switch (randomIntBetween(0, 2)) {
case 0:
// use mapped integer field for numeric range queries
query = new RangeQueryBuilder(INT_FIELD_NAME);
query = new RangeQueryBuilder(randomBoolean() ? INT_FIELD_NAME : INT_RANGE_FIELD_NAME);
query.from(randomIntBetween(1, 100));
query.to(randomIntBetween(101, 200));
break;
case 1:
// use mapped date field, using date string representation
query = new RangeQueryBuilder(DATE_FIELD_NAME);
query = new RangeQueryBuilder(randomBoolean() ? DATE_FIELD_NAME : DATE_RANGE_FIELD_NAME);
query.from(new DateTime(System.currentTimeMillis() - randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
query.to(new DateTime(System.currentTimeMillis() + randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
// Create timestamp option only then we have a date mapper,
@ -81,6 +84,9 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
query.format("yyyy-MM-dd'T'HH:mm:ss.SSSZZ");
}
}
if (query.fieldName().equals(DATE_RANGE_FIELD_NAME)) {
query.relation(RandomPicks.randomFrom(random(), ShapeRelation.values()).getRelationName());
}
break;
case 2:
default:
@ -121,8 +127,11 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
@Override
protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
if (getCurrentTypes().length == 0 || (queryBuilder.fieldName().equals(DATE_FIELD_NAME) == false
&& queryBuilder.fieldName().equals(INT_FIELD_NAME) == false)) {
if (getCurrentTypes().length == 0 ||
(queryBuilder.fieldName().equals(DATE_FIELD_NAME) == false
&& queryBuilder.fieldName().equals(INT_FIELD_NAME) == false
&& queryBuilder.fieldName().equals(DATE_RANGE_FIELD_NAME) == false
&& queryBuilder.fieldName().equals(INT_RANGE_FIELD_NAME) == false)) {
assertThat(query, instanceOf(TermRangeQuery.class));
TermRangeQuery termRangeQuery = (TermRangeQuery) query;
assertThat(termRangeQuery.getField(), equalTo(queryBuilder.fieldName()));
@ -224,6 +233,9 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
}
assertEquals(IntPoint.newRangeQuery(INT_FIELD_NAME, minInt, maxInt), query);
}
} else if (queryBuilder.fieldName().equals(DATE_RANGE_FIELD_NAME)
|| queryBuilder.fieldName().equals(INT_RANGE_FIELD_NAME)) {
// TODO: can't check RangeFieldQuery because it's currently package private (this will change)
} else {
throw new UnsupportedOperationException();
}

View File

@ -77,7 +77,8 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
String fieldName;
do {
fieldName = getRandomFieldName();
} while (fieldName.equals(GEO_POINT_FIELD_NAME) || fieldName.equals(GEO_SHAPE_FIELD_NAME));
} while (fieldName.equals(GEO_POINT_FIELD_NAME) || fieldName.equals(GEO_SHAPE_FIELD_NAME)
|| fieldName.equals(INT_RANGE_FIELD_NAME) || fieldName.equals(DATE_RANGE_FIELD_NAME));
Object[] values = new Object[randomInt(5)];
for (int i = 0; i < values.length; i++) {
values[i] = getRandomValueForFieldName(fieldName);

View File

@ -344,3 +344,28 @@ buildRestTests.doFirst {
buildRestTests.setups['bank'] =
buildRestTests.setups['bank'].replace('#bank_data#', accounts)
}
buildRestTests.setups['range_index'] = '''
- do :
indices.create:
index: range_index
body:
settings:
number_of_shards: 2
number_of_replicas: 1
mappings:
my_type:
properties:
expected_attendees:
type: integer_range
time_frame:
type: date_range
format: yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis
- do:
bulk:
index: range_index
type: my_type
refresh: true
body: |
{"index":{"_id": 1}}
{"expected_attendees": {"gte": 10, "lte": 20}, "time_frame": {"gte": "2015-10-31 12:00:00", "lte": "2015-11-01"}}'''

View File

@ -12,6 +12,7 @@ string:: <<text,`text`>> and <<keyword,`keyword`>>
<<date>>:: `date`
<<boolean>>:: `boolean`
<<binary>>:: `binary`
<<range>>:: `integer_range`, `float_range`, `long_range`, `double_range`, `date_range`
[float]
=== Complex datatypes
@ -55,6 +56,8 @@ include::types/array.asciidoc[]
include::types/binary.asciidoc[]
include::types/range.asciidoc[]
include::types/boolean.asciidoc[]
include::types/date.asciidoc[]

View File

@ -0,0 +1,145 @@
[[range]]
=== Range datatypes
The following range types are supported:
[horizontal]
`integer_range`:: A range of signed 32-bit integers with a minimum value of +-2^31^+ and maximum of +2^31^-1+.
`float_range`:: A range of single-precision 32-bit IEEE 754 floating point values.
`long_range`:: A range of signed 64-bit integers with a minimum value of +-2^63^+ and maximum of +2^63^-1+.
`double_range`:: A range of double-precision 64-bit IEEE 754 floating point values.
`date_range`:: A range of date values, represented internally as signed 64-bit integer milliseconds elapsed since the system epoch.
Below is an example of configuring a mapping with various range fields followed by an example that indexes several range types.
[source,js]
--------------------------------------------------
PUT range_index
{
"mappings": {
"my_type": {
"properties": {
"expected_attendees": {
"type": "integer_range"
},
"time_frame": {
"type": "date_range", <1>
"format": "yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis"
}
}
}
}
}
PUT range_index/my_type/1
{
"expected_attendees" : { <2>
"gte" : 10,
"lte" : 20
},
"time_frame" : { <3>
"gte" : "2015-10-31 12:00:00", <4>
"lte" : "2015-11-01"
}
}
--------------------------------------------------
//CONSOLE
The following is an example of a range query over the `date_range` field named "time_frame".
[source,js]
--------------------------------------------------
POST range_index/_search
{
"query" : {
"range" : {
"time_frame" : { <5>
"gte" : "2015-10-31",
"lte" : "2015-11-01",
"relation" : "within" <6>
}
}
}
}
--------------------------------------------------
// CONSOLE
// TEST[setup:range_index]
The result produced by the above query.
[source,js]
--------------------------------------------------
{
"took": 13,
"timed_out": false,
"_shards" : {
"total": 2,
"successful": 2,
"failed": 0
},
"hits" : {
"total" : 1,
"max_score" : 1.0,
"hits" : [
{
"_index" : "range_index",
"_type" : "my_type",
"_id" : "1",
"_score" : 1.0,
"_source" : {
"expected_attendees" : {
"gte" : 10, "lte" : 20
},
"time_frame" : {
"gte" : "2015-10-31 12:00:00", "lte" : "2015-11-01"
}
}
}
]
}
}
--------------------------------------------------
// TESTRESPONSE[s/"took": 13/"took" : $body.took/]
<1> `date_range` types accept the same field parameters defined by the <<date, `date`>> type.
<2> Example indexing a meeting with 10 to 20 attendees.
<3> Date ranges accept the same format as described in <<ranges-on-dates, date range queries>>.
<4> Example date range using date time stamp. This also accepts <<date-math, date math>> formatting, or "now" for system time.
<5> Range queries work the same as described in <<query-dsl-range-query, range query>>.
<6> Range queries over range <<mapping-types, fields>> support a `relation` parameter which can be one of `WITHIN`, `CONTAINS`,
`INTERSECTS` (default).
[[range-params]]
==== Parameters for range fields
The following parameters are accepted by range types:
[horizontal]
<<coerce,`coerce`>>::
Try to convert strings to numbers and truncate fractions for integers.
Accepts `true` (default) and `false`.
<<mapping-boost,`boost`>>::
Mapping field-level query time boosting. Accepts a floating point number, defaults
to `1.0`.
<<include-in-all,`include_in_all`>>::
Whether or not the field value should be included in the
<<mapping-all-field,`_all`>> field. Accepts `true` or `false`. Defaults
to `false` if <<mapping-index,`index`>> is set to `false`, or if a parent
<<object,`object`>> field sets `include_in_all` to `false`.
Otherwise defaults to `true`.
<<mapping-index,`index`>>::
Should the field be searchable? Accepts `true` (default) and `false`.
<<mapping-store,`store`>>::
Whether the field value should be stored and retrievable separately from
the <<mapping-source-field,`_source`>> field. Accepts `true` or `false`
(default).

View File

@ -23,6 +23,7 @@ import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
@ -357,7 +358,7 @@ public class PercolatorFieldMapper extends FieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
throw new UnsupportedOperationException("should not be invoked");
}

View File

@ -23,10 +23,10 @@ import java.io.IOException;
import java.util.List;
import java.util.Map;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
@ -144,7 +144,7 @@ public class Murmur3FieldMapper extends FieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields)
protected void parseCreateField(ParseContext context, List<IndexableField> fields)
throws IOException {
final Object value;
if (context.externalValueSet()) {

View File

@ -19,8 +19,8 @@
package org.elasticsearch.index.mapper.size;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
@ -176,7 +176,7 @@ public class SizeFieldMapper extends MetadataFieldMapper {
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
if (!enabledState.enabled) {
return;
}

View File

@ -138,17 +138,20 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
public static final String STRING_FIELD_NAME = "mapped_string";
protected static final String STRING_FIELD_NAME_2 = "mapped_string_2";
protected static final String INT_FIELD_NAME = "mapped_int";
protected static final String INT_RANGE_FIELD_NAME = "mapped_int_range";
protected static final String DOUBLE_FIELD_NAME = "mapped_double";
protected static final String BOOLEAN_FIELD_NAME = "mapped_boolean";
protected static final String DATE_FIELD_NAME = "mapped_date";
protected static final String DATE_RANGE_FIELD_NAME = "mapped_date_range";
protected static final String OBJECT_FIELD_NAME = "mapped_object";
protected static final String GEO_POINT_FIELD_NAME = "mapped_geo_point";
protected static final String LEGACY_GEO_POINT_FIELD_MAPPING = "type=geo_point,lat_lon=true,geohash=true,geohash_prefix=true";
protected static final String GEO_SHAPE_FIELD_NAME = "mapped_geo_shape";
protected static final String[] MAPPED_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME,
BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME, GEO_SHAPE_FIELD_NAME};
protected static final String[] MAPPED_LEAF_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME,
BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, GEO_POINT_FIELD_NAME};
protected static final String[] MAPPED_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, INT_RANGE_FIELD_NAME,
DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME,
GEO_SHAPE_FIELD_NAME};
protected static final String[] MAPPED_LEAF_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, INT_RANGE_FIELD_NAME,
DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, GEO_POINT_FIELD_NAME, };
private static final int NUMBER_OF_TESTQUERIES = 20;
private static ServiceHolder serviceHolder;
@ -1116,9 +1119,11 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
STRING_FIELD_NAME, "type=text",
STRING_FIELD_NAME_2, "type=keyword",
INT_FIELD_NAME, "type=integer",
INT_RANGE_FIELD_NAME, "type=integer_range",
DOUBLE_FIELD_NAME, "type=double",
BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date",
DATE_RANGE_FIELD_NAME, "type=date_range",
OBJECT_FIELD_NAME, "type=object",
GEO_POINT_FIELD_NAME, geoFieldMapping,
GEO_SHAPE_FIELD_NAME, "type=geo_shape"