Merge pull request #14082 from jpountz/remove/uninverted_numeric_fielddata
Remove "uninverted" and "binary" fielddata support for numeric and boolean fields.
commit f4e9f69f68
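In short: numeric and boolean fields no longer support in-memory ("uninverted") or binary-doc-values fielddata; requesting fielddata on such a field now fails unless doc values are used. A rough sketch of the resulting behavior, using names from the diff below (the lookup line is illustrative, not code from this commit; field and index names are placeholders):

    // Illustrative only: after this commit the numeric types map to a builder
    // that refuses to load fielddata instead of uninverting the field.
    IndexFieldData.Builder builder = buildersByType.get("long"); // MISSING_DOC_VALUES_BUILDER
    // builder.build(...) now throws IllegalStateException:
    //   Can't load fielddata on [my_field] of index [my_index] because fielddata
    //   is unsupported on fields of type [long]. Use doc values instead.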
@@ -30,12 +30,9 @@ import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.plain.BytesBinaryDVIndexFieldData;
import org.elasticsearch.index.fielddata.plain.DisabledIndexFieldData;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.fielddata.plain.DoubleArrayIndexFieldData;
import org.elasticsearch.index.fielddata.plain.FloatArrayIndexFieldData;
import org.elasticsearch.index.fielddata.plain.GeoPointBinaryDVIndexFieldData;
import org.elasticsearch.index.fielddata.plain.GeoPointDoubleArrayIndexFieldData;
import org.elasticsearch.index.fielddata.plain.IndexIndexFieldData;
import org.elasticsearch.index.fielddata.plain.PackedArrayIndexFieldData;
import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;

@@ -64,9 +61,18 @@ public class IndexFieldDataService extends AbstractIndexComponent {
    public static final String FIELDDATA_CACHE_KEY = "index.fielddata.cache";
    public static final String FIELDDATA_CACHE_VALUE_NODE = "node";

    private static final IndexFieldData.Builder MISSING_DOC_VALUES_BUILDER = new IndexFieldData.Builder() {
        @Override
        public IndexFieldData<?> build(Index index, Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
            throw new IllegalStateException("Can't load fielddata on [" + fieldType.names().fullName()
                    + "] of index [" + index.getName() + "] because fielddata is unsupported on fields of type ["
                    + fieldType.fieldDataType().getType() + "]. Use doc values instead.");
        }
    };

    private static final String ARRAY_FORMAT = "array";
    private static final String DISABLED_FORMAT = "disabled";
    private static final String DOC_VALUES_FORMAT = "doc_values";
    private static final String ARRAY_FORMAT = "array";
    private static final String PAGED_BYTES_FORMAT = "paged_bytes";

    private final static Map<String, IndexFieldData.Builder> buildersByType;

@@ -77,19 +83,18 @@ public class IndexFieldDataService extends AbstractIndexComponent {
    static {
        Map<String, IndexFieldData.Builder> buildersByTypeBuilder = new HashMap<>();
        buildersByTypeBuilder.put("string", new PagedBytesIndexFieldData.Builder());
        buildersByTypeBuilder.put("float", new FloatArrayIndexFieldData.Builder());
        buildersByTypeBuilder.put("double", new DoubleArrayIndexFieldData.Builder());
        buildersByTypeBuilder.put("byte", new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.BYTE));
        buildersByTypeBuilder.put("short", new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.SHORT));
        buildersByTypeBuilder.put("int", new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.INT));
        buildersByTypeBuilder.put("long", new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.LONG));
        buildersByTypeBuilder.put("float", MISSING_DOC_VALUES_BUILDER);
        buildersByTypeBuilder.put("double", MISSING_DOC_VALUES_BUILDER);
        buildersByTypeBuilder.put("byte", MISSING_DOC_VALUES_BUILDER);
        buildersByTypeBuilder.put("short", MISSING_DOC_VALUES_BUILDER);
        buildersByTypeBuilder.put("int", MISSING_DOC_VALUES_BUILDER);
        buildersByTypeBuilder.put("long", MISSING_DOC_VALUES_BUILDER);
        buildersByTypeBuilder.put("geo_point", new GeoPointDoubleArrayIndexFieldData.Builder());
        buildersByTypeBuilder.put(ParentFieldMapper.NAME, new ParentChildIndexFieldData.Builder());
        buildersByTypeBuilder.put(IndexFieldMapper.NAME, new IndexIndexFieldData.Builder());
        buildersByTypeBuilder.put("binary", new DisabledIndexFieldData.Builder());
        buildersByTypeBuilder.put(BooleanFieldMapper.CONTENT_TYPE,
                new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.BOOLEAN));
        buildersByType = unmodifiableMap(buildersByTypeBuilder);
        buildersByTypeBuilder.put(BooleanFieldMapper.CONTENT_TYPE, MISSING_DOC_VALUES_BUILDER);
        buildersByType = unmodifiableMap(buildersByTypeBuilder);


        docValuesBuildersByType = MapBuilder.<String, IndexFieldData.Builder>newMapBuilder()

@@ -110,27 +115,21 @@ public class IndexFieldDataService extends AbstractIndexComponent {
                .put(Tuple.tuple("string", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder())
                .put(Tuple.tuple("string", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())

                .put(Tuple.tuple("float", ARRAY_FORMAT), new FloatArrayIndexFieldData.Builder())
                .put(Tuple.tuple("float", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.FLOAT))
                .put(Tuple.tuple("float", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())

                .put(Tuple.tuple("double", ARRAY_FORMAT), new DoubleArrayIndexFieldData.Builder())
                .put(Tuple.tuple("double", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.DOUBLE))
                .put(Tuple.tuple("double", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())

                .put(Tuple.tuple("byte", ARRAY_FORMAT), new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.BYTE))
                .put(Tuple.tuple("byte", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.BYTE))
                .put(Tuple.tuple("byte", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())

                .put(Tuple.tuple("short", ARRAY_FORMAT), new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.SHORT))
                .put(Tuple.tuple("short", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.SHORT))
                .put(Tuple.tuple("short", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())

                .put(Tuple.tuple("int", ARRAY_FORMAT), new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.INT))
                .put(Tuple.tuple("int", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.INT))
                .put(Tuple.tuple("int", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())

                .put(Tuple.tuple("long", ARRAY_FORMAT), new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.LONG))
                .put(Tuple.tuple("long", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.LONG))
                .put(Tuple.tuple("long", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())

@@ -141,7 +140,6 @@ public class IndexFieldDataService extends AbstractIndexComponent {
                .put(Tuple.tuple("binary", DOC_VALUES_FORMAT), new BytesBinaryDVIndexFieldData.Builder())
                .put(Tuple.tuple("binary", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())

                .put(Tuple.tuple(BooleanFieldMapper.CONTENT_TYPE, ARRAY_FORMAT), new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.BOOLEAN))
                .put(Tuple.tuple(BooleanFieldMapper.CONTENT_TYPE, DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.BOOLEAN))
                .put(Tuple.tuple(BooleanFieldMapper.CONTENT_TYPE, DISABLED_FORMAT), new DisabledIndexFieldData.Builder())

@@ -163,12 +161,6 @@ public class IndexFieldDataService extends AbstractIndexComponent {
    };
    private volatile IndexFieldDataCache.Listener listener = DEFAULT_NOOP_LISTENER;


    // We need to cache fielddata on the _parent field because of 1.x indices.
    // When we don't support 1.x anymore (3.0) then remove this caching
    // This variable needs to be read/written under lock
    private IndexFieldData<?> parentIndexFieldData;

    @Inject
    public IndexFieldDataService(Index index, @IndexSettings Settings indexSettings, IndicesFieldDataCache indicesFieldDataCache,
                                 CircuitBreakerService circuitBreakerService, MapperService mapperService) {

@@ -179,7 +171,6 @@ public class IndexFieldDataService extends AbstractIndexComponent {
    }

    public synchronized void clear() {
        parentIndexFieldData = null;
        List<Throwable> exceptions = new ArrayList<>(0);
        final Collection<IndexFieldDataCache> fieldDataCacheValues = fieldDataCaches.values();
        for (IndexFieldDataCache cache : fieldDataCacheValues) {

@@ -194,9 +185,6 @@ public class IndexFieldDataService extends AbstractIndexComponent {
    }

    public synchronized void clearField(final String fieldName) {
        if (ParentFieldMapper.NAME.equals(fieldName)) {
            parentIndexFieldData = null;
        }
        List<Throwable> exceptions = new ArrayList<>(0);
        final IndexFieldDataCache cache = fieldDataCaches.remove(fieldName);
        if (cache != null) {
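For context, the service resolves builders from two static registries: buildersByType, keyed by mapping type, and a tuple-keyed map binding (type, format) pairs. A plausible sketch of the lookup order (hypothetical helper and map name; the actual resolution code is outside this diff):

    // Hypothetical sketch -- prefer an explicit (type, format) binding,
    // then fall back to the per-type default registered above.
    static IndexFieldData.Builder resolve(String type, String format) {
        IndexFieldData.Builder builder = buildersByTypeAndFormat.get(Tuple.tuple(type, format));
        if (builder == null) {
            builder = buildersByType.get(type); // e.g. "long" -> MISSING_DOC_VALUES_BUILDER now
        }
        return builder;
    }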
@@ -1,225 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.fielddata.plain;

import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.util.ByteUtils;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.search.MultiValueMode;

import java.io.IOException;
import java.util.Collection;
import java.util.Collections;

public class BinaryDVNumericIndexFieldData extends DocValuesIndexFieldData implements IndexNumericFieldData {

    private final NumericType numericType;

    public BinaryDVNumericIndexFieldData(Index index, Names fieldNames, NumericType numericType, FieldDataType fieldDataType) {
        super(index, fieldNames, fieldDataType);
        if (numericType == null) {
            throw new IllegalArgumentException("numericType must be non-null");
        }
        this.numericType = numericType;
    }

    @Override
    public org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource comparatorSource(final Object missingValue, final MultiValueMode sortMode, Nested nested) {
        switch (numericType) {
            case FLOAT:
                return new FloatValuesComparatorSource(this, missingValue, sortMode, nested);
            case DOUBLE:
                return new DoubleValuesComparatorSource(this, missingValue, sortMode, nested);
            default:
                assert !numericType.isFloatingPoint();
                return new LongValuesComparatorSource(this, missingValue, sortMode, nested);
        }
    }

    @Override
    public AtomicNumericFieldData load(LeafReaderContext context) {
        try {
            final BinaryDocValues values = DocValues.getBinary(context.reader(), fieldNames.indexName());
            if (numericType.isFloatingPoint()) {
                return new AtomicDoubleFieldData(-1) {

                    @Override
                    public SortedNumericDoubleValues getDoubleValues() {
                        switch (numericType) {
                            case FLOAT:
                                return new BinaryAsSortedNumericFloatValues(values);
                            case DOUBLE:
                                return new BinaryAsSortedNumericDoubleValues(values);
                            default:
                                throw new IllegalArgumentException("" + numericType);
                        }
                    }

                    @Override
                    public Collection<Accountable> getChildResources() {
                        return Collections.emptyList();
                    }

                };
            } else {
                return new AtomicLongFieldData(0) {

                    @Override
                    public SortedNumericDocValues getLongValues() {
                        return new BinaryAsSortedNumericDocValues(values);
                    }

                    @Override
                    public Collection<Accountable> getChildResources() {
                        return Collections.emptyList();
                    }

                };
            }
        } catch (IOException e) {
            throw new IllegalStateException("Cannot load doc values", e);
        }
    }

    @Override
    public AtomicNumericFieldData loadDirect(LeafReaderContext context) throws Exception {
        return load(context);
    }

    @Override
    public NumericType getNumericType() {
        return numericType;
    }

    private static class BinaryAsSortedNumericDocValues extends SortedNumericDocValues {

        private final BinaryDocValues values;
        private BytesRef bytes;
        private final ByteArrayDataInput in = new ByteArrayDataInput();
        private long[] longs = new long[1];
        private int count = 0;

        BinaryAsSortedNumericDocValues(BinaryDocValues values) {
            this.values = values;
        }

        @Override
        public void setDocument(int docId) {
            bytes = values.get(docId);
            in.reset(bytes.bytes, bytes.offset, bytes.length);
            if (!in.eof()) {
                // first value uses vLong on top of zig-zag encoding, then deltas are encoded using vLong
                long previousValue = longs[0] = ByteUtils.zigZagDecode(ByteUtils.readVLong(in));
                count = 1;
                while (!in.eof()) {
                    longs = ArrayUtil.grow(longs, count + 1);
                    previousValue = longs[count++] = previousValue + ByteUtils.readVLong(in);
                }
            } else {
                count = 0;
            }
        }

        @Override
        public int count() {
            return count;
        }

        @Override
        public long valueAt(int index) {
            return longs[index];
        }

    }

    private static class BinaryAsSortedNumericDoubleValues extends SortedNumericDoubleValues {

        private final BinaryDocValues values;
        private BytesRef bytes;
        private int valueCount = 0;

        BinaryAsSortedNumericDoubleValues(BinaryDocValues values) {
            this.values = values;
        }

        @Override
        public void setDocument(int docId) {
            bytes = values.get(docId);
            assert bytes.length % 8 == 0;
            valueCount = bytes.length / 8;
        }

        @Override
        public int count() {
            return valueCount;
        }

        @Override
        public double valueAt(int index) {
            return ByteUtils.readDoubleLE(bytes.bytes, bytes.offset + index * 8);
        }

    }

    private static class BinaryAsSortedNumericFloatValues extends SortedNumericDoubleValues {

        private final BinaryDocValues values;
        private BytesRef bytes;
        private int valueCount = 0;

        BinaryAsSortedNumericFloatValues(BinaryDocValues values) {
            this.values = values;
        }

        @Override
        public void setDocument(int docId) {
            bytes = values.get(docId);
            assert bytes.length % 4 == 0;
            valueCount = bytes.length / 4;
        }

        @Override
        public int count() {
            return valueCount;
        }

        @Override
        public double valueAt(int index) {
            return ByteUtils.readFloatLE(bytes.bytes, bytes.offset + index * 4);
        }

    }
}
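The deleted class above unpacked multi-valued longs from a single BinaryDocValues blob: the first value is zig-zag encoded and written as a vLong, and each later value is stored as a vLong delta from its predecessor. A self-contained sketch of that decoding scheme (plain arrays instead of Lucene's ByteArrayDataInput, for illustration):

    // Minimal decoder matching the comment in setDocument() above (illustrative).
    static long zigZagDecode(long v) {
        return (v >>> 1) ^ -(v & 1); // maps 0,1,2,3,... back to 0,-1,1,-2,...
    }

    static long[] decodeLongs(byte[] b) {
        long[] out = new long[4];
        int count = 0, pos = 0;
        long previous = 0;
        while (pos < b.length) {
            long v = 0;
            int shift = 0;
            byte cur;
            do { // vLong: 7 payload bits per byte, high bit flags continuation
                cur = b[pos++];
                v |= (long) (cur & 0x7F) << shift;
                shift += 7;
            } while ((cur & 0x80) != 0);
            previous = (count == 0) ? zigZagDecode(v) : previous + v; // first value zig-zag, then deltas
            if (count == out.length) {
                out = java.util.Arrays.copyOf(out, count * 2);
            }
            out[count++] = previous;
        }
        return java.util.Arrays.copyOf(out, count);
    }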
@@ -20,7 +20,6 @@
package org.elasticsearch.index.fielddata.plain;

import org.apache.lucene.index.IndexReader;
import org.elasticsearch.Version;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;

@@ -33,7 +32,6 @@ import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.IdFieldMapper;
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.indices.breaker.CircuitBreakerService;

@@ -104,13 +102,7 @@ public abstract class DocValuesIndexFieldData {
            assert numericType == null;
            return new BinaryDVIndexFieldData(index, fieldNames, fieldType.fieldDataType());
        } else if (numericType != null) {
            if (TimestampFieldMapper.NAME.equals(fieldNames.indexName())
                    || Version.indexCreated(indexSettings).onOrAfter(Version.V_1_4_0_Beta1)) {
                return new SortedNumericDVIndexFieldData(index, fieldNames, numericType, fieldType.fieldDataType());
            } else {
                // prior to ES 1.4: multi-valued numerics were boxed inside a byte[] as BINARY
                return new BinaryDVNumericIndexFieldData(index, fieldNames, numericType, fieldType.fieldDataType());
            }
            return new SortedNumericDVIndexFieldData(index, fieldNames, numericType, fieldType.fieldDataType());
        } else {
            return new SortedSetDVOrdinalsIndexFieldData(index, cache, indexSettings, fieldNames, breakerService, fieldType.fieldDataType());
        }
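With the pre-1.4 binary fallback removed, numeric fields with doc values always go through SortedNumericDVIndexFieldData, i.e. Lucene's native SortedNumericDocValues. For orientation, multi-valued numeric doc values at the Lucene level look roughly like this (generic Lucene usage, not code from this commit):

    // Sketch: one SortedNumericDocValuesField instance per value makes the
    // field multi-valued; values come back per document in sorted order.
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.SortedNumericDocValuesField;

    Document doc = new Document();
    doc.add(new SortedNumericDocValuesField("price", 42L));
    doc.add(new SortedNumericDocValuesField("price", 7L));
    // Read side: DocValues.getSortedNumeric(leafReader, "price") -- no
    // uninverting and no byte[]-boxed BINARY encoding needed anymore.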
@@ -1,243 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.fielddata.plain;

import org.apache.lucene.index.*;
import org.apache.lucene.util.*;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.DoubleArray;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

/**
 */
public class DoubleArrayIndexFieldData extends AbstractIndexFieldData<AtomicNumericFieldData> implements IndexNumericFieldData {

    private final CircuitBreakerService breakerService;

    public static class Builder implements IndexFieldData.Builder {

        @Override
        public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
                                       CircuitBreakerService breakerService, MapperService mapperService) {
            return new DoubleArrayIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, breakerService);
        }
    }

    public DoubleArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames,
                                     FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) {
        super(index, indexSettings, fieldNames, fieldDataType, cache);
        this.breakerService = breakerService;
    }

    @Override
    public NumericType getNumericType() {
        return NumericType.DOUBLE;
    }

    @Override
    public AtomicNumericFieldData loadDirect(LeafReaderContext context) throws Exception {

        final LeafReader reader = context.reader();
        Terms terms = reader.terms(getFieldNames().indexName());
        AtomicNumericFieldData data = null;
        // TODO: Use an actual estimator to estimate before loading.
        NonEstimatingEstimator estimator = new NonEstimatingEstimator(breakerService.getBreaker(CircuitBreaker.FIELDDATA));
        if (terms == null) {
            data = AtomicDoubleFieldData.empty(reader.maxDoc());
            estimator.afterLoad(null, data.ramBytesUsed());
            return data;
        }
        // TODO: how can we guess the number of terms? numerics end up creating more terms per value...
        DoubleArray values = BigArrays.NON_RECYCLING_INSTANCE.newDoubleArray(128);

        final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
        boolean success = false;
        try (OrdinalsBuilder builder = new OrdinalsBuilder(reader.maxDoc(), acceptableTransientOverheadRatio)) {
            final BytesRefIterator iter = builder.buildFromTerms(getNumericType().wrapTermsEnum(terms.iterator()));
            BytesRef term;
            long numTerms = 0;
            while ((term = iter.next()) != null) {
                values = BigArrays.NON_RECYCLING_INSTANCE.grow(values, numTerms + 1);
                values.set(numTerms++, NumericUtils.sortableLongToDouble(NumericUtils.prefixCodedToLong(term)));
            }
            values = BigArrays.NON_RECYCLING_INSTANCE.resize(values, numTerms);
            final DoubleArray finalValues = values;
            final Ordinals build = builder.build(fieldDataType.getSettings());
            RandomAccessOrds ordinals = build.ordinals();
            if (FieldData.isMultiValued(ordinals) || CommonSettings.getMemoryStorageHint(fieldDataType) == CommonSettings.MemoryStorageFormat.ORDINALS) {
                final long ramBytesUsed = build.ramBytesUsed() + values.ramBytesUsed();
                data = new AtomicDoubleFieldData(ramBytesUsed) {

                    @Override
                    public SortedNumericDoubleValues getDoubleValues() {
                        return withOrdinals(build, finalValues, reader.maxDoc());
                    }

                    @Override
                    public Collection<Accountable> getChildResources() {
                        List<Accountable> resources = new ArrayList<>();
                        resources.add(Accountables.namedAccountable("ordinals", build));
                        resources.add(Accountables.namedAccountable("values", finalValues));
                        return Collections.unmodifiableList(resources);
                    }

                };
            } else {
                final BitSet set = builder.buildDocsWithValuesSet();

                // there's sweet spot where due to low unique value count, using ordinals will consume less memory
                long singleValuesArraySize = reader.maxDoc() * RamUsageEstimator.NUM_BYTES_DOUBLE + (set == null ? 0 : set.ramBytesUsed());
                long uniqueValuesArraySize = values.ramBytesUsed();
                long ordinalsSize = build.ramBytesUsed();
                if (uniqueValuesArraySize + ordinalsSize < singleValuesArraySize) {
                    final long ramBytesUsed = build.ramBytesUsed() + values.ramBytesUsed();
                    success = true;
                    return data = new AtomicDoubleFieldData(ramBytesUsed) {

                        @Override
                        public SortedNumericDoubleValues getDoubleValues() {
                            return withOrdinals(build, finalValues, reader.maxDoc());
                        }

                        @Override
                        public Collection<Accountable> getChildResources() {
                            List<Accountable> resources = new ArrayList<>();
                            resources.add(Accountables.namedAccountable("ordinals", build));
                            resources.add(Accountables.namedAccountable("values", finalValues));
                            return Collections.unmodifiableList(resources);
                        }

                    };
                }

                int maxDoc = reader.maxDoc();
                final DoubleArray sValues = BigArrays.NON_RECYCLING_INSTANCE.newDoubleArray(maxDoc);
                for (int i = 0; i < maxDoc; i++) {
                    ordinals.setDocument(i);
                    final long ordinal = ordinals.nextOrd();
                    if (ordinal != SortedSetDocValues.NO_MORE_ORDS) {
                        sValues.set(i, values.get(ordinal));
                    }
                }
                assert sValues.size() == maxDoc;
                final long ramBytesUsed = sValues.ramBytesUsed() + (set == null ? 0 : set.ramBytesUsed());
                data = new AtomicDoubleFieldData(ramBytesUsed) {

                    @Override
                    public SortedNumericDoubleValues getDoubleValues() {
                        return singles(sValues, set);
                    }

                    @Override
                    public Collection<Accountable> getChildResources() {
                        List<Accountable> resources = new ArrayList<>();
                        resources.add(Accountables.namedAccountable("values", sValues));
                        resources.add(Accountables.namedAccountable("missing bitset", set));
                        return Collections.unmodifiableList(resources);
                    }

                };
                success = true;
            }
            success = true;
            return data;
        } finally {
            if (success) {
                estimator.afterLoad(null, data.ramBytesUsed());
            }

        }

    }

    @Override
    protected AtomicNumericFieldData empty(int maxDoc) {
        return AtomicDoubleFieldData.empty(maxDoc);
    }

    @Override
    public XFieldComparatorSource comparatorSource(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested) {
        return new DoubleValuesComparatorSource(this, missingValue, sortMode, nested);
    }

    private static SortedNumericDoubleValues withOrdinals(Ordinals ordinals, final DoubleArray values, int maxDoc) {
        final RandomAccessOrds ords = ordinals.ordinals();
        final SortedDocValues singleOrds = DocValues.unwrapSingleton(ords);
        if (singleOrds != null) {
            final NumericDoubleValues singleValues = new NumericDoubleValues() {
                @Override
                public double get(int docID) {
                    final int ord = singleOrds.getOrd(docID);
                    if (ord >= 0) {
                        return values.get(singleOrds.getOrd(docID));
                    } else {
                        return 0;
                    }
                }
            };
            return FieldData.singleton(singleValues, DocValues.docsWithValue(ords, maxDoc));
        } else {
            return new SortedNumericDoubleValues() {
                @Override
                public double valueAt(int index) {
                    return values.get(ords.ordAt(index));
                }

                @Override
                public void setDocument(int doc) {
                    ords.setDocument(doc);
                }

                @Override
                public int count() {
                    return ords.cardinality();
                }
            };
        }
    }

    private static SortedNumericDoubleValues singles(final DoubleArray values, Bits set) {
        final NumericDoubleValues numValues = new NumericDoubleValues() {
            @Override
            public double get(int docID) {
                return values.get(docID);
            }
        };
        return FieldData.singleton(numValues, set);
    }
}
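DoubleArrayIndexFieldData above was one of the "uninverted" implementations this commit deletes: it walked the terms dictionary and decoded each prefix-coded term back into a double via NumericUtils.sortableLongToDouble(NumericUtils.prefixCodedToLong(term)). The sortable mapping is a small bit trick; a quick sketch of the roundtrip (these two NumericUtils methods exist in Lucene; the assertion is illustrative):

    // Sortable-bits roundtrip: the encoded long orders the same way as the
    // double it represents, so iterating the terms dictionary yields the
    // unique values in ascending numeric order -- which is what lets
    // loadDirect() above build its value array and ordinals in one pass.
    import org.apache.lucene.util.NumericUtils;

    long bits = NumericUtils.doubleToSortableLong(-2.5);
    double back = NumericUtils.sortableLongToDouble(bits);
    assert back == -2.5;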
@@ -1,241 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.index.fielddata.plain;

import org.apache.lucene.index.*;
import org.apache.lucene.util.*;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.FloatArray;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSource;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

/**
 */
public class FloatArrayIndexFieldData extends AbstractIndexFieldData<AtomicNumericFieldData> implements IndexNumericFieldData {

    private final CircuitBreakerService breakerService;

    public static class Builder implements IndexFieldData.Builder {

        @Override
        public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
                                       CircuitBreakerService breakerService, MapperService mapperService) {
            return new FloatArrayIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, breakerService);
        }
    }

    public FloatArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames,
                                    FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) {
        super(index, indexSettings, fieldNames, fieldDataType, cache);
        this.breakerService = breakerService;
    }

    @Override
    public NumericType getNumericType() {
        return NumericType.FLOAT;
    }

    @Override
    public AtomicNumericFieldData loadDirect(LeafReaderContext context) throws Exception {
        final LeafReader reader = context.reader();
        Terms terms = reader.terms(getFieldNames().indexName());
        AtomicNumericFieldData data = null;
        // TODO: Use an actual estimator to estimate before loading.
        NonEstimatingEstimator estimator = new NonEstimatingEstimator(breakerService.getBreaker(CircuitBreaker.FIELDDATA));
        if (terms == null) {
            data = AtomicDoubleFieldData.empty(reader.maxDoc());
            estimator.afterLoad(null, data.ramBytesUsed());
            return data;
        }
        // TODO: how can we guess the number of terms? numerics end up creating more terms per value...
        FloatArray values = BigArrays.NON_RECYCLING_INSTANCE.newFloatArray(128);

        final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
        boolean success = false;
        try (OrdinalsBuilder builder = new OrdinalsBuilder(reader.maxDoc(), acceptableTransientOverheadRatio)) {
            BytesRefIterator iter = builder.buildFromTerms(getNumericType().wrapTermsEnum(terms.iterator()));
            BytesRef term;
            long numTerms = 0;
            while ((term = iter.next()) != null) {
                values = BigArrays.NON_RECYCLING_INSTANCE.grow(values, numTerms + 1);
                values.set(numTerms++, NumericUtils.sortableIntToFloat(NumericUtils.prefixCodedToInt(term)));
            }
            values = BigArrays.NON_RECYCLING_INSTANCE.resize(values, numTerms);
            final FloatArray finalValues = values;
            final Ordinals build = builder.build(fieldDataType.getSettings());
            RandomAccessOrds ordinals = build.ordinals();
            if (FieldData.isMultiValued(ordinals) || CommonSettings.getMemoryStorageHint(fieldDataType) == CommonSettings.MemoryStorageFormat.ORDINALS) {
                final long ramBytesUsed = build.ramBytesUsed() + values.ramBytesUsed();
                data = new AtomicDoubleFieldData(ramBytesUsed) {

                    @Override
                    public SortedNumericDoubleValues getDoubleValues() {
                        return withOrdinals(build, finalValues, reader.maxDoc());
                    }

                    @Override
                    public Collection<Accountable> getChildResources() {
                        List<Accountable> resources = new ArrayList<>();
                        resources.add(Accountables.namedAccountable("ordinals", build));
                        resources.add(Accountables.namedAccountable("values", finalValues));
                        return Collections.unmodifiableList(resources);
                    }

                };
            } else {
                final BitSet set = builder.buildDocsWithValuesSet();

                // there's sweet spot where due to low unique value count, using ordinals will consume less memory
                long singleValuesArraySize = reader.maxDoc() * RamUsageEstimator.NUM_BYTES_FLOAT + (set == null ? 0 : set.ramBytesUsed());
                long uniqueValuesArraySize = values.ramBytesUsed();
                long ordinalsSize = build.ramBytesUsed();
                if (uniqueValuesArraySize + ordinalsSize < singleValuesArraySize) {
                    final long ramBytesUsed = build.ramBytesUsed() + values.ramBytesUsed();
                    success = true;
                    return data = new AtomicDoubleFieldData(ramBytesUsed) {

                        @Override
                        public SortedNumericDoubleValues getDoubleValues() {
                            return withOrdinals(build, finalValues, reader.maxDoc());
                        }

                        @Override
                        public Collection<Accountable> getChildResources() {
                            List<Accountable> resources = new ArrayList<>();
                            resources.add(Accountables.namedAccountable("ordinals", build));
                            resources.add(Accountables.namedAccountable("values", finalValues));
                            return Collections.unmodifiableList(resources);
                        }

                    };
                }

                int maxDoc = reader.maxDoc();
                final FloatArray sValues = BigArrays.NON_RECYCLING_INSTANCE.newFloatArray(maxDoc);
                for (int i = 0; i < maxDoc; i++) {
                    ordinals.setDocument(i);
                    final long ordinal = ordinals.nextOrd();
                    if (ordinal != SortedSetDocValues.NO_MORE_ORDS) {
                        sValues.set(i, values.get(ordinal));
                    }
                }
                assert sValues.size() == maxDoc;
                final long ramBytesUsed = sValues.ramBytesUsed() + (set == null ? 0 : set.ramBytesUsed());
                data = new AtomicDoubleFieldData(ramBytesUsed) {

                    @Override
                    public SortedNumericDoubleValues getDoubleValues() {
                        return singles(sValues, set);
                    }

                    @Override
                    public Collection<Accountable> getChildResources() {
                        List<Accountable> resources = new ArrayList<>();
                        resources.add(Accountables.namedAccountable("values", sValues));
                        resources.add(Accountables.namedAccountable("missing bitset", set));
                        return Collections.unmodifiableList(resources);
                    }

                };
                success = true;
            }
            success = true;
            return data;
        } finally {
            if (success) {
                estimator.afterLoad(null, data.ramBytesUsed());
            }

        }

    }

    @Override
    protected AtomicNumericFieldData empty(int maxDoc) {
        return AtomicDoubleFieldData.empty(maxDoc);
    }

    @Override
    public XFieldComparatorSource comparatorSource(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested) {
        return new FloatValuesComparatorSource(this, missingValue, sortMode, nested);
    }

    private static SortedNumericDoubleValues withOrdinals(Ordinals ordinals, final FloatArray values, int maxDoc) {
        final RandomAccessOrds ords = ordinals.ordinals();
        final SortedDocValues singleOrds = DocValues.unwrapSingleton(ords);
        if (singleOrds != null) {
            final NumericDoubleValues singleValues = new NumericDoubleValues() {
                @Override
                public double get(int docID) {
                    final int ord = singleOrds.getOrd(docID);
                    if (ord >= 0) {
                        return values.get(singleOrds.getOrd(docID));
                    } else {
                        return 0;
                    }
                }
            };
            return FieldData.singleton(singleValues, DocValues.docsWithValue(ords, maxDoc));
        } else {
            return new SortedNumericDoubleValues() {
                @Override
                public double valueAt(int index) {
                    return values.get(ords.ordAt(index));
                }

                @Override
                public void setDocument(int doc) {
                    ords.setDocument(doc);
                }

                @Override
                public int count() {
                    return ords.cardinality();
                }
            };
        }
    }

    private static SortedNumericDoubleValues singles(final FloatArray values, Bits set) {
        final NumericDoubleValues numValues = new NumericDoubleValues() {
            @Override
            public double get(int docID) {
                return values.get(docID);
            }
        };
        return FieldData.singleton(numValues, set);
    }
}
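Both deleted array classes pick between a per-document values array and an ordinals-based layout by comparing estimated sizes, per the "sweet spot" comment in loadDirect(). A back-of-the-envelope example with invented numbers:

    // Hypothetical numbers for the comparison computed in loadDirect() above:
    int maxDoc = 1_000_000;
    long singleValuesArraySize = maxDoc * 8L;   // one double slot per doc: ~8 MB
    long uniqueValuesArraySize = 10 * 8L;       // only 10 distinct values: 80 bytes
    long ordinalsSize = 500_000L;               // packed ords, under 1 byte/doc here
    // 80 B + ~0.5 MB < 8 MB, so the ordinals + unique-values layout wins and
    // the per-document single-value array is never materialized.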
@@ -1,540 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.fielddata.plain;

import org.apache.lucene.index.*;
import org.apache.lucene.util.*;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.packed.PackedInts;
import org.apache.lucene.util.packed.PackedLongValues;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode;

import java.io.IOException;
import java.util.*;

/**
 * Stores numeric data into bit-packed arrays for better memory efficiency.
 */
public class PackedArrayIndexFieldData extends AbstractIndexFieldData<AtomicNumericFieldData> implements IndexNumericFieldData {

    public static class Builder implements IndexFieldData.Builder {

        private NumericType numericType;

        public Builder setNumericType(NumericType numericType) {
            this.numericType = numericType;
            return this;
        }

        @Override
        public IndexFieldData<AtomicNumericFieldData> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType,
                                                            IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
            return new PackedArrayIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, numericType, breakerService);
        }
    }

    private final NumericType numericType;
    private final CircuitBreakerService breakerService;

    public PackedArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames,
                                     FieldDataType fieldDataType, IndexFieldDataCache cache, NumericType numericType,
                                     CircuitBreakerService breakerService) {
        super(index, indexSettings, fieldNames, fieldDataType, cache);
        Objects.requireNonNull(numericType);
        if (!EnumSet.of(NumericType.BOOLEAN, NumericType.BYTE, NumericType.SHORT, NumericType.INT, NumericType.LONG).contains(numericType)) {
            throw new IllegalArgumentException(getClass().getSimpleName() + " only supports integer types, not " + numericType);
        }
        this.numericType = numericType;
        this.breakerService = breakerService;
    }

    @Override
    public NumericType getNumericType() {
        return numericType;
    }

    @Override
    public AtomicNumericFieldData loadDirect(LeafReaderContext context) throws Exception {
        final LeafReader reader = context.reader();
        Terms terms = reader.terms(getFieldNames().indexName());
        AtomicNumericFieldData data = null;
        PackedArrayEstimator estimator = new PackedArrayEstimator(breakerService.getBreaker(CircuitBreaker.FIELDDATA), getNumericType(), getFieldNames().fullName());
        if (terms == null) {
            data = AtomicLongFieldData.empty(reader.maxDoc());
            estimator.adjustForNoTerms(data.ramBytesUsed());
            return data;
        }
        // TODO: how can we guess the number of terms? numerics end up creating more terms per value...
        // Lucene encodes numeric data so that the lexicographical (encoded) order matches the integer order so we know the sequence of
        // longs is going to be monotonically increasing
        final PackedLongValues.Builder valuesBuilder = PackedLongValues.monotonicBuilder(PackedInts.COMPACT);

        final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
        TermsEnum termsEnum = estimator.beforeLoad(terms);
        assert !getNumericType().isFloatingPoint();
        boolean success = false;
        try (OrdinalsBuilder builder = new OrdinalsBuilder(-1, reader.maxDoc(), acceptableTransientOverheadRatio)) {
            BytesRefIterator iter = builder.buildFromTerms(termsEnum);
            BytesRef term;
            while ((term = iter.next()) != null) {
                final long value = numericType.toLong(term);
                valuesBuilder.add(value);
            }
            final PackedLongValues values = valuesBuilder.build();
            final Ordinals build = builder.build(fieldDataType.getSettings());
            CommonSettings.MemoryStorageFormat formatHint = CommonSettings.getMemoryStorageHint(fieldDataType);

            RandomAccessOrds ordinals = build.ordinals();
            if (FieldData.isMultiValued(ordinals) || formatHint == CommonSettings.MemoryStorageFormat.ORDINALS) {
                final long ramBytesUsed = build.ramBytesUsed() + values.ramBytesUsed();
                data = new AtomicLongFieldData(ramBytesUsed) {

                    @Override
                    public SortedNumericDocValues getLongValues() {
                        return withOrdinals(build, values, reader.maxDoc());
                    }

                    @Override
                    public Collection<Accountable> getChildResources() {
                        List<Accountable> resources = new ArrayList<>();
                        resources.add(Accountables.namedAccountable("ordinals", build));
                        resources.add(Accountables.namedAccountable("values", values));
                        return Collections.unmodifiableList(resources);
                    }
                };
            } else {
                final BitSet docsWithValues = builder.buildDocsWithValuesSet();

                long minV, maxV;
                minV = maxV = 0;
                if (values.size() > 0) {
                    minV = values.get(0);
                    maxV = values.get(values.size() - 1);
                }


                final float acceptableOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_overhead_ratio", PackedInts.DEFAULT);
                final int pageSize = fieldDataType.getSettings().getAsInt("single_value_page_size", 1024);

                if (formatHint == null) {
                    formatHint = chooseStorageFormat(reader, values, build, ordinals, minV, maxV, acceptableOverheadRatio, pageSize);
                }

                logger.trace("single value format for field [{}] set to [{}]", getFieldNames().fullName(), formatHint);

                switch (formatHint) {
                    case PACKED:
                        // Encode document without a value with a special value
                        long missingV = 0;
                        if (docsWithValues != null) {
                            if ((maxV - minV + 1) == values.size()) {
                                // values are dense
                                if (minV > Long.MIN_VALUE) {
                                    missingV = --minV;
                                } else {
                                    assert maxV != Long.MAX_VALUE;
                                    missingV = ++maxV;
                                }
                            } else {
                                for (long i = 1; i < values.size(); ++i) {
                                    if (values.get(i) > values.get(i - 1) + 1) {
                                        missingV = values.get(i - 1) + 1;
                                        break;
                                    }
                                }
                            }
                            missingV -= minV;
                        }
                        final long missingValue = missingV;
                        final long minValue = minV;
                        final long maxValue = maxV;

                        final long valuesDelta = maxValue - minValue;
                        int bitsRequired = valuesDelta < 0 ? 64 : PackedInts.bitsRequired(valuesDelta);
                        final PackedInts.Mutable sValues = PackedInts.getMutable(reader.maxDoc(), bitsRequired, acceptableOverheadRatio);

                        if (docsWithValues != null) {
                            sValues.fill(0, sValues.size(), missingV);
                        }

                        for (int i = 0; i < reader.maxDoc(); i++) {
                            ordinals.setDocument(i);
                            if (ordinals.cardinality() > 0) {
                                final long ord = ordinals.ordAt(0);
                                long value = values.get(ord);
                                sValues.set(i, value - minValue);
                            }
                        }
                        long ramBytesUsed = values.ramBytesUsed() + (docsWithValues == null ? 0 : docsWithValues.ramBytesUsed());
                        data = new AtomicLongFieldData(ramBytesUsed) {

                            @Override
                            public SortedNumericDocValues getLongValues() {
                                if (docsWithValues == null) {
                                    return singles(sValues, minValue);
                                } else {
                                    return sparseSingles(sValues, minValue, missingValue, reader.maxDoc());
                                }
                            }

                            @Override
                            public Collection<Accountable> getChildResources() {
                                List<Accountable> resources = new ArrayList<>();
                                resources.add(Accountables.namedAccountable("values", sValues));
                                if (docsWithValues != null) {
                                    resources.add(Accountables.namedAccountable("missing bitset", docsWithValues));
                                }
                                return Collections.unmodifiableList(resources);
                            }

                        };
                        break;
                    case PAGED:
                        final PackedLongValues.Builder dpValues = PackedLongValues.deltaPackedBuilder(pageSize, acceptableOverheadRatio);

                        long lastValue = 0;
                        for (int i = 0; i < reader.maxDoc(); i++) {
                            ordinals.setDocument(i);
                            if (ordinals.cardinality() > 0) {
                                final long ord = ordinals.ordAt(i);
                                lastValue = values.get(ord);
                            }
                            dpValues.add(lastValue);
                        }
                        final PackedLongValues pagedValues = dpValues.build();
                        ramBytesUsed = pagedValues.ramBytesUsed();
                        if (docsWithValues != null) {
                            ramBytesUsed += docsWithValues.ramBytesUsed();
                        }
                        data = new AtomicLongFieldData(ramBytesUsed) {

                            @Override
                            public SortedNumericDocValues getLongValues() {
                                return pagedSingles(pagedValues, docsWithValues);
                            }

                            @Override
                            public Collection<Accountable> getChildResources() {
                                List<Accountable> resources = new ArrayList<>();
                                resources.add(Accountables.namedAccountable("values", pagedValues));
                                if (docsWithValues != null) {
                                    resources.add(Accountables.namedAccountable("missing bitset", docsWithValues));
                                }
                                return Collections.unmodifiableList(resources);
                            }

                        };
                        break;
                    case ORDINALS:
                        ramBytesUsed = build.ramBytesUsed() + values.ramBytesUsed();
                        data = new AtomicLongFieldData(ramBytesUsed) {

                            @Override
                            public SortedNumericDocValues getLongValues() {
                                return withOrdinals(build, values, reader.maxDoc());
                            }

                            @Override
                            public Collection<Accountable> getChildResources() {
                                List<Accountable> resources = new ArrayList<>();
                                resources.add(Accountables.namedAccountable("ordinals", build));
                                resources.add(Accountables.namedAccountable("values", values));
                                return Collections.unmodifiableList(resources);
                            }

                        };
                        break;
                    default:
                        throw new ElasticsearchException("unknown memory format: " + formatHint);
                }

            }

            success = true;
            return data;
        } finally {
            if (!success) {
                // If something went wrong, unwind any current estimations we've made
                estimator.afterLoad(termsEnum, 0);
            } else {
                // Adjust as usual, based on the actual size of the field data
                estimator.afterLoad(termsEnum, data.ramBytesUsed());
            }

        }

    }

    protected CommonSettings.MemoryStorageFormat chooseStorageFormat(LeafReader reader, PackedLongValues values, Ordinals build, RandomAccessOrds ordinals,
                                                                     long minValue, long maxValue, float acceptableOverheadRatio, int pageSize) {

        CommonSettings.MemoryStorageFormat format;

        // estimate memory usage for a single packed array
        long packedDelta = maxValue - minValue + 1; // allow for a missing value
        // valuesDelta can be negative if the difference between max and min values overflows the positive side of longs.
        int bitsRequired = packedDelta < 0 ? 64 : PackedInts.bitsRequired(packedDelta);
        PackedInts.FormatAndBits formatAndBits = PackedInts.fastestFormatAndBits(reader.maxDoc(), bitsRequired, acceptableOverheadRatio);
        final long singleValuesSize = formatAndBits.format.longCount(PackedInts.VERSION_CURRENT, reader.maxDoc(), formatAndBits.bitsPerValue) * 8L;

        // ordinal memory usage
        final long ordinalsSize = build.ramBytesUsed() + values.ramBytesUsed();

        // estimate the memory signature of paged packing
        long pagedSingleValuesSize = (reader.maxDoc() / pageSize + 1) * RamUsageEstimator.NUM_BYTES_OBJECT_REF; // array of pages
        int pageIndex = 0;
        long pageMinOrdinal = Long.MAX_VALUE;
        long pageMaxOrdinal = Long.MIN_VALUE;
        for (int i = 1; i < reader.maxDoc(); ++i, pageIndex = (pageIndex + 1) % pageSize) {
            ordinals.setDocument(i);
            if (ordinals.cardinality() > 0) {
                long ordinal = ordinals.ordAt(0);
                pageMaxOrdinal = Math.max(ordinal, pageMaxOrdinal);
                pageMinOrdinal = Math.min(ordinal, pageMinOrdinal);
            }
            if (pageIndex == pageSize - 1) {
                // end of page, we now know enough to estimate memory usage
                pagedSingleValuesSize += getPageMemoryUsage(values, acceptableOverheadRatio, pageSize, pageMinOrdinal, pageMaxOrdinal);

                pageMinOrdinal = Long.MAX_VALUE;
                pageMaxOrdinal = Long.MIN_VALUE;
            }
        }

        if (pageIndex > 0) {
            // last page estimation
            pageIndex++;
            pagedSingleValuesSize += getPageMemoryUsage(values, acceptableOverheadRatio, pageSize, pageMinOrdinal, pageMaxOrdinal);
        }

        if (ordinalsSize < singleValuesSize) {
            if (ordinalsSize < pagedSingleValuesSize) {
                format = CommonSettings.MemoryStorageFormat.ORDINALS;
            } else {
                format = CommonSettings.MemoryStorageFormat.PAGED;
            }
        } else {
            if (pagedSingleValuesSize < singleValuesSize) {
                format = CommonSettings.MemoryStorageFormat.PAGED;
            } else {
                format = CommonSettings.MemoryStorageFormat.PACKED;
            }
        }
        return format;
    }
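An aside on chooseStorageFormat above: the PACKED estimate depends only on the value range (plus one slot reserved for missing docs), not on how many values there are. A worked example with invented numbers:

    // Invented numbers to make the packed-size estimate concrete.
    long minValue = 1_000, maxValue = 1_100;
    long packedDelta = maxValue - minValue + 1;              // 101; the +1 reserves a missing-value slot
    int bitsRequired = PackedInts.bitsRequired(packedDelta); // 7, since 2^7 = 128 >= 101
    // At ~7 bits per doc, 1M docs cost roughly 1_000_000 * 7 / 8 ~= 875 KB
    // (modulo the chosen PackedInts format's overhead), versus 8 MB for a
    // plain long[] -- which is why the three estimates are compared.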
|
||||
private long getPageMemoryUsage(PackedLongValues values, float acceptableOverheadRatio, int pageSize, long pageMinOrdinal, long pageMaxOrdinal) {
|
||||
int bitsRequired;
|
||||
long pageMemorySize = 0;
|
||||
PackedInts.FormatAndBits formatAndBits;
|
||||
if (pageMaxOrdinal == Long.MIN_VALUE) {
|
||||
// empty page - will use the null reader which just stores size
|
||||
pageMemorySize += RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_INT);
|
||||
|
||||
} else {
|
||||
long pageMinValue = values.get(pageMinOrdinal);
|
||||
long pageMaxValue = values.get(pageMaxOrdinal);
|
||||
long pageDelta = pageMaxValue - pageMinValue;
|
||||
if (pageDelta != 0) {
|
||||
bitsRequired = pageDelta < 0 ? 64 : PackedInts.bitsRequired(pageDelta);
|
||||
formatAndBits = PackedInts.fastestFormatAndBits(pageSize, bitsRequired, acceptableOverheadRatio);
|
||||
pageMemorySize += formatAndBits.format.longCount(PackedInts.VERSION_CURRENT, pageSize, formatAndBits.bitsPerValue) * RamUsageEstimator.NUM_BYTES_LONG;
|
||||
pageMemorySize += RamUsageEstimator.NUM_BYTES_LONG; // min value per page storage
|
||||
} else {
|
||||
// empty page
|
||||
pageMemorySize += RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_INT);
|
||||
}
|
||||
}
|
||||
return pageMemorySize;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected AtomicNumericFieldData empty(int maxDoc) {
|
||||
return AtomicLongFieldData.empty(maxDoc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XFieldComparatorSource comparatorSource(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested) {
|
||||
return new LongValuesComparatorSource(this, missingValue, sortMode, nested);
|
||||
}
|
||||
|
||||
/**
|
||||
* Estimator that wraps numeric field data loading in a
|
||||
* RamAccountingTermsEnum, adjusting the breaker after data has been
|
||||
* loaded
|
||||
*/
|
||||
public class PackedArrayEstimator implements PerValueEstimator {
|
||||
|
||||
private final CircuitBreaker breaker;
|
||||
private final NumericType type;
|
||||
private final String fieldName;
|
||||
|
||||
public PackedArrayEstimator(CircuitBreaker breaker, NumericType type, String fieldName) {
|
||||
this.breaker = breaker;
|
||||
this.type = type;
|
||||
this.fieldName = fieldName;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return number of bytes per term, based on the NumericValue.requiredBits()
|
||||
*/
|
||||
@Override
|
||||
public long bytesPerValue(BytesRef term) {
|
||||
// Estimate about about 0.8 (8 / 10) compression ratio for
|
||||
// numbers, but at least 4 bytes
|
||||
return Math.max(type.requiredBits() / 10, 4);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return A TermsEnum wrapped in a RamAccountingTermsEnum
|
||||
*/
|
||||
@Override
|
||||
public TermsEnum beforeLoad(Terms terms) throws IOException {
|
||||
return new RamAccountingTermsEnum(type.wrapTermsEnum(terms.iterator()), breaker, this, this.fieldName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adjusts the breaker based on the aggregated value from the RamAccountingTermsEnum
|
||||
*
|
||||
* @param termsEnum terms that were wrapped and loaded
|
||||
* @param actualUsed actual field data memory usage
|
||||
*/
|
||||
@Override
|
||||
public void afterLoad(TermsEnum termsEnum, long actualUsed) {
|
||||
assert termsEnum instanceof RamAccountingTermsEnum;
|
||||
long estimatedBytes = ((RamAccountingTermsEnum) termsEnum).getTotalBytes();
|
||||
breaker.addWithoutBreaking(-(estimatedBytes - actualUsed));
|
||||
}
|
||||
|
||||
/**
|
||||
* Adjust the breaker when no terms were actually loaded, but the field
|
||||
* data takes up space regardless. For instance, when ordinals are
|
||||
* used.
|
||||
*
|
||||
* @param actualUsed bytes actually used
|
||||
*/
|
||||
public void adjustForNoTerms(long actualUsed) {
|
||||
breaker.addWithoutBreaking(actualUsed);
|
||||
}
|
||||
}
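    // Worked example of the bytesPerValue() heuristic above (illustrative
    // arithmetic only, not part of the original source): LONG requires 64 bits,
    // so the pre-load estimate is max(64 / 10, 4) = 6 bytes per term; SHORT
    // requires 16 bits, so max(16 / 10, 4) = 4, i.e. the 4-byte floor applies.
    // Whatever this over- or under-estimates is settled against the breaker in
    // afterLoad() once the real usage is known.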

    private static SortedNumericDocValues withOrdinals(Ordinals ordinals, final LongValues values, int maxDoc) {
        final RandomAccessOrds ords = ordinals.ordinals();
        final SortedDocValues singleOrds = DocValues.unwrapSingleton(ords);
        if (singleOrds != null) {
            final NumericDocValues singleValues = new NumericDocValues() {
                @Override
                public long get(int docID) {
                    final int ord = singleOrds.getOrd(docID);
                    if (ord >= 0) {
                        return values.get(ord);
                    } else {
                        return 0;
                    }
                }
            };
            return DocValues.singleton(singleValues, DocValues.docsWithValue(ords, maxDoc));
        } else {
            return new SortedNumericDocValues() {
                @Override
                public long valueAt(int index) {
                    return values.get(ords.ordAt(index));
                }

                @Override
                public void setDocument(int doc) {
                    ords.setDocument(doc);
                }

                @Override
                public int count() {
                    return ords.cardinality();
                }
            };
        }
    }

    private static SortedNumericDocValues singles(final NumericDocValues deltas, final long minValue) {
        final NumericDocValues values;
        if (minValue == 0) {
            values = deltas;
        } else {
            values = new NumericDocValues() {
                @Override
                public long get(int docID) {
                    return minValue + deltas.get(docID);
                }
            };
        }
        return DocValues.singleton(values, null);
    }

    private static SortedNumericDocValues sparseSingles(final NumericDocValues deltas, final long minValue, final long missingValue, final int maxDoc) {
        final NumericDocValues values = new NumericDocValues() {
            @Override
            public long get(int docID) {
                final long delta = deltas.get(docID);
                if (delta == missingValue) {
                    return 0;
                }
                return minValue + delta;
            }
        };
        final Bits docsWithFields = new Bits() {
            @Override
            public boolean get(int index) {
                return deltas.get(index) != missingValue;
            }

            @Override
            public int length() {
                return maxDoc;
            }
        };
        return DocValues.singleton(values, docsWithFields);
    }

    private static SortedNumericDocValues pagedSingles(final PackedLongValues values, final Bits docsWithValue) {
        return DocValues.singleton(new NumericDocValues() {
            // we need to wrap since NumericDocValues must return 0 when a doc has no value
            @Override
            public long get(int docID) {
                if (docsWithValue == null || docsWithValue.get(docID)) {
                    return values.get(docID);
                } else {
                    return 0;
                }
            }
        }, docsWithValue);
    }
}
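The PACKED/PAGED/ORDINALS choice above is driven purely by these byte estimates: plain packed storage pays for every document, paged storage pays per page plus one stored minimum per page, and ordinals pay for the ordinal map. The following standalone sketch reproduces the per-page arithmetic of getPageMemoryUsage with Lucene's PackedInts utilities; the page size and value range are made-up figures, not values taken from this commit.

import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.PackedInts;

public class PageCostSketch {
    public static void main(String[] args) {
        final int pageSize = 1024;                   // hypothetical values per page
        final float acceptableOverheadRatio = 0.25f; // speed/space trade-off knob
        // Hypothetical page whose values span [1000, 1100]: deltas fit in 7 bits.
        final long pageDelta = 1100 - 1000;
        final int bitsRequired = PackedInts.bitsRequired(pageDelta); // 7
        final PackedInts.FormatAndBits formatAndBits =
                PackedInts.fastestFormatAndBits(pageSize, bitsRequired, acceptableOverheadRatio);
        // Backing longs for the packed deltas, plus one long for the page minimum.
        long pageMemorySize = formatAndBits.format.longCount(
                PackedInts.VERSION_CURRENT, pageSize, formatAndBits.bitsPerValue)
                * RamUsageEstimator.NUM_BYTES_LONG;
        pageMemorySize += RamUsageEstimator.NUM_BYTES_LONG;
        System.out.println("estimated page cost: " + pageMemorySize + " bytes");
    }
}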
@ -1,509 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.fielddata;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopFieldDocs;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.search.MultiValueMode;

import java.util.Locale;

import static org.hamcrest.Matchers.equalTo;

public abstract class AbstractNumericFieldDataTestCase extends AbstractFieldDataImplTestCase {

    @Override
    protected abstract FieldDataType getFieldDataType();

    protected Settings.Builder getFieldDataSettings() {
        Settings.Builder builder = Settings.builder();
        IndexFieldData.CommonSettings.MemoryStorageFormat[] formats = IndexFieldData.CommonSettings.MemoryStorageFormat.values();
        int i = randomInt(formats.length);
        if (i < formats.length) {
            builder.put(IndexFieldData.CommonSettings.SETTING_MEMORY_STORAGE_HINT, formats[i].name().toLowerCase(Locale.ROOT));
        }
        return builder;
    }

    public void testSingleValueAllSetNumber() throws Exception {
        fillSingleValueAllSet();
        IndexNumericFieldData indexFieldData = getForField("value");
        AtomicNumericFieldData fieldData = indexFieldData.load(refreshReader());

        SortedNumericDocValues longValues = fieldData.getLongValues();

        assertThat(FieldData.isMultiValued(longValues), equalTo(false));

        longValues.setDocument(0);
        assertThat(longValues.count(), equalTo(1));
        assertThat(longValues.valueAt(0), equalTo(2L));

        longValues.setDocument(1);
        assertThat(longValues.count(), equalTo(1));
        assertThat(longValues.valueAt(0), equalTo(1L));

        longValues.setDocument(2);
        assertThat(longValues.count(), equalTo(1));
        assertThat(longValues.valueAt(0), equalTo(3L));

        SortedNumericDoubleValues doubleValues = fieldData.getDoubleValues();

        assertThat(FieldData.isMultiValued(doubleValues), equalTo(false));

        doubleValues.setDocument(0);
        assertThat(doubleValues.count(), equalTo(1));
        assertThat(doubleValues.valueAt(0), equalTo(2d));

        doubleValues.setDocument(1);
        assertThat(doubleValues.count(), equalTo(1));
        assertThat(doubleValues.valueAt(0), equalTo(1d));

        doubleValues.setDocument(2);
        assertThat(doubleValues.count(), equalTo(1));
        assertThat(doubleValues.valueAt(0), equalTo(3d));

        IndexSearcher searcher = new IndexSearcher(readerContext.reader());
        TopFieldDocs topDocs;

        topDocs = searcher.search(new MatchAllDocsQuery(), 10,
                new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.MIN, null))));
        assertThat(topDocs.totalHits, equalTo(3));
        assertThat(topDocs.scoreDocs[0].doc, equalTo(1));
        assertThat(topDocs.scoreDocs[1].doc, equalTo(0));
        assertThat(topDocs.scoreDocs[2].doc, equalTo(2));

        topDocs = searcher.search(new MatchAllDocsQuery(), 10,
                new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.MAX, null), true)));
        assertThat(topDocs.totalHits, equalTo(3));
        assertThat(topDocs.scoreDocs[0].doc, equalTo(2));
        assertThat(topDocs.scoreDocs[1].doc, equalTo(0));
        assertThat(topDocs.scoreDocs[2].doc, equalTo(1));
    }

    public void testSingleValueWithMissingNumber() throws Exception {
        fillSingleValueWithMissing();
        IndexNumericFieldData indexFieldData = getForField("value");
        AtomicNumericFieldData fieldData = indexFieldData.load(refreshReader());

        SortedNumericDocValues longValues = fieldData.getLongValues();

        assertThat(FieldData.isMultiValued(longValues), equalTo(false));

        longValues.setDocument(0);
        assertThat(longValues.count(), equalTo(1));
        assertThat(longValues.valueAt(0), equalTo(2L));

        longValues.setDocument(1);
        assertThat(longValues.count(), equalTo(0));

        longValues.setDocument(2);
        assertThat(longValues.count(), equalTo(1));
        assertThat(longValues.valueAt(0), equalTo(3L));

        SortedNumericDoubleValues doubleValues = fieldData.getDoubleValues();

        assertThat(FieldData.isMultiValued(doubleValues), equalTo(false));

        doubleValues.setDocument(0);
        assertThat(doubleValues.count(), equalTo(1));
        assertThat(doubleValues.valueAt(0), equalTo(2d));

        doubleValues.setDocument(1);
        assertThat(doubleValues.count(), equalTo(0));

        doubleValues.setDocument(2);
        assertThat(doubleValues.count(), equalTo(1));
        assertThat(doubleValues.valueAt(0), equalTo(3d));

        IndexSearcher searcher = new IndexSearcher(readerContext.reader());
        TopFieldDocs topDocs;

        topDocs = searcher.search(new MatchAllDocsQuery(), 10,
                new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.MIN, null)))); // defaults to _last
        assertThat(topDocs.totalHits, equalTo(3));
        assertThat(topDocs.scoreDocs[0].doc, equalTo(0));
        assertThat(topDocs.scoreDocs[1].doc, equalTo(2));
        assertThat(topDocs.scoreDocs[2].doc, equalTo(1));

        topDocs = searcher.search(new MatchAllDocsQuery(), 10,
                new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.MAX, null), true))); // defaults to _last
        assertThat(topDocs.totalHits, equalTo(3));
        assertThat(topDocs.scoreDocs[0].doc, equalTo(2));
        assertThat(topDocs.scoreDocs[1].doc, equalTo(0));
        assertThat(topDocs.scoreDocs[2].doc, equalTo(1));

        topDocs = searcher.search(new MatchAllDocsQuery(), 10,
                new Sort(new SortField("value", indexFieldData.comparatorSource("_first", MultiValueMode.MIN, null))));
        assertThat(topDocs.totalHits, equalTo(3));
        assertThat(topDocs.scoreDocs[0].doc, equalTo(1));
        assertThat(topDocs.scoreDocs[1].doc, equalTo(0));
        assertThat(topDocs.scoreDocs[2].doc, equalTo(2));

        topDocs = searcher.search(new MatchAllDocsQuery(), 10,
                new Sort(new SortField("value", indexFieldData.comparatorSource("_first", MultiValueMode.MAX, null), true)));
        assertThat(topDocs.totalHits, equalTo(3));
        assertThat(topDocs.scoreDocs[0].doc, equalTo(1));
        assertThat(topDocs.scoreDocs[1].doc, equalTo(2));
        assertThat(topDocs.scoreDocs[2].doc, equalTo(0));

        topDocs = searcher.search(new MatchAllDocsQuery(), 10,
                new Sort(new SortField("value", indexFieldData.comparatorSource("1", MultiValueMode.MIN, null))));
        assertThat(topDocs.totalHits, equalTo(3));
        assertThat(topDocs.scoreDocs[0].doc, equalTo(1));
        assertThat(topDocs.scoreDocs[1].doc, equalTo(0));
        assertThat(topDocs.scoreDocs[2].doc, equalTo(2));

        topDocs = searcher.search(new MatchAllDocsQuery(), 10,
                new Sort(new SortField("value", indexFieldData.comparatorSource("1", MultiValueMode.MAX, null), true)));
        assertThat(topDocs.totalHits, equalTo(3));
        assertThat(topDocs.scoreDocs[0].doc, equalTo(2));
        assertThat(topDocs.scoreDocs[1].doc, equalTo(0));
        assertThat(topDocs.scoreDocs[2].doc, equalTo(1));
    }

    public void testMultiValueAllSetNumber() throws Exception {
        fillMultiValueAllSet();
        IndexNumericFieldData indexFieldData = getForField("value");
        AtomicNumericFieldData fieldData = indexFieldData.load(refreshReader());

        SortedNumericDocValues longValues = fieldData.getLongValues();

        assertThat(FieldData.isMultiValued(longValues), equalTo(true));

        longValues.setDocument(0);
        assertThat(longValues.count(), equalTo(2));
        assertThat(longValues.valueAt(0), equalTo(2L));
        assertThat(longValues.valueAt(1), equalTo(4L));

        longValues.setDocument(1);
        assertThat(longValues.count(), equalTo(1));
        assertThat(longValues.valueAt(0), equalTo(1L));

        longValues.setDocument(2);
        assertThat(longValues.count(), equalTo(1));
        assertThat(longValues.valueAt(0), equalTo(3L));

        SortedNumericDoubleValues doubleValues = fieldData.getDoubleValues();

        assertThat(FieldData.isMultiValued(doubleValues), equalTo(true));

        doubleValues.setDocument(0);
        assertThat(doubleValues.count(), equalTo(2));
        assertThat(doubleValues.valueAt(0), equalTo(2d));
        assertThat(doubleValues.valueAt(1), equalTo(4d));

        doubleValues.setDocument(1);
        assertThat(doubleValues.count(), equalTo(1));
        assertThat(doubleValues.valueAt(0), equalTo(1d));

        doubleValues.setDocument(2);
        assertThat(doubleValues.count(), equalTo(1));
        assertThat(doubleValues.valueAt(0), equalTo(3d));
    }

    public void testMultiValueWithMissingNumber() throws Exception {
        fillMultiValueWithMissing();
        IndexNumericFieldData indexFieldData = getForField("value");
        AtomicNumericFieldData fieldData = indexFieldData.load(refreshReader());

        SortedNumericDocValues longValues = fieldData.getLongValues();

        assertThat(FieldData.isMultiValued(longValues), equalTo(true));

        longValues.setDocument(0);
        assertThat(longValues.count(), equalTo(2));
        assertThat(longValues.valueAt(0), equalTo(2L));
        assertThat(longValues.valueAt(1), equalTo(4L));

        longValues.setDocument(1);
        assertThat(longValues.count(), equalTo(0));

        longValues.setDocument(2);
        assertThat(longValues.count(), equalTo(1));
        assertThat(longValues.valueAt(0), equalTo(3L));

        SortedNumericDoubleValues doubleValues = fieldData.getDoubleValues();

        assertThat(FieldData.isMultiValued(doubleValues), equalTo(true));

        doubleValues.setDocument(0);
        assertThat(doubleValues.count(), equalTo(2));
        assertThat(doubleValues.valueAt(0), equalTo(2d));
        assertThat(doubleValues.valueAt(1), equalTo(4d));

        doubleValues.setDocument(1);
        assertThat(doubleValues.count(), equalTo(0));

        doubleValues.setDocument(2);
        assertThat(doubleValues.count(), equalTo(1));
        assertThat(doubleValues.valueAt(0), equalTo(3d));
    }

    @Override
    public void testMissingValueForAll() throws Exception {
        fillAllMissing();
        IndexNumericFieldData indexFieldData = getForField("value");
        AtomicNumericFieldData fieldData = indexFieldData.load(refreshReader());

        // long values

        SortedNumericDocValues longValues = fieldData.getLongValues();

        assertThat(FieldData.isMultiValued(longValues), equalTo(false));

        for (int i = 0; i < 3; ++i) {
            longValues.setDocument(i);
            assertThat(longValues.count(), equalTo(0));
        }

        // double values

        SortedNumericDoubleValues doubleValues = fieldData.getDoubleValues();

        assertThat(FieldData.isMultiValued(doubleValues), equalTo(false));

        doubleValues.setDocument(0);
        assertThat(doubleValues.count(), equalTo(0));

        doubleValues.setDocument(1);
        assertThat(doubleValues.count(), equalTo(0));

        doubleValues.setDocument(2);
        assertThat(doubleValues.count(), equalTo(0));
    }

    @Override
    protected void fillAllMissing() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "3", Field.Store.NO));
        writer.addDocument(d);
    }

    @Override
    public void testSortMultiValuesFields() throws Exception {
        fillExtendedMvSet();
        IndexFieldData indexFieldData = getForField("value");

        IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true));
        TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10,
                new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.MIN, null)))); // defaults to _last
        assertThat(topDocs.totalHits, equalTo(8));
        assertThat(topDocs.scoreDocs.length, equalTo(8));
        assertThat(topDocs.scoreDocs[0].doc, equalTo(7));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(-10));
        assertThat(topDocs.scoreDocs[1].doc, equalTo(0));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).intValue(), equalTo(2));
        assertThat(topDocs.scoreDocs[2].doc, equalTo(2));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).intValue(), equalTo(3));
        assertThat(topDocs.scoreDocs[3].doc, equalTo(3));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).intValue(), equalTo(4));
        assertThat(topDocs.scoreDocs[4].doc, equalTo(4));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).intValue(), equalTo(6));
        assertThat(topDocs.scoreDocs[5].doc, equalTo(6));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[5]).fields[0]).intValue(), equalTo(8));
        assertThat(topDocs.scoreDocs[6].doc, equalTo(1));
        // assertThat(((FieldDoc) topDocs.scoreDocs[6]).fields[0], equalTo(null));
        assertThat(topDocs.scoreDocs[7].doc, equalTo(5));
        // assertThat(((FieldDoc) topDocs.scoreDocs[7]).fields[0], equalTo(null));

        topDocs = searcher.search(new MatchAllDocsQuery(), 10,
                new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.MAX, null), true))); // defaults to _last
        assertThat(topDocs.totalHits, equalTo(8));
        assertThat(topDocs.scoreDocs.length, equalTo(8));
        assertThat(topDocs.scoreDocs[0].doc, equalTo(6));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(10));
        assertThat(topDocs.scoreDocs[1].doc, equalTo(4));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).intValue(), equalTo(8));
        assertThat(topDocs.scoreDocs[2].doc, equalTo(3));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).intValue(), equalTo(6));
        assertThat(topDocs.scoreDocs[3].doc, equalTo(0));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).intValue(), equalTo(4));
        assertThat(topDocs.scoreDocs[4].doc, equalTo(2));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).intValue(), equalTo(3));
        assertThat(topDocs.scoreDocs[5].doc, equalTo(7));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[5]).fields[0]).intValue(), equalTo(-8));
        assertThat(topDocs.scoreDocs[6].doc, equalTo(1));
        // assertThat(((FieldDoc) topDocs.scoreDocs[6]).fields[0], equalTo(null));
        assertThat(topDocs.scoreDocs[7].doc, equalTo(5));
        // assertThat(((FieldDoc) topDocs.scoreDocs[7]).fields[0], equalTo(null));

        searcher = new IndexSearcher(DirectoryReader.open(writer, true));
        topDocs = searcher.search(new MatchAllDocsQuery(), 10,
                new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.SUM, null)))); // defaults to _last
        assertThat(topDocs.totalHits, equalTo(8));
        assertThat(topDocs.scoreDocs.length, equalTo(8));
        assertThat(topDocs.scoreDocs[0].doc, equalTo(7));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(-27));
        assertThat(topDocs.scoreDocs[1].doc, equalTo(2));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).intValue(), equalTo(3));
        assertThat(topDocs.scoreDocs[2].doc, equalTo(0));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).intValue(), equalTo(6));
        assertThat(topDocs.scoreDocs[3].doc, equalTo(3));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).intValue(), equalTo(15));
        assertThat(topDocs.scoreDocs[4].doc, equalTo(4));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).intValue(), equalTo(21));
        assertThat(topDocs.scoreDocs[5].doc, equalTo(6));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[5]).fields[0]).intValue(), equalTo(27));
        assertThat(topDocs.scoreDocs[6].doc, equalTo(1));
        // assertThat(((FieldDoc) topDocs.scoreDocs[6]).fields[0], equalTo(null));
        assertThat(topDocs.scoreDocs[7].doc, equalTo(5));
        // assertThat(((FieldDoc) topDocs.scoreDocs[7]).fields[0], equalTo(null));

        searcher = new IndexSearcher(DirectoryReader.open(writer, true));
        topDocs = searcher.search(new MatchAllDocsQuery(), 10,
                new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.SUM, null), true))); // defaults to _last
        assertThat(topDocs.totalHits, equalTo(8));
        assertThat(topDocs.scoreDocs.length, equalTo(8));
        assertThat(topDocs.scoreDocs[0].doc, equalTo(6));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(27));
        assertThat(topDocs.scoreDocs[1].doc, equalTo(4));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).intValue(), equalTo(21));
        assertThat(topDocs.scoreDocs[2].doc, equalTo(3));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).intValue(), equalTo(15));
        assertThat(topDocs.scoreDocs[3].doc, equalTo(0));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).intValue(), equalTo(6));
        assertThat(topDocs.scoreDocs[4].doc, equalTo(2));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).intValue(), equalTo(3));
        assertThat(topDocs.scoreDocs[5].doc, equalTo(7));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[5]).fields[0]).intValue(), equalTo(-27));
        assertThat(topDocs.scoreDocs[6].doc, equalTo(1));
        // assertThat(((FieldDoc) topDocs.scoreDocs[6]).fields[0], equalTo(null));
        assertThat(topDocs.scoreDocs[7].doc, equalTo(5));
        // assertThat(((FieldDoc) topDocs.scoreDocs[7]).fields[0], equalTo(null));

        searcher = new IndexSearcher(DirectoryReader.open(writer, true));
        topDocs = searcher.search(new MatchAllDocsQuery(), 10,
                new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.AVG, null)))); // defaults to _last
        assertThat(topDocs.totalHits, equalTo(8));
        assertThat(topDocs.scoreDocs.length, equalTo(8));
        assertThat(topDocs.scoreDocs[0].doc, equalTo(7));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(-9));
        assertThat(topDocs.scoreDocs[1].doc, equalTo(0));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).intValue(), equalTo(3));
        assertThat(topDocs.scoreDocs[2].doc, equalTo(2));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).intValue(), equalTo(3));
        assertThat(topDocs.scoreDocs[3].doc, equalTo(3));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).intValue(), equalTo(5));
        assertThat(topDocs.scoreDocs[4].doc, equalTo(4));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).intValue(), equalTo(7));
        assertThat(topDocs.scoreDocs[5].doc, equalTo(6));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[5]).fields[0]).intValue(), equalTo(9));
        assertThat(topDocs.scoreDocs[6].doc, equalTo(1));
        // assertThat(((FieldDoc) topDocs.scoreDocs[6]).fields[0], equalTo(null));
        assertThat(topDocs.scoreDocs[7].doc, equalTo(5));
        // assertThat(((FieldDoc) topDocs.scoreDocs[7]).fields[0], equalTo(null));

        searcher = new IndexSearcher(DirectoryReader.open(writer, true));
        topDocs = searcher.search(new MatchAllDocsQuery(), 10,
                new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.AVG, null), true))); // defaults to _last
        assertThat(topDocs.totalHits, equalTo(8));
        assertThat(topDocs.scoreDocs.length, equalTo(8));
        assertThat(topDocs.scoreDocs[0].doc, equalTo(6));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(9));
        assertThat(topDocs.scoreDocs[1].doc, equalTo(4));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).intValue(), equalTo(7));
        assertThat(topDocs.scoreDocs[2].doc, equalTo(3));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).intValue(), equalTo(5));
        assertThat(topDocs.scoreDocs[3].doc, equalTo(0));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).intValue(), equalTo(3));
        assertThat(topDocs.scoreDocs[4].doc, equalTo(2));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).intValue(), equalTo(3));
        assertThat(topDocs.scoreDocs[5].doc, equalTo(7));
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[5]).fields[0]).intValue(), equalTo(-9));
        assertThat(topDocs.scoreDocs[6].doc, equalTo(1));
        // assertThat(((FieldDoc) topDocs.scoreDocs[6]).fields[0], equalTo(null));
        assertThat(topDocs.scoreDocs[7].doc, equalTo(5));
        // assertThat(((FieldDoc) topDocs.scoreDocs[7]).fields[0], equalTo(null));

        topDocs = searcher.search(new MatchAllDocsQuery(), 10,
                new Sort(new SortField("value", indexFieldData.comparatorSource("_first", MultiValueMode.MIN, null))));
        assertThat(topDocs.totalHits, equalTo(8));
        assertThat(topDocs.scoreDocs.length, equalTo(8));
        assertThat(topDocs.scoreDocs[0].doc, equalTo(1));
        assertThat(topDocs.scoreDocs[1].doc, equalTo(5));
        assertThat(topDocs.scoreDocs[2].doc, equalTo(7));
        assertThat(topDocs.scoreDocs[3].doc, equalTo(0));
        assertThat(topDocs.scoreDocs[4].doc, equalTo(2));
        assertThat(topDocs.scoreDocs[5].doc, equalTo(3));
        assertThat(topDocs.scoreDocs[6].doc, equalTo(4));
        assertThat(topDocs.scoreDocs[7].doc, equalTo(6));

        topDocs = searcher.search(new MatchAllDocsQuery(), 10,
                new Sort(new SortField("value", indexFieldData.comparatorSource("_first", MultiValueMode.MAX, null), true)));
        assertThat(topDocs.totalHits, equalTo(8));
        assertThat(topDocs.scoreDocs.length, equalTo(8));
        assertThat(topDocs.scoreDocs[0].doc, equalTo(1));
        assertThat(topDocs.scoreDocs[1].doc, equalTo(5));
        assertThat(topDocs.scoreDocs[2].doc, equalTo(6));
        assertThat(topDocs.scoreDocs[3].doc, equalTo(4));
        assertThat(topDocs.scoreDocs[4].doc, equalTo(3));
        assertThat(topDocs.scoreDocs[5].doc, equalTo(0));
        assertThat(topDocs.scoreDocs[6].doc, equalTo(2));
        assertThat(topDocs.scoreDocs[7].doc, equalTo(7));

        topDocs = searcher.search(new MatchAllDocsQuery(), 10,
                new Sort(new SortField("value", indexFieldData.comparatorSource("-9", MultiValueMode.MIN, null))));
        assertThat(topDocs.totalHits, equalTo(8));
        assertThat(topDocs.scoreDocs.length, equalTo(8));
        assertThat(topDocs.scoreDocs[0].doc, equalTo(7));
        assertThat(topDocs.scoreDocs[1].doc, equalTo(1));
        assertThat(topDocs.scoreDocs[2].doc, equalTo(5));
        assertThat(topDocs.scoreDocs[3].doc, equalTo(0));
        assertThat(topDocs.scoreDocs[4].doc, equalTo(2));
        assertThat(topDocs.scoreDocs[5].doc, equalTo(3));
        assertThat(topDocs.scoreDocs[6].doc, equalTo(4));
        assertThat(topDocs.scoreDocs[7].doc, equalTo(6));

        topDocs = searcher.search(new MatchAllDocsQuery(), 10,
                new Sort(new SortField("value", indexFieldData.comparatorSource("9", MultiValueMode.MAX, null), true)));
        assertThat(topDocs.totalHits, equalTo(8));
        assertThat(topDocs.scoreDocs.length, equalTo(8));
        assertThat(topDocs.scoreDocs[0].doc, equalTo(6));
        assertThat(topDocs.scoreDocs[1].doc, equalTo(1));
        assertThat(topDocs.scoreDocs[2].doc, equalTo(5));
        assertThat(topDocs.scoreDocs[3].doc, equalTo(4));
        assertThat(topDocs.scoreDocs[4].doc, equalTo(3));
        assertThat(topDocs.scoreDocs[5].doc, equalTo(0));
        assertThat(topDocs.scoreDocs[6].doc, equalTo(2));
        assertThat(topDocs.scoreDocs[7].doc, equalTo(7));
    }

}
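The SUM and AVG orderings asserted above follow from collapsing each document's values into a single sort key before comparing. A minimal sketch of that collapse, using plain arrays instead of the real MultiValueMode machinery, applied to the three values of the "_id 7" document from the fillExtendedMvSet fixtures below:

import java.util.Arrays;

public class SortKeySketch {
    public static void main(String[] args) {
        final long[] values = {8, 9, 10};                         // one multi-valued doc
        final long min = Arrays.stream(values).min().getAsLong(); // 8
        final long max = Arrays.stream(values).max().getAsLong(); // 10
        final long sum = Arrays.stream(values).sum();             // 27, as asserted above
        final long avg = sum / values.length;                     // 9, as asserted above
        System.out.printf("min=%d max=%d sum=%d avg=%d%n", min, max, sum, avg);
    }
}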
@ -1,200 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.fielddata;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoubleField;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.Term;

public class DoubleFieldDataTests extends AbstractNumericFieldDataTestCase {

    @Override
    protected FieldDataType getFieldDataType() {
        return new FieldDataType("double", getFieldDataSettings());
    }

    @Override
    protected String one() {
        return "1.0";
    }

    @Override
    protected String two() {
        return "2.0";
    }

    @Override
    protected String three() {
        return "3.0";
    }

    @Override
    protected String four() {
        return "4.0";
    }

    @Override
    protected void add2SingleValuedDocumentsAndDeleteOneOfThem() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new DoubleField("value", 2.0d, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        d.add(new DoubleField("value", 4.0d, Field.Store.NO));
        writer.addDocument(d);

        writer.commit();

        writer.deleteDocuments(new Term("_id", "1"));
    }

    @Override
    protected void fillSingleValueAllSet() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new DoubleField("value", 2.0d, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        d.add(new DoubleField("value", 1.0d, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "3", Field.Store.NO));
        d.add(new DoubleField("value", 3.0d, Field.Store.NO));
        writer.addDocument(d);
    }

    @Override
    protected void fillSingleValueWithMissing() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new DoubleField("value", 2.0d, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        //d.add(new StringField("value", one(), Field.Store.NO)); // MISSING....
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "3", Field.Store.NO));
        d.add(new DoubleField("value", 3.0d, Field.Store.NO));
        writer.addDocument(d);
    }

    @Override
    protected void fillMultiValueAllSet() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new DoubleField("value", 2.0d, Field.Store.NO));
        d.add(new DoubleField("value", 4.0d, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        d.add(new DoubleField("value", 1.0d, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "3", Field.Store.NO));
        d.add(new DoubleField("value", 3.0d, Field.Store.NO));
        writer.addDocument(d);
    }

    @Override
    protected void fillMultiValueWithMissing() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new DoubleField("value", 2.0d, Field.Store.NO));
        d.add(new DoubleField("value", 4.0d, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        //d.add(new StringField("value", one(), Field.Store.NO)); // MISSING
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "3", Field.Store.NO));
        d.add(new DoubleField("value", 3.0d, Field.Store.NO));
        writer.addDocument(d);
    }

    @Override
    protected void fillExtendedMvSet() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new DoubleField("value", 2, Field.Store.NO));
        d.add(new DoubleField("value", 4, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "3", Field.Store.NO));
        d.add(new DoubleField("value", 3, Field.Store.NO));
        writer.addDocument(d);
        writer.commit();

        d = new Document();
        d.add(new StringField("_id", "4", Field.Store.NO));
        d.add(new DoubleField("value", 4, Field.Store.NO));
        d.add(new DoubleField("value", 5, Field.Store.NO));
        d.add(new DoubleField("value", 6, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "5", Field.Store.NO));
        d.add(new DoubleField("value", 6, Field.Store.NO));
        d.add(new DoubleField("value", 7, Field.Store.NO));
        d.add(new DoubleField("value", 8, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "6", Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "7", Field.Store.NO));
        d.add(new DoubleField("value", 8, Field.Store.NO));
        d.add(new DoubleField("value", 9, Field.Store.NO));
        d.add(new DoubleField("value", 10, Field.Store.NO));
        writer.addDocument(d);
        writer.commit();

        d = new Document();
        d.add(new StringField("_id", "8", Field.Store.NO));
        d.add(new DoubleField("value", -8, Field.Store.NO));
        d.add(new DoubleField("value", -9, Field.Store.NO));
        d.add(new DoubleField("value", -10, Field.Store.NO));
        writer.addDocument(d);
    }

}
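The *WithMissing fixtures above exercise sorting when some documents carry no value at all; where such a document lands depends on the missing parameter passed to comparatorSource ("_last" by default, "_first", or a literal like "1"). A deliberately simplified model of that substitution, not the actual XFieldComparatorSource logic:

public class MissingValueSketch {
    // Simplified rule: a document's sort key is its own value, or a stand-in
    // derived from the missing parameter when it has none.
    static long sortKey(Long docValue, String missing, boolean reverse) {
        if (docValue != null) {
            return docValue;
        }
        switch (missing) {
            case "_last":  return reverse ? Long.MIN_VALUE : Long.MAX_VALUE;
            case "_first": return reverse ? Long.MAX_VALUE : Long.MIN_VALUE;
            default:       return Long.parseLong(missing); // e.g. "1" or "-9"
        }
    }

    public static void main(String[] args) {
        // With missing="1", the valueless doc takes key 1 and sorts ahead of the
        // docs holding 2 and 3, matching testSingleValueWithMissingNumber above.
        System.out.println(sortKey(null, "1", false));     // 1
        System.out.println(sortKey(null, "_last", false)); // Long.MAX_VALUE -> last
    }
}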
@ -67,13 +67,13 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
    public void testDuelAllTypesSingleValue() throws Exception {
        final String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                .startObject("bytes").field("type", "string").field("index", "not_analyzed").startObject("fielddata").field("format", "doc_values").endObject().endObject()
                .startObject("byte").field("type", "byte").startObject("fielddata").field("format", "doc_values").endObject().endObject()
                .startObject("short").field("type", "short").startObject("fielddata").field("format", "doc_values").endObject().endObject()
                .startObject("integer").field("type", "integer").startObject("fielddata").field("format", "doc_values").endObject().endObject()
                .startObject("long").field("type", "long").startObject("fielddata").field("format", "doc_values").endObject().endObject()
                .startObject("float").field("type", "float").startObject("fielddata").field("format", "doc_values").endObject().endObject()
                .startObject("double").field("type", "double").startObject("fielddata").field("format", "doc_values").endObject().endObject()
                .startObject("bytes").field("type", "string").field("index", "not_analyzed").endObject()
                .startObject("byte").field("type", "byte").endObject()
                .startObject("short").field("type", "short").endObject()
                .startObject("integer").field("type", "integer").endObject()
                .startObject("long").field("type", "long").endObject()
                .startObject("float").field("type", "float").endObject()
                .startObject("double").field("type", "double").endObject()
                .endObject().endObject().endObject().string();
        final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping);
        Random random = getRandom();

@ -99,12 +99,6 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
        LeafReaderContext context = refreshReader();
        Map<FieldDataType, Type> typeMap = new HashMap<>();
        typeMap.put(new FieldDataType("string", Settings.builder().put("format", "paged_bytes")), Type.Bytes);
        typeMap.put(new FieldDataType("byte", Settings.builder().put("format", "array")), Type.Integer);
        typeMap.put(new FieldDataType("short", Settings.builder().put("format", "array")), Type.Integer);
        typeMap.put(new FieldDataType("int", Settings.builder().put("format", "array")), Type.Integer);
        typeMap.put(new FieldDataType("long", Settings.builder().put("format", "array")), Type.Long);
        typeMap.put(new FieldDataType("double", Settings.builder().put("format", "array")), Type.Double);
        typeMap.put(new FieldDataType("float", Settings.builder().put("format", "array")), Type.Float);
        typeMap.put(new FieldDataType("byte", Settings.builder().put("format", "doc_values")), Type.Integer);
        typeMap.put(new FieldDataType("short", Settings.builder().put("format", "doc_values")), Type.Integer);
        typeMap.put(new FieldDataType("int", Settings.builder().put("format", "doc_values")), Type.Integer);

@ -125,9 +119,9 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
        }

        ifdService.clear();
        IndexFieldData<?> leftFieldData = getForField(left.getKey(), left.getValue().name().toLowerCase(Locale.ROOT));
        IndexFieldData<?> leftFieldData = getForField(left.getKey(), left.getValue().name().toLowerCase(Locale.ROOT), true);
        ifdService.clear();
        IndexFieldData<?> rightFieldData = getForField(right.getKey(), right.getValue().name().toLowerCase(Locale.ROOT));
        IndexFieldData<?> rightFieldData = getForField(right.getKey(), right.getValue().name().toLowerCase(Locale.ROOT), true);
        duelFieldDataBytes(random, context, leftFieldData, rightFieldData, pre);
        duelFieldDataBytes(random, context, rightFieldData, leftFieldData, pre);

@ -143,10 +137,10 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
    public void testDuelIntegers() throws Exception {
        final String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                .startObject("byte").field("type", "byte").startObject("fielddata").field("format", "doc_values").endObject().endObject()
                .startObject("short").field("type", "short").startObject("fielddata").field("format", "doc_values").endObject().endObject()
                .startObject("integer").field("type", "integer").startObject("fielddata").field("format", "doc_values").endObject().endObject()
                .startObject("long").field("type", "long").startObject("fielddata").field("format", "doc_values").endObject().endObject()
                .startObject("byte").field("type", "byte").endObject()
                .startObject("short").field("type", "short").endObject()
                .startObject("integer").field("type", "integer").endObject()
                .startObject("long").field("type", "long").endObject()
                .endObject().endObject().endObject().string();

        final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping);

@ -187,10 +181,6 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
        }
        LeafReaderContext context = refreshReader();
        Map<FieldDataType, Type> typeMap = new HashMap<>();
        typeMap.put(new FieldDataType("byte", Settings.builder().put("format", "array")), Type.Integer);
        typeMap.put(new FieldDataType("short", Settings.builder().put("format", "array")), Type.Integer);
        typeMap.put(new FieldDataType("int", Settings.builder().put("format", "array")), Type.Integer);
        typeMap.put(new FieldDataType("long", Settings.builder().put("format", "array")), Type.Long);
        typeMap.put(new FieldDataType("byte", Settings.builder().put("format", "doc_values")), Type.Integer);
        typeMap.put(new FieldDataType("short", Settings.builder().put("format", "doc_values")), Type.Integer);
        typeMap.put(new FieldDataType("int", Settings.builder().put("format", "doc_values")), Type.Integer);

@ -206,9 +196,9 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
            right = left = list.remove(0);
        }
        ifdService.clear();
        IndexNumericFieldData leftFieldData = getForField(left.getKey(), left.getValue().name().toLowerCase(Locale.ROOT));
        IndexNumericFieldData leftFieldData = getForField(left.getKey(), left.getValue().name().toLowerCase(Locale.ROOT), true);
        ifdService.clear();
        IndexNumericFieldData rightFieldData = getForField(right.getKey(), right.getValue().name().toLowerCase(Locale.ROOT));
        IndexNumericFieldData rightFieldData = getForField(right.getKey(), right.getValue().name().toLowerCase(Locale.ROOT), true);

        duelFieldDataLong(random, context, leftFieldData, rightFieldData);
        duelFieldDataLong(random, context, rightFieldData, leftFieldData);

@ -226,8 +216,8 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
    public void testDuelDoubles() throws Exception {
        final String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                .startObject("float").field("type", "float").startObject("fielddata").field("format", "doc_values").endObject().endObject()
                .startObject("double").field("type", "double").startObject("fielddata").field("format", "doc_values").endObject().endObject()
                .startObject("float").field("type", "float").endObject()
                .startObject("double").field("type", "double").endObject()
                .endObject().endObject().endObject().string();

        final DocumentMapper mapper = mapperService.documentMapperParser().parse(mapping);

@ -272,8 +262,6 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
        }
        LeafReaderContext context = refreshReader();
        Map<FieldDataType, Type> typeMap = new HashMap<>();
        typeMap.put(new FieldDataType("double", Settings.builder().put("format", "array")), Type.Double);
        typeMap.put(new FieldDataType("float", Settings.builder().put("format", "array")), Type.Float);
        typeMap.put(new FieldDataType("double", Settings.builder().put("format", "doc_values")), Type.Double);
        typeMap.put(new FieldDataType("float", Settings.builder().put("format", "doc_values")), Type.Float);
        ArrayList<Entry<FieldDataType, Type>> list = new ArrayList<>(typeMap.entrySet());

@ -287,10 +275,10 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
            right = left = list.remove(0);
        }
        ifdService.clear();
        IndexNumericFieldData leftFieldData = getForField(left.getKey(), left.getValue().name().toLowerCase(Locale.ROOT));
        IndexNumericFieldData leftFieldData = getForField(left.getKey(), left.getValue().name().toLowerCase(Locale.ROOT), true);

        ifdService.clear();
        IndexNumericFieldData rightFieldData = getForField(right.getKey(), right.getValue().name().toLowerCase(Locale.ROOT));
        IndexNumericFieldData rightFieldData = getForField(right.getKey(), right.getValue().name().toLowerCase(Locale.ROOT), true);

        duelFieldDataDouble(random, context, leftFieldData, rightFieldData);
        duelFieldDataDouble(random, context, rightFieldData, leftFieldData);
@ -1,199 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.fielddata;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FloatField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.Term;

public class FloatFieldDataTests extends AbstractNumericFieldDataTestCase {

    @Override
    protected FieldDataType getFieldDataType() {
        return new FieldDataType("float", getFieldDataSettings());
    }

    @Override
    protected String one() {
        return "1.0";
    }

    @Override
    protected String two() {
        return "2.0";
    }

    @Override
    protected String three() {
        return "3.0";
    }

    @Override
    protected String four() {
        return "4.0";
    }

    @Override
    protected void add2SingleValuedDocumentsAndDeleteOneOfThem() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new FloatField("value", 2.0f, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        d.add(new FloatField("value", 4.0f, Field.Store.NO));
        writer.addDocument(d);

        writer.commit();

        writer.deleteDocuments(new Term("_id", "1"));
    }

    @Override
    protected void fillSingleValueAllSet() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new FloatField("value", 2.0f, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        d.add(new FloatField("value", 1.0f, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "3", Field.Store.NO));
        d.add(new FloatField("value", 3.0f, Field.Store.NO));
        writer.addDocument(d);
    }

    @Override
    protected void fillSingleValueWithMissing() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new FloatField("value", 2.0f, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        //d.add(new StringField("value", one(), Field.Store.NO)); // MISSING....
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "3", Field.Store.NO));
        d.add(new FloatField("value", 3.0f, Field.Store.NO));
        writer.addDocument(d);
    }

    @Override
    protected void fillMultiValueAllSet() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new FloatField("value", 2.0f, Field.Store.NO));
        d.add(new FloatField("value", 4.0f, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        d.add(new FloatField("value", 1.0f, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "3", Field.Store.NO));
        d.add(new FloatField("value", 3.0f, Field.Store.NO));
        writer.addDocument(d);
    }

    @Override
    protected void fillMultiValueWithMissing() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new FloatField("value", 2.0f, Field.Store.NO));
        d.add(new FloatField("value", 4.0f, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        //d.add(new StringField("value", one(), Field.Store.NO)); // MISSING
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "3", Field.Store.NO));
        d.add(new FloatField("value", 3.0f, Field.Store.NO));
        writer.addDocument(d);
    }

    @Override
    protected void fillExtendedMvSet() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new FloatField("value", 2, Field.Store.NO));
        d.add(new FloatField("value", 4, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "3", Field.Store.NO));
        d.add(new FloatField("value", 3, Field.Store.NO));
        writer.addDocument(d);
        writer.commit();

        d = new Document();
        d.add(new StringField("_id", "4", Field.Store.NO));
        d.add(new FloatField("value", 4, Field.Store.NO));
        d.add(new FloatField("value", 5, Field.Store.NO));
        d.add(new FloatField("value", 6, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "5", Field.Store.NO));
        d.add(new FloatField("value", 6, Field.Store.NO));
        d.add(new FloatField("value", 7, Field.Store.NO));
        d.add(new FloatField("value", 8, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "6", Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "7", Field.Store.NO));
        d.add(new FloatField("value", 8, Field.Store.NO));
        d.add(new FloatField("value", 9, Field.Store.NO));
        d.add(new FloatField("value", 10, Field.Store.NO));
        writer.addDocument(d);
        writer.commit();

        d = new Document();
        d.add(new StringField("_id", "8", Field.Store.NO));
        d.add(new FloatField("value", -8, Field.Store.NO));
        d.add(new FloatField("value", -9, Field.Store.NO));
        d.add(new FloatField("value", -10, Field.Store.NO));
        writer.addDocument(d);
    }

}
@ -26,20 +26,22 @@ import org.apache.lucene.document.StringField;
import org.apache.lucene.index.*;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Accountable;
import org.elasticsearch.common.lucene.index.ESDirectoryReaderTests;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.plain.*;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.MapperBuilders;
import org.elasticsearch.index.mapper.core.*;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.threadpool.ThreadPool;

import java.util.Arrays;
import java.util.Collections;

@ -47,92 +49,40 @@ import java.util.IdentityHashMap;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf;

public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {

    private static Settings DOC_VALUES_SETTINGS = Settings.builder().put(FieldDataType.FORMAT_KEY, FieldDataType.DOC_VALUES_FORMAT_VALUE).build();

    public void testGetForFieldDefaults() {
        final IndexService indexService = createIndex("test");
        final IndexFieldDataService ifdService = indexService.fieldData();
        for (boolean docValues : Arrays.asList(true, false)) {
            final BuilderContext ctx = new BuilderContext(indexService.settingsService().getSettings(), new ContentPath(1));
            final MappedFieldType stringMapper = new StringFieldMapper.Builder("string").tokenized(false).docValues(docValues).build(ctx).fieldType();
            ifdService.clear();
            IndexFieldData<?> fd = ifdService.getForField(stringMapper);
            if (docValues) {
                assertTrue(fd instanceof SortedSetDVOrdinalsIndexFieldData);
            } else {
                assertTrue(fd instanceof PagedBytesIndexFieldData);
            }

            for (MappedFieldType mapper : Arrays.asList(
                    new ByteFieldMapper.Builder("int").docValues(docValues).build(ctx).fieldType(),
                    new ShortFieldMapper.Builder("int").docValues(docValues).build(ctx).fieldType(),
                    new IntegerFieldMapper.Builder("int").docValues(docValues).build(ctx).fieldType(),
                    new LongFieldMapper.Builder("long").docValues(docValues).build(ctx).fieldType()
                    )) {
                ifdService.clear();
                fd = ifdService.getForField(mapper);
                if (docValues) {
                    assertTrue(fd instanceof SortedNumericDVIndexFieldData);
                } else {
                    assertTrue(fd instanceof PackedArrayIndexFieldData);
                }
            }

            final MappedFieldType floatMapper = new FloatFieldMapper.Builder("float").docValues(docValues).build(ctx).fieldType();
            ifdService.clear();
            fd = ifdService.getForField(floatMapper);
            if (docValues) {
                assertTrue(fd instanceof SortedNumericDVIndexFieldData);
            } else {
                assertTrue(fd instanceof FloatArrayIndexFieldData);
            }

            final MappedFieldType doubleMapper = new DoubleFieldMapper.Builder("double").docValues(docValues).build(ctx).fieldType();
            ifdService.clear();
            fd = ifdService.getForField(doubleMapper);
            if (docValues) {
                assertTrue(fd instanceof SortedNumericDVIndexFieldData);
            } else {
                assertTrue(fd instanceof DoubleArrayIndexFieldData);
            }
        }
    }

    @SuppressWarnings("unchecked")
    public void testByPassDocValues() {
        final IndexService indexService = createIndex("test");
        final IndexFieldDataService ifdService = indexService.fieldData();
        final BuilderContext ctx = new BuilderContext(indexService.settingsService().getSettings(), new ContentPath(1));
        final MappedFieldType stringMapper = MapperBuilders.stringField("string").tokenized(false).fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(Settings.builder().put("format", "disabled").build()).build(ctx).fieldType();
        final MappedFieldType stringMapper = new StringFieldMapper.Builder("string").tokenized(false).build(ctx).fieldType();
        ifdService.clear();
        IndexFieldData<?> fd = ifdService.getForField(stringMapper);
        assertTrue(fd instanceof DisabledIndexFieldData);
        assertTrue(fd instanceof SortedSetDVOrdinalsIndexFieldData);

        final Settings fdSettings = Settings.builder().put("format", "array").build();
        for (MappedFieldType mapper : Arrays.asList(
                new ByteFieldMapper.Builder("int").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx).fieldType(),
                new ShortFieldMapper.Builder("int").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx).fieldType(),
                new IntegerFieldMapper.Builder("int").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx).fieldType(),
                new LongFieldMapper.Builder("long").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx).fieldType()
                new ByteFieldMapper.Builder("int").build(ctx).fieldType(),
                new ShortFieldMapper.Builder("int").build(ctx).fieldType(),
                new IntegerFieldMapper.Builder("int").build(ctx).fieldType(),
                new LongFieldMapper.Builder("long").build(ctx).fieldType()
                )) {
            ifdService.clear();
            fd = ifdService.getForField(mapper);
            assertTrue(fd instanceof PackedArrayIndexFieldData);
            assertTrue(fd instanceof SortedNumericDVIndexFieldData);
        }

        final MappedFieldType floatMapper = MapperBuilders.floatField("float").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx).fieldType();
        final MappedFieldType floatMapper = new FloatFieldMapper.Builder("float").build(ctx).fieldType();
        ifdService.clear();
        fd = ifdService.getForField(floatMapper);
        assertTrue(fd instanceof FloatArrayIndexFieldData);
        assertTrue(fd instanceof SortedNumericDVIndexFieldData);

        final MappedFieldType doubleMapper = MapperBuilders.doubleField("double").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx).fieldType();
        final MappedFieldType doubleMapper = new DoubleFieldMapper.Builder("double").build(ctx).fieldType();
        ifdService.clear();
        fd = ifdService.getForField(doubleMapper);
        assertTrue(fd instanceof DoubleArrayIndexFieldData);
        assertTrue(fd instanceof SortedNumericDVIndexFieldData);
    }

    public void testChangeFieldDataFormat() throws Exception {

@ -238,4 +188,35 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
        }
    }

    private void doTestRequireDocValues(MappedFieldType ft) {
        ThreadPool threadPool = new ThreadPool("random_threadpool_name");
        try {
            IndicesFieldDataCache cache = new IndicesFieldDataCache(Settings.EMPTY, null, threadPool);
            IndexFieldDataService ifds = new IndexFieldDataService(new Index("test"), Settings.EMPTY, cache, null, null);
            ft.setNames(new Names("some_long"));
            ft.setHasDocValues(true);
            ifds.getForField(ft); // no exception
            ft.setHasDocValues(false);
            try {
                ifds.getForField(ft);
                fail();
            } catch (IllegalStateException e) {
                assertThat(e.getMessage(), containsString("doc values"));
            }
        } finally {
            threadPool.shutdown();
        }
    }

    public void testRequireDocValuesOnLongs() {
        doTestRequireDocValues(new LongFieldMapper.LongFieldType());
    }

    public void testRequireDocValuesOnDoubles() {
        doTestRequireDocValues(new DoubleFieldMapper.DoubleFieldType());
    }

    public void testRequireDocValuesOnBools() {
        doTestRequireDocValues(new BooleanFieldMapper.BooleanFieldType());
    }
}
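
With "uninverted" fielddata gone, sorting or aggregating on a numeric or boolean field requires doc values, which the numeric mappers enable by default. A minimal mapping sketch (field name hypothetical, using the XContentBuilder API that appears elsewhere in this change):

    XContentBuilder mapping = XContentFactory.jsonBuilder()
            .startObject().startObject("type1").startObject("properties")
            .startObject("my_long")
            .field("type", "long")
            .field("doc_values", true) // the default; disabling it leaves the field unloadable as fielddata
            .endObject()
            .endObject().endObject().endObject();

Mapped with doc_values disabled instead, the same field now fails at getForField time with the IllegalStateException asserted in the tests above.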

@ -1,421 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.fielddata;

import com.carrotsearch.hppc.LongHashSet;
import com.carrotsearch.hppc.cursors.LongCursor;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.LongField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.Term;
import org.joda.time.DateTimeZone;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThan;

/**
 * Tests for all integer types (byte, short, int, long).
 */
public class LongFieldDataTests extends AbstractNumericFieldDataTestCase {

    @Override
    protected FieldDataType getFieldDataType() {
        // we don't want to optimize the type so it will always be a long...
        return new FieldDataType("long", getFieldDataSettings());
    }

    @Override
    protected void add2SingleValuedDocumentsAndDeleteOneOfThem() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new LongField("value", 2, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        d.add(new LongField("value", 4, Field.Store.NO));
        writer.addDocument(d);

        writer.commit();

        writer.deleteDocuments(new Term("_id", "1"));
    }

    public void testOptimizeTypeLong() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new LongField("value", Integer.MAX_VALUE + 1l, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        d.add(new LongField("value", Integer.MIN_VALUE - 1l, Field.Store.NO));
        writer.addDocument(d);

        IndexNumericFieldData indexFieldData = getForField("value");
        AtomicNumericFieldData fieldData = indexFieldData.load(refreshReader());
        assertThat(getFirst(fieldData.getLongValues(), 0), equalTo(Integer.MAX_VALUE + 1l));
        assertThat(getFirst(fieldData.getLongValues(), 1), equalTo(Integer.MIN_VALUE - 1l));
    }

    private static long getFirst(SortedNumericDocValues values, int docId) {
        values.setDocument(docId);
        final int numValues = values.count();
        assertThat(numValues, is(1));
        return values.valueAt(0);
    }

    public void testDateScripts() throws Exception {
        fillSingleValueAllSet();
        IndexNumericFieldData indexFieldData = getForField("value");
        AtomicNumericFieldData fieldData = indexFieldData.load(refreshReader());

        ScriptDocValues.Longs scriptValues = (ScriptDocValues.Longs) fieldData.getScriptValues();
        scriptValues.setNextDocId(0);
        assertThat(scriptValues.getValue(), equalTo(2l));
        assertThat(scriptValues.getDate().getMillis(), equalTo(2l));
        assertThat(scriptValues.getDate().getZone(), equalTo(DateTimeZone.UTC));
    }

    @Override
    protected void fillSingleValueAllSet() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new LongField("value", 2, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        d.add(new LongField("value", 1, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "3", Field.Store.NO));
        d.add(new LongField("value", 3, Field.Store.NO));
        writer.addDocument(d);
    }

    @Override
    protected void fillSingleValueWithMissing() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new LongField("value", 2, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        //d.add(new StringField("value", one(), Field.Store.NO)); // MISSING....
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "3", Field.Store.NO));
        d.add(new LongField("value", 3, Field.Store.NO));
        writer.addDocument(d);
    }

    @Override
    protected void fillMultiValueAllSet() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new LongField("value", 2, Field.Store.NO));
        d.add(new LongField("value", 4, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        d.add(new LongField("value", 1, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "3", Field.Store.NO));
        d.add(new LongField("value", 3, Field.Store.NO));
        writer.addDocument(d);
    }

    @Override
    protected void fillMultiValueWithMissing() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new LongField("value", 2, Field.Store.NO));
        d.add(new LongField("value", 4, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        //d.add(new StringField("value", one(), Field.Store.NO)); // MISSING
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "3", Field.Store.NO));
        d.add(new LongField("value", 3, Field.Store.NO));
        writer.addDocument(d);
    }

    @Override
    protected void fillExtendedMvSet() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        d.add(new LongField("value", 2, Field.Store.NO));
        d.add(new LongField("value", 4, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "2", Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "3", Field.Store.NO));
        d.add(new LongField("value", 3, Field.Store.NO));
        writer.addDocument(d);
        writer.commit();

        d = new Document();
        d.add(new StringField("_id", "4", Field.Store.NO));
        d.add(new LongField("value", 4, Field.Store.NO));
        d.add(new LongField("value", 5, Field.Store.NO));
        d.add(new LongField("value", 6, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "5", Field.Store.NO));
        d.add(new LongField("value", 6, Field.Store.NO));
        d.add(new LongField("value", 7, Field.Store.NO));
        d.add(new LongField("value", 8, Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "6", Field.Store.NO));
        writer.addDocument(d);

        d = new Document();
        d.add(new StringField("_id", "7", Field.Store.NO));
        d.add(new LongField("value", 8, Field.Store.NO));
        d.add(new LongField("value", 9, Field.Store.NO));
        d.add(new LongField("value", 10, Field.Store.NO));
        writer.addDocument(d);
        writer.commit();

        d = new Document();
        d.add(new StringField("_id", "8", Field.Store.NO));
        d.add(new LongField("value", -8, Field.Store.NO));
        d.add(new LongField("value", -9, Field.Store.NO));
        d.add(new LongField("value", -10, Field.Store.NO));
        writer.addDocument(d);
    }

    private static final int SECONDS_PER_YEAR = 60 * 60 * 24 * 365;

    // TODO: use random() when migrating to Junit
    public static enum Data {
        SINGLE_VALUED_DENSE_ENUM {
            @Override
            public int numValues(Random r) {
                return 1;
            }

            @Override
            public long nextValue(Random r) {
                return 1 + r.nextInt(16);
            }
        },
        SINGLE_VALUED_DENSE_DATE {
            @Override
            public int numValues(Random r) {
                return 1;
            }

            @Override
            public long nextValue(Random r) {
                // somewhere in-between 2010 and 2012
                return 1000L * (40L * SECONDS_PER_YEAR + r.nextInt(2 * SECONDS_PER_YEAR));
            }
        },
        MULTI_VALUED_DATE {
            @Override
            public int numValues(Random r) {
                return r.nextInt(3);
            }

            @Override
            public long nextValue(Random r) {
                // somewhere in-between 2010 and 2012
                return 1000L * (40L * SECONDS_PER_YEAR + r.nextInt(2 * SECONDS_PER_YEAR));
            }
        },
        MULTI_VALUED_ENUM {
            @Override
            public int numValues(Random r) {
                return r.nextInt(3);
            }

            @Override
            public long nextValue(Random r) {
                return 3 + r.nextInt(8);
            }
        },
        SINGLE_VALUED_SPARSE_RANDOM {
            @Override
            public int numValues(Random r) {
                return r.nextFloat() < 0.01 ? 1 : 0;
            }

            @Override
            public long nextValue(Random r) {
                return r.nextLong();
            }
        },
        MULTI_VALUED_SPARSE_RANDOM {
            @Override
            public int numValues(Random r) {
                return r.nextFloat() < 0.01f ? 1 + r.nextInt(5) : 0;
            }

            @Override
            public long nextValue(Random r) {
                return r.nextLong();
            }
        },
        MULTI_VALUED_DENSE_RANDOM {
            @Override
            public int numValues(Random r) {
                return 1 + r.nextInt(3);
            }

            @Override
            public long nextValue(Random r) {
                return r.nextLong();
            }
        };

        public abstract int numValues(Random r);

        public abstract long nextValue(Random r);
}
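
    // Note on the date formula above: with SECONDS_PER_YEAR = 31,536,000, the
    // expression 1000L * (40L * SECONDS_PER_YEAR + r.nextInt(2 * SECONDS_PER_YEAR))
    // yields epoch millis 40 to 42 "365-day years" after 1970-01-01, roughly late
    // 2009 through late 2011 once leap days are ignored, hence the "somewhere
    // in-between 2010 and 2012" comments.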

    private void test(List<LongHashSet> values) throws Exception {
        StringField id = new StringField("_id", "", Field.Store.NO);

        for (int i = 0; i < values.size(); ++i) {
            Document doc = new Document();
            id.setStringValue("" + i);
            doc.add(id);
            final LongHashSet v = values.get(i);
            for (LongCursor c : v) {
                LongField value = new LongField("value", c.value, Field.Store.NO);
                doc.add(value);
            }
            writer.addDocument(doc);
        }
        writer.forceMerge(1, true);

        final IndexNumericFieldData indexFieldData = getForField("value");
        final AtomicNumericFieldData atomicFieldData = indexFieldData.load(refreshReader());
        final SortedNumericDocValues data = atomicFieldData.getLongValues();
        final SortedNumericDoubleValues doubleData = atomicFieldData.getDoubleValues();
        final LongHashSet set = new LongHashSet();
        final LongHashSet doubleSet = new LongHashSet();
        for (int i = 0; i < values.size(); ++i) {
            final LongHashSet v = values.get(i);

            data.setDocument(i);
            assertThat(data.count() > 0, equalTo(!v.isEmpty()));
            doubleData.setDocument(i);
            assertThat(doubleData.count() > 0, equalTo(!v.isEmpty()));

            set.clear();
            data.setDocument(i);
            int numValues = data.count();
            for (int j = 0; j < numValues; j++) {
                set.add(data.valueAt(j));
            }
            assertThat(set, equalTo(v));

            final LongHashSet doubleV = new LongHashSet();
            for (LongCursor c : v) {
                doubleV.add(Double.doubleToLongBits(c.value));
            }
            doubleSet.clear();
            doubleData.setDocument(i);
            numValues = doubleData.count();
            double prev = 0;
            for (int j = 0; j < numValues; j++) {
                double current = doubleData.valueAt(j);
                doubleSet.add(Double.doubleToLongBits(current));
                if (j > 0) {
                    assertThat(prev, lessThan(current));
                }
                prev = current;
            }
            assertThat(doubleSet, equalTo(doubleV));
        }
    }

    private void test(Data data) throws Exception {
        Random r = getRandom();
        final int numDocs = 1000 + r.nextInt(19000);
        final List<LongHashSet> values = new ArrayList<>(numDocs);
        for (int i = 0; i < numDocs; ++i) {
            final int numValues = data.numValues(r);
            final LongHashSet vals = new LongHashSet(numValues);
            for (int j = 0; j < numValues; ++j) {
                vals.add(data.nextValue(r));
            }
            values.add(vals);
        }
        test(values);
    }

    public void testSingleValuedDenseEnum() throws Exception {
        test(Data.SINGLE_VALUED_DENSE_ENUM);
    }

    public void testSingleValuedDenseDate() throws Exception {
        test(Data.SINGLE_VALUED_DENSE_DATE);
    }

    public void testSingleValuedSparseRandom() throws Exception {
        test(Data.SINGLE_VALUED_SPARSE_RANDOM);
    }

    public void testMultiValuedDate() throws Exception {
        test(Data.MULTI_VALUED_DATE);
    }

    public void testMultiValuedEnum() throws Exception {
        test(Data.MULTI_VALUED_ENUM);
    }

    public void testMultiValuedSparseRandom() throws Exception {
        test(Data.MULTI_VALUED_SPARSE_RANDOM);
    }

    public void testMultiValuedDenseRandom() throws Exception {
        test(Data.MULTI_VALUED_DENSE_RANDOM);
    }

}

@ -52,121 +52,127 @@ import static org.hamcrest.Matchers.equalTo;
/**
 */
public abstract class AbstractNumberNestedSortingTestCase extends AbstractFieldDataTestCase {

    @Override
    protected boolean hasDocValues() {
        return true;
    }

    public void testNestedSorting() throws Exception {
        List<Document> docs = new ArrayList<>();
        Document document = new Document();
        document.add(createField("field2", 3, Field.Store.NO));
        document.add(createField("field2", 3));
        document.add(new StringField("filter_1", "T", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(createField("field2", 3, Field.Store.NO));
        document.add(createField("field2", 3));
        document.add(new StringField("filter_1", "T", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(createField("field2", 3, Field.Store.NO));
        document.add(createField("field2", 3));
        document.add(new StringField("filter_1", "T", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(new StringField("__type", "parent", Field.Store.NO));
        document.add(createField("field1", 1, Field.Store.NO));
        document.add(createField("field1", 1));
        docs.add(document);
        writer.addDocuments(docs);
        writer.commit();

        docs.clear();
        document = new Document();
        document.add(createField("field2", 3, Field.Store.NO));
        document.add(createField("field2", 3));
        document.add(new StringField("filter_1", "T", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(createField("field2", 3, Field.Store.NO));
        document.add(createField("field2", 3));
        document.add(new StringField("filter_1", "T", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(createField("field2", 2, Field.Store.NO));
        document.add(createField("field2", 2));
        document.add(new StringField("filter_1", "T", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(new StringField("__type", "parent", Field.Store.NO));
        document.add(createField("field1", 2, Field.Store.NO));
        document.add(createField("field1", 2));
        docs.add(document);
        writer.addDocuments(docs);

        docs.clear();
        document = new Document();
        document.add(createField("field2", 3, Field.Store.NO));
        document.add(createField("field2", 3));
        document.add(new StringField("filter_1", "T", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(createField("field2", 3, Field.Store.NO));
        document.add(createField("field2", 3));
        document.add(new StringField("filter_1", "T", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(createField("field2", 1, Field.Store.NO));
        document.add(createField("field2", 1));
        document.add(new StringField("filter_1", "T", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(new StringField("__type", "parent", Field.Store.NO));
        document.add(createField("field1", 3, Field.Store.NO));
        document.add(createField("field1", 3));
        docs.add(document);
        writer.addDocuments(docs);

        docs.clear();
        document = new Document();
        document.add(createField("field2", 3, Field.Store.NO));
        document.add(createField("field2", 3));
        document.add(new StringField("filter_1", "T", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(createField("field2", 3, Field.Store.NO));
        document.add(createField("field2", 3));
        document.add(new StringField("filter_1", "F", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(createField("field2", 4, Field.Store.NO));
        document.add(createField("field2", 4));
        document.add(new StringField("filter_1", "F", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(new StringField("__type", "parent", Field.Store.NO));
        document.add(createField("field1", 4, Field.Store.NO));
        document.add(createField("field1", 4));
        docs.add(document);
        writer.addDocuments(docs);
        writer.commit();

        docs.clear();
        document = new Document();
        document.add(createField("field2", 3, Field.Store.NO));
        document.add(createField("field2", 3));
        document.add(new StringField("filter_1", "F", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(createField("field2", 3, Field.Store.NO));
        document.add(createField("field2", 3));
        document.add(new StringField("filter_1", "F", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(createField("field2", 5, Field.Store.NO));
        document.add(createField("field2", 5));
        document.add(new StringField("filter_1", "F", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(new StringField("__type", "parent", Field.Store.NO));
        document.add(createField("field1", 5, Field.Store.NO));
        document.add(createField("field1", 5));
        docs.add(document);
        writer.addDocuments(docs);
        writer.commit();

        docs.clear();
        document = new Document();
        document.add(createField("field2", 3, Field.Store.NO));
        document.add(createField("field2", 3));
        document.add(new StringField("filter_1", "T", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(createField("field2", 3, Field.Store.NO));
        document.add(createField("field2", 3));
        document.add(new StringField("filter_1", "T", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(createField("field2", 6, Field.Store.NO));
        document.add(createField("field2", 6));
        document.add(new StringField("filter_1", "T", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(new StringField("__type", "parent", Field.Store.NO));
        document.add(createField("field1", 6, Field.Store.NO));
        document.add(createField("field1", 6));
        docs.add(document);
        writer.addDocuments(docs);
        writer.commit();

@ -174,26 +180,26 @@ public abstract class AbstractNumberNestedSortingTestCase extends AbstractFieldD
        // This doc will not be included, because it doesn't have nested docs
        document = new Document();
        document.add(new StringField("__type", "parent", Field.Store.NO));
        document.add(createField("field1", 7, Field.Store.NO));
        document.add(createField("field1", 7));
        writer.addDocument(document);
        writer.commit();

        docs.clear();
        document = new Document();
        document.add(createField("field2", 3, Field.Store.NO));
        document.add(createField("field2", 3));
        document.add(new StringField("filter_1", "T", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(createField("field2", 3, Field.Store.NO));
        document.add(createField("field2", 3));
        document.add(new StringField("filter_1", "F", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(createField("field2", 7, Field.Store.NO));
        document.add(createField("field2", 7));
        document.add(new StringField("filter_1", "F", Field.Store.NO));
        docs.add(document);
        document = new Document();
        document.add(new StringField("__type", "parent", Field.Store.NO));
        document.add(createField("field1", 8, Field.Store.NO));
        document.add(createField("field1", 8));
        docs.add(document);
        writer.addDocuments(docs);
        writer.commit();

@ -342,7 +348,7 @@ public abstract class AbstractNumberNestedSortingTestCase extends AbstractFieldD
        assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).intValue(), equalTo(4));
    }

    protected abstract IndexableField createField(String name, int value, Field.Store store);
    protected abstract IndexableField createField(String name, int value);

    protected abstract IndexFieldData.XFieldComparatorSource createFieldComparator(String fieldName, MultiValueMode sortMode, Object missingValue, Nested nested);


@ -18,8 +18,7 @@
 */
package org.elasticsearch.index.search.nested;

import org.apache.lucene.document.DoubleField;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.FieldDoc;

@ -31,13 +30,14 @@ import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.join.QueryBitSetProducer;
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.search.join.ToParentBlockJoinQuery;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource;
import org.elasticsearch.index.fielddata.plain.DoubleArrayIndexFieldData;
import org.elasticsearch.search.MultiValueMode;

import java.io.IOException;

@ -55,13 +55,13 @@ public class DoubleNestedSortingTests extends AbstractNumberNestedSortingTestCas

    @Override
    protected IndexFieldData.XFieldComparatorSource createFieldComparator(String fieldName, MultiValueMode sortMode, Object missingValue, Nested nested) {
        DoubleArrayIndexFieldData fieldData = getForField(fieldName);
        IndexNumericFieldData fieldData = getForField(fieldName);
        return new DoubleValuesComparatorSource(fieldData, missingValue, sortMode, nested);
    }

    @Override
    protected IndexableField createField(String name, int value, Field.Store store) {
        return new DoubleField(name, value, store);
    protected IndexableField createField(String name, int value) {
        return new SortedNumericDocValuesField(name, NumericUtils.doubleToSortableLong(value));
}
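
    // A quick sketch of the round trip behind the encoding above, assuming only
    // Lucene's NumericUtils (imported in this file); values are illustrative:
    //
    //     long sortable = NumericUtils.doubleToSortableLong(2.5);
    //     double back = NumericUtils.sortableLongToDouble(sortable); // back == 2.5
    //
    // The encoding preserves ordering (negative values included), so sorting the
    // doc-values longs sorts the original doubles correctly.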

    @Override

@ -18,8 +18,7 @@
 */
package org.elasticsearch.index.search.nested;

import org.apache.lucene.document.Field;
import org.apache.lucene.document.FloatField;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.FieldDoc;

@ -31,13 +30,14 @@ import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.join.QueryBitSetProducer;
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.search.join.ToParentBlockJoinQuery;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSource;
import org.elasticsearch.index.fielddata.plain.FloatArrayIndexFieldData;
import org.elasticsearch.search.MultiValueMode;

import java.io.IOException;

@ -55,13 +55,13 @@ public class FloatNestedSortingTests extends DoubleNestedSortingTests {

    @Override
    protected IndexFieldData.XFieldComparatorSource createFieldComparator(String fieldName, MultiValueMode sortMode, Object missingValue, Nested nested) {
        FloatArrayIndexFieldData fieldData = getForField(fieldName);
        IndexNumericFieldData fieldData = getForField(fieldName);
        return new FloatValuesComparatorSource(fieldData, missingValue, sortMode, nested);
    }

    @Override
    protected IndexableField createField(String name, int value, Field.Store store) {
        return new FloatField(name, value, store);
    protected IndexableField createField(String name, int value) {
        return new SortedNumericDocValuesField(name, NumericUtils.floatToSortableInt(value));
}
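
    // Floats follow the same pattern with the int-sized encoding:
    // NumericUtils.sortableIntToFloat(NumericUtils.floatToSortableInt(6.5f)) == 6.5f,
    // and the mapping is order-preserving, so comparator results are unchanged.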

    protected void assertAvgScoreMode(Query parentFilter, IndexSearcher searcher, IndexFieldData.XFieldComparatorSource innerFieldComparator) throws IOException {

@ -18,14 +18,13 @@
 */
package org.elasticsearch.index.search.nested;

import org.apache.lucene.document.Field;
import org.apache.lucene.document.LongField;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource;
import org.elasticsearch.index.fielddata.plain.PackedArrayIndexFieldData;
import org.elasticsearch.search.MultiValueMode;

/**

@ -39,13 +38,13 @@ public class LongNestedSortingTests extends AbstractNumberNestedSortingTestCase

    @Override
    protected IndexFieldData.XFieldComparatorSource createFieldComparator(String fieldName, MultiValueMode sortMode, Object missingValue, Nested nested) {
        PackedArrayIndexFieldData fieldData = getForField(fieldName);
        IndexNumericFieldData fieldData = getForField(fieldName);
        return new LongValuesComparatorSource(fieldData, missingValue, sortMode, nested);
    }

    @Override
    protected IndexableField createField(String name, int value, Field.Store store) {
        return new LongField(name, value, store);
    protected IndexableField createField(String name, int value) {
        return new SortedNumericDocValuesField(name, value);
    }

}

@ -80,7 +80,6 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {
                // I don't use randomNumericType() here because I don't want "byte", and I want "float" and "double"
                .field("type", randomFrom(Arrays.asList("float", "long", "double", "short", "integer")))
                .startObject("fielddata")
                .field("format", randomNumericFieldDataFormat())
                .endObject() // fielddata
                .endObject() // test-num
                .endObject() // properties

@ -44,8 +44,7 @@ public class GeoPolygonIT extends ESIntegTestCase {
    protected void setupSuiteScopeCluster() throws Exception {
        XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("location").field("type", "geo_point").field("lat_lon", true)
                .startObject("fielddata").field("format", randomNumericFieldDataFormat()).endObject().endObject().endObject()
                .endObject().endObject();
                .endObject().endObject().endObject().endObject();
        assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder));
        ensureGreen();

@ -97,7 +97,6 @@ import org.elasticsearch.discovery.zen.elect.ElectMasterService;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.codec.CodecService;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Loading;

@ -409,7 +408,6 @@ public abstract class ESIntegTestCase extends ESTestCase {
                .startObject("template-longs")
                .field("match_mapping_type", "long")
                .startObject("mapping")
                .field("doc_values", randomBoolean())
                .startObject("fielddata")
                .field(Loading.KEY, randomFrom(Loading.LAZY, Loading.EAGER))
                .endObject()

@ -420,7 +418,6 @@ public abstract class ESIntegTestCase extends ESTestCase {
                .startObject("template-doubles")
                .field("match_mapping_type", "double")
                .startObject("mapping")
                .field("doc_values", randomBoolean())
                .startObject("fielddata")
                .field(Loading.KEY, randomFrom(Loading.LAZY, Loading.EAGER))
                .endObject()

@ -431,7 +428,6 @@ public abstract class ESIntegTestCase extends ESTestCase {
                .startObject("template-geo_points")
                .field("match_mapping_type", "geo_point")
                .startObject("mapping")
                .field("doc_values", randomBoolean())
                .startObject("fielddata")
                .field(Loading.KEY, randomFrom(Loading.LAZY, Loading.EAGER))
                .endObject()

@ -443,7 +439,6 @@ public abstract class ESIntegTestCase extends ESTestCase {
                .field("match_mapping_type", "boolean")
                .startObject("mapping")
                .startObject("fielddata")
                .field(FieldDataType.FORMAT_KEY, randomFrom("array", "doc_values"))
                .field(Loading.KEY, randomFrom(Loading.LAZY, Loading.EAGER))
                .endObject()
                .endObject()

@ -1864,13 +1859,6 @@ public abstract class ESIntegTestCase extends ESTestCase {
        return perTestRatio;
    }

    /**
     * Returns a random numeric field data format from the choices of "array" or "doc_values".
     */
    public static String randomNumericFieldDataFormat() {
        return randomFrom(Arrays.asList("array", "doc_values"));
    }

    /**
     * Returns a random JODA Time Zone based on Java Time Zones
     */

@ -73,8 +73,7 @@ public class GeoDistanceTests extends ESIntegTestCase {
    public void testSimpleDistance() throws Exception {
        XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("location").field("type", "geo_point").field("lat_lon", true)
                .startObject("fielddata").field("format", randomNumericFieldDataFormat()).endObject().endObject().endObject()
                .endObject().endObject();
                .endObject().endObject().endObject().endObject();
        assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder));
        ensureGreen();

@ -225,8 +224,8 @@ public class GeoDistanceTests extends ESIntegTestCase {
    public void testDistanceSortingMVFields() throws Exception {
        XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("locations").field("type", "geo_point").field("lat_lon", true)
                .field("ignore_malformed", true).field("coerce", true).startObject("fielddata")
                .field("format", randomNumericFieldDataFormat()).endObject().endObject().endObject().endObject().endObject();
                .field("ignore_malformed", true).field("coerce", true)
                .endObject().endObject().endObject().endObject();
        assertAcked(prepareCreate("test")
                .addMapping("type1", xContentBuilder));
        ensureGreen();

@ -360,8 +359,7 @@ public class GeoDistanceTests extends ESIntegTestCase {
    public void testDistanceSortingWithMissingGeoPoint() throws Exception {
        XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("locations").field("type", "geo_point").field("lat_lon", true)
                .startObject("fielddata").field("format", randomNumericFieldDataFormat()).endObject().endObject().endObject()
                .endObject().endObject();
                .endObject().endObject().endObject().endObject();
        assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder));
        ensureGreen();

@ -492,7 +490,7 @@ public class GeoDistanceTests extends ESIntegTestCase {
                .startObject("properties")
                .startObject("name").field("type", "string").endObject()
                .startObject("location").field("type", "geo_point").field("lat_lon", true)
                .startObject("fielddata").field("format", randomNumericFieldDataFormat()).endObject().endObject()
                .endObject()
                .endObject()
                .endObject()
                .endObject()

@ -659,9 +657,6 @@ public class GeoDistanceTests extends ESIntegTestCase {
                .field("geohash", true)
                .field("geohash_precision", 24)
                .field("lat_lon", true)
                .startObject("fielddata")
                .field("format", randomNumericFieldDataFormat())
                .endObject()
                .endObject()
                .endObject()
                .endObject()