Use BINARY doc values instead of SORTED_SET doc values to store numeric data.

Although SORTED_SET doc values make things like terms aggregations very fast thanks to the use of ordinals, ordinals are usually not that useful on numeric data: we are more interested in the values themselves, in order to compute sums, averages, etc. However, SORTED_SET is quite slow at accessing values, so BINARY doc values are better suited to storing numeric data. floats and doubles are encoded without compression in little-endian byte order (so that decoding may be optimizable through sun.misc.Unsafe in the future, given that most computers nowadays use little-endian byte order), while byte, short, int, and long are encoded using vLong encoding: the minimum value is encoded first using zig-zag encoding (so that negative values become small positive ones), followed by the deltas between successive values.

Closes #3993
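As a concrete illustration of the scheme (a sketch using the ByteUtils helpers added in this commit; this snippet is not part of the commit itself), a document holding the values {13, -5, 100} for a field would be laid out as follows:

    // values are first sorted and deduplicated: [-5, 13, 100]
    ByteArrayDataOutput out = new ByteArrayDataOutput(new byte[32]);
    // the minimum value is zig-zag encoded (-5 becomes 9) and written as a vLong
    ByteUtils.writeVLong(out, ByteUtils.zigZagEncode(-5L)); // 1 byte
    // each subsequent value is written as a plain vLong delta, which is
    // guaranteed non-negative because the values are sorted
    ByteUtils.writeVLong(out, 13L - (-5L));  // 1 byte: 18
    ByteUtils.writeVLong(out, 100L - 13L);   // 1 byte: 87

The matching decode loop lives in BinaryDVNumericAtomicFieldData.setDocument() below.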
This commit is contained in:
parent: 6fbcd8f8ff
commit: a04d18d2d2
org.elasticsearch.common.util.ByteUtils (new file)
@@ -0,0 +1,140 @@

/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.util;

import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.ByteArrayDataOutput;

/** Utility methods to do byte-level encoding. These methods are biased towards little-endian byte order because it is the most
 *  common byte order and reading several bytes at once may be optimizable in the future with the help of sun.misc.Unsafe. */
public enum ByteUtils {
    ;

    public static final int MAX_BYTES_VLONG = 9;

    /** Zig-zag decode. */
    public static long zigZagDecode(long n) {
        return ((n >>> 1) ^ -(n & 1));
    }

    /** Zig-zag encode: this helps transforming small signed numbers into small positive numbers. */
    public static long zigZagEncode(long n) {
        return (n >> 63) ^ (n << 1);
    }

    /** Write a long in little-endian format. */
    public static void writeLongLE(long l, byte[] arr, int offset) {
        for (int i = 0; i < 8; ++i) {
            arr[offset++] = (byte) l;
            l >>>= 8;
        }
        assert l == 0;
    }

    /** Read a long in little-endian format. */
    public static long readLongLE(byte[] arr, int offset) {
        long l = arr[offset++] & 0xFFL;
        for (int i = 1; i < 8; ++i) {
            l |= (arr[offset++] & 0xFFL) << (8 * i);
        }
        return l;
    }

    /** Write an int in little-endian format. */
    public static void writeIntLE(int l, byte[] arr, int offset) {
        for (int i = 0; i < 4; ++i) {
            arr[offset++] = (byte) l;
            l >>>= 8;
        }
        assert l == 0;
    }

    /** Read an int in little-endian format. */
    public static int readIntLE(byte[] arr, int offset) {
        int l = arr[offset++] & 0xFF;
        for (int i = 1; i < 4; ++i) {
            l |= (arr[offset++] & 0xFF) << (8 * i);
        }
        return l;
    }

    /** Write a double in little-endian format. */
    public static void writeDoubleLE(double d, byte[] arr, int offset) {
        writeLongLE(Double.doubleToRawLongBits(d), arr, offset);
    }

    /** Read a double in little-endian format. */
    public static double readDoubleLE(byte[] arr, int offset) {
        return Double.longBitsToDouble(readLongLE(arr, offset));
    }

    /** Write a float in little-endian format. */
    public static void writeFloatLE(float d, byte[] arr, int offset) {
        writeIntLE(Float.floatToRawIntBits(d), arr, offset);
    }

    /** Read a float in little-endian format. */
    public static float readFloatLE(byte[] arr, int offset) {
        return Float.intBitsToFloat(readIntLE(arr, offset));
    }

    /** Same as DataOutput#writeVLong but accepts negative values (written on 9 bytes). */
    public static void writeVLong(ByteArrayDataOutput out, long i) {
        for (int k = 0; k < 8 && (i & ~0x7FL) != 0L; ++k) {
            out.writeByte((byte) ((i & 0x7FL) | 0x80L));
            i >>>= 7;
        }
        out.writeByte((byte) i);
    }

    /** Same as DataInput#readVLong but can read negative values (read on 9 bytes). */
    public static long readVLong(ByteArrayDataInput in) {
        // unrolled because of hotspot bugs, see Lucene's impl
        byte b = in.readByte();
        if (b >= 0) return b;
        long i = b & 0x7FL;
        b = in.readByte();
        i |= (b & 0x7FL) << 7;
        if (b >= 0) return i;
        b = in.readByte();
        i |= (b & 0x7FL) << 14;
        if (b >= 0) return i;
        b = in.readByte();
        i |= (b & 0x7FL) << 21;
        if (b >= 0) return i;
        b = in.readByte();
        i |= (b & 0x7FL) << 28;
        if (b >= 0) return i;
        b = in.readByte();
        i |= (b & 0x7FL) << 35;
        if (b >= 0) return i;
        b = in.readByte();
        i |= (b & 0x7FL) << 42;
        if (b >= 0) return i;
        b = in.readByte();
        i |= (b & 0x7FL) << 49;
        if (b >= 0) return i;
        b = in.readByte();
        i |= (b & 0xFFL) << 56;
        return i;
    }

}
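A quick round-trip of the variant above (a hypothetical test snippet, not part of the commit): unlike Lucene's DataOutput#writeVLong, negative values are legal here and simply occupy the maximum of 9 bytes.

    byte[] buf = new byte[ByteUtils.MAX_BYTES_VLONG];
    ByteArrayDataOutput out = new ByteArrayDataOutput(buf);
    ByteUtils.writeVLong(out, -1L);            // 8 continuation bytes + 1 final byte
    assert out.getPosition() == 9;
    ByteArrayDataInput in = new ByteArrayDataInput(buf);
    assert ByteUtils.readVLong(in) == -1L;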
org.elasticsearch.common.util.CollectionUtils (new file)
@@ -0,0 +1,194 @@

/* Apache License header, identical to the one in ByteUtils above. */

package org.elasticsearch.common.util;

import com.carrotsearch.hppc.DoubleArrayList;
import com.carrotsearch.hppc.FloatArrayList;
import com.carrotsearch.hppc.LongArrayList;
import org.apache.lucene.util.IntroSorter;

/** Collections-related utility methods. */
public enum CollectionUtils {
    ;

    private static int compare(long i, long j) {
        return i < j ? -1 : (i == j ? 0 : 1);
    }

    public static void sort(LongArrayList list) {
        sort(list.buffer, list.size());
    }

    public static void sort(final long[] array, int len) {
        new IntroSorter() {

            long pivot;

            @Override
            protected void swap(int i, int j) {
                final long tmp = array[i];
                array[i] = array[j];
                array[j] = tmp;
            }

            @Override
            protected int compare(int i, int j) {
                return CollectionUtils.compare(array[i], array[j]);
            }

            @Override
            protected void setPivot(int i) {
                pivot = array[i];
            }

            @Override
            protected int comparePivot(int j) {
                return CollectionUtils.compare(pivot, array[j]);
            }

        }.sort(0, len);
    }

    public static void sortAndDedup(LongArrayList list) {
        list.elementsCount = sortAndDedup(list.buffer, list.elementsCount);
    }

    /** Sort and deduplicate values in-place, then return the unique element count. */
    public static int sortAndDedup(long[] array, int len) {
        if (len <= 1) {
            return len;
        }
        sort(array, len);
        int uniqueCount = 1;
        for (int i = 1; i < len; ++i) {
            if (array[i] != array[i - 1]) {
                array[uniqueCount++] = array[i];
            }
        }
        return uniqueCount;
    }

    public static void sort(FloatArrayList list) {
        sort(list.buffer, list.size());
    }

    public static void sort(final float[] array, int len) {
        new IntroSorter() {

            float pivot;

            @Override
            protected void swap(int i, int j) {
                final float tmp = array[i];
                array[i] = array[j];
                array[j] = tmp;
            }

            @Override
            protected int compare(int i, int j) {
                return Float.compare(array[i], array[j]);
            }

            @Override
            protected void setPivot(int i) {
                pivot = array[i];
            }

            @Override
            protected int comparePivot(int j) {
                return Float.compare(pivot, array[j]);
            }

        }.sort(0, len);
    }

    public static void sortAndDedup(FloatArrayList list) {
        list.elementsCount = sortAndDedup(list.buffer, list.elementsCount);
    }

    /** Sort and deduplicate values in-place, then return the unique element count. */
    public static int sortAndDedup(float[] array, int len) {
        if (len <= 1) {
            return len;
        }
        sort(array, len);
        int uniqueCount = 1;
        for (int i = 1; i < len; ++i) {
            if (Float.compare(array[i], array[i - 1]) != 0) {
                array[uniqueCount++] = array[i];
            }
        }
        return uniqueCount;
    }

    public static void sort(DoubleArrayList list) {
        sort(list.buffer, list.size());
    }

    public static void sort(final double[] array, int len) {
        new IntroSorter() {

            double pivot;

            @Override
            protected void swap(int i, int j) {
                final double tmp = array[i];
                array[i] = array[j];
                array[j] = tmp;
            }

            @Override
            protected int compare(int i, int j) {
                return Double.compare(array[i], array[j]);
            }

            @Override
            protected void setPivot(int i) {
                pivot = array[i];
            }

            @Override
            protected int comparePivot(int j) {
                return Double.compare(pivot, array[j]);
            }

        }.sort(0, len);
    }

    public static void sortAndDedup(DoubleArrayList list) {
        list.elementsCount = sortAndDedup(list.buffer, list.elementsCount);
    }

    /** Sort and deduplicate values in-place, then return the unique element count. */
    public static int sortAndDedup(double[] array, int len) {
        if (len <= 1) {
            return len;
        }
        sort(array, len);
        int uniqueCount = 1;
        for (int i = 1; i < len; ++i) {
            if (Double.compare(array[i], array[i - 1]) != 0) {
                array[uniqueCount++] = array[i];
            }
        }
        return uniqueCount;
    }

}
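A small usage sketch (hypothetical, for illustration): sortAndDedup shrinks the list in place, which is exactly what the doc-values fields below rely on to store each distinct value once.

    LongArrayList values = new LongArrayList();
    values.add(7);
    values.add(3);
    values.add(7);
    CollectionUtils.sortAndDedup(values);
    // values is now [3, 7] and values.size() == 2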
@@ -20,7 +20,6 @@
 package org.elasticsearch.index.engine;
 
 import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.Filter;
@@ -37,6 +36,7 @@ import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.VersionType;
 import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit;
 import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.shard.IndexShardComponent;
 import org.elasticsearch.index.shard.ShardId;
@@ -31,6 +31,10 @@ public abstract class AbstractAtomicNumericFieldData implements AtomicNumericFieldData {
         this.isFloat = isFloat;
     }
 
+    @Override
+    public boolean isValuesOrdered() {
+        return false;
+    }
+
     @Override
     public ScriptDocValues getScriptValues() {
@@ -154,4 +154,21 @@ public abstract class DoubleValues {
         }
     }
 
+    /** Wrap a {@link LongValues} instance. */
+    public static DoubleValues asDoubleValues(final LongValues values) {
+        return new DoubleValues(values.isMultiValued()) {
+
+            @Override
+            public int setDocument(int docId) {
+                return values.setDocument(docId);
+            }
+
+            @Override
+            public double nextValue() {
+                return (double) values.nextValue();
+            }
+
+        };
+    }
+
 }
@@ -154,4 +154,21 @@ public abstract class LongValues {
         }
     }
 
+    /** Wrap a {@link DoubleValues} instance. */
+    public static LongValues asLongValues(final DoubleValues values) {
+        return new LongValues(values.isMultiValued()) {
+
+            @Override
+            public int setDocument(int docId) {
+                return values.setDocument(docId);
+            }
+
+            @Override
+            public long nextValue() {
+                return (long) values.nextValue();
+            }
+
+        };
+    }
+
 }
org.elasticsearch.index.fielddata.plain.BinaryDVNumericAtomicFieldData (new file)
@@ -0,0 +1,167 @@

/* Apache License header, identical to the one in ByteUtils above. */

package org.elasticsearch.index.fielddata.plain;

import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.util.ByteUtils;
import org.elasticsearch.index.fielddata.AbstractAtomicNumericFieldData;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.fielddata.LongValues;

final class BinaryDVNumericAtomicFieldData extends AbstractAtomicNumericFieldData {

    private final AtomicReader reader;
    private final BinaryDocValues values;
    private final NumericType numericType;

    BinaryDVNumericAtomicFieldData(AtomicReader reader, BinaryDocValues values, NumericType numericType) {
        super(numericType.isFloatingPoint());
        this.reader = reader;
        this.values = values == null ? BinaryDocValues.EMPTY : values;
        this.numericType = numericType;
    }

    @Override
    public LongValues getLongValues() {
        if (numericType.isFloatingPoint()) {
            return LongValues.asLongValues(getDoubleValues());
        }
        return new LongValues(true) {

            final BytesRef bytes = new BytesRef();
            final ByteArrayDataInput in = new ByteArrayDataInput();
            long[] longs = new long[8];
            int i = Integer.MAX_VALUE;
            int valueCount = 0;

            @Override
            public int setDocument(int docId) {
                values.get(docId, bytes);
                in.reset(bytes.bytes, bytes.offset, bytes.length);
                if (!in.eof()) {
                    // first value uses vLong on top of zig-zag encoding, then deltas are encoded using vLong
                    long previousValue = longs[0] = ByteUtils.zigZagDecode(ByteUtils.readVLong(in));
                    valueCount = 1;
                    while (!in.eof()) {
                        longs = ArrayUtil.grow(longs, valueCount + 1);
                        previousValue = longs[valueCount++] = previousValue + ByteUtils.readVLong(in);
                    }
                } else {
                    valueCount = 0;
                }
                i = 0;
                return valueCount;
            }

            @Override
            public long nextValue() {
                assert i < valueCount;
                return longs[i++];
            }

        };
    }

    @Override
    public DoubleValues getDoubleValues() {
        if (!numericType.isFloatingPoint()) {
            return DoubleValues.asDoubleValues(getLongValues());
        }
        switch (numericType) {
        case FLOAT:
            return new DoubleValues(true) {

                final BytesRef bytes = new BytesRef();
                int i = Integer.MAX_VALUE;
                int valueCount = 0;

                @Override
                public int setDocument(int docId) {
                    values.get(docId, bytes);
                    assert bytes.length % 4 == 0;
                    i = 0;
                    return valueCount = bytes.length / 4;
                }

                @Override
                public double nextValue() {
                    assert i < valueCount;
                    return ByteUtils.readFloatLE(bytes.bytes, bytes.offset + i++ * 4);
                }

            };
        case DOUBLE:
            return new DoubleValues(true) {

                final BytesRef bytes = new BytesRef();
                int i = Integer.MAX_VALUE;
                int valueCount = 0;

                @Override
                public int setDocument(int docId) {
                    values.get(docId, bytes);
                    assert bytes.length % 8 == 0;
                    i = 0;
                    return valueCount = bytes.length / 8;
                }

                @Override
                public double nextValue() {
                    assert i < valueCount;
                    return ByteUtils.readDoubleLE(bytes.bytes, bytes.offset + i++ * 8);
                }

            };
        default:
            throw new AssertionError();
        }
    }

    @Override
    public boolean isMultiValued() {
        return true; // no way to know
    }

    @Override
    public int getNumDocs() {
        return reader.maxDoc();
    }

    @Override
    public long getNumberUniqueValues() {
        return Long.MAX_VALUE; // no clue
    }

    @Override
    public long getMemorySizeInBytes() {
        return -1; // Lucene doesn't expose it
    }

    @Override
    public void close() {
        // no-op
    }

}
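For context, a sketch of how callers consume the LongValues returned above (assumed caller-side code, not part of this commit): setDocument returns the value count for the current document, and nextValue is then called that many times.

    LongValues values = atomicFieldData.getLongValues();
    final int numValues = values.setDocument(docId);
    long sum = 0;
    for (int j = 0; j < numValues; ++j) {
        sum += values.nextValue(); // values come back in increasing order
    }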
org.elasticsearch.index.fielddata.plain.BinaryDVNumericIndexFieldData (new file)
@@ -0,0 +1,81 @@

/* Apache License header, identical to the one in ByteUtils above. */

package org.elasticsearch.index.fielddata.plain;

import com.google.common.base.Preconditions;
import org.apache.lucene.index.AtomicReaderContext;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.SortMode;
import org.elasticsearch.index.mapper.FieldMapper.Names;

import java.io.IOException;

public class BinaryDVNumericIndexFieldData extends DocValuesIndexFieldData implements IndexNumericFieldData<BinaryDVNumericAtomicFieldData> {

    private final NumericType numericType;

    public BinaryDVNumericIndexFieldData(Index index, Names fieldNames, NumericType numericType) {
        super(index, fieldNames);
        Preconditions.checkArgument(numericType != null, "numericType must be non-null");
        this.numericType = numericType;
    }

    @Override
    public boolean valuesOrdered() {
        return false;
    }

    public org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource comparatorSource(final Object missingValue, final SortMode sortMode) {
        switch (numericType) {
        case FLOAT:
            return new FloatValuesComparatorSource(this, missingValue, sortMode);
        case DOUBLE:
            return new DoubleValuesComparatorSource(this, missingValue, sortMode);
        default:
            assert !numericType.isFloatingPoint();
            return new LongValuesComparatorSource(this, missingValue, sortMode);
        }
    }

    @Override
    public BinaryDVNumericAtomicFieldData load(AtomicReaderContext context) {
        try {
            return new BinaryDVNumericAtomicFieldData(context.reader(), context.reader().getBinaryDocValues(fieldNames.indexName()), numericType);
        } catch (IOException e) {
            throw new ElasticSearchIllegalStateException("Cannot load doc values", e);
        }
    }

    @Override
    public BinaryDVNumericAtomicFieldData loadDirect(AtomicReaderContext context) throws Exception {
        return load(context);
    }

    @Override
    public NumericType getNumericType() {
        return numericType;
    }

}
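For orientation, a sketch of how this class is reached at search time (assumed caller-side wiring; only the Builder change further down is part of this commit):

    // built by DocValuesIndexFieldData.Builder for numeric doc-values fields
    IndexNumericFieldData<BinaryDVNumericAtomicFieldData> fieldData =
            new BinaryDVNumericIndexFieldData(index, fieldNames, NumericType.DOUBLE);
    // per-segment load; cheap because Lucene itself caches the doc values
    BinaryDVNumericAtomicFieldData atomic = fieldData.load(atomicReaderContext);
    DoubleValues values = atomic.getDoubleValues();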
@@ -35,12 +35,10 @@ import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.atomic.AtomicLong;
 
 /** {@link IndexFieldData} impl based on Lucene's doc values. Caching is done on the Lucene side. */
 public abstract class DocValuesIndexFieldData {
 
-    private final AtomicLong maxUniqueValueCount;
     protected final Index index;
     protected final Names fieldNames;
 
@@ -48,7 +46,6 @@ public abstract class DocValuesIndexFieldData {
         super();
         this.index = index;
         this.fieldNames = fieldNames;
-        maxUniqueValueCount = new AtomicLong();
     }
 
     public final Names getFieldNames() {
@@ -67,19 +64,6 @@ public abstract class DocValuesIndexFieldData {
         return index;
     }
 
-    public final long getHighestNumberOfSeenUniqueValues() {
-        return maxUniqueValueCount.get();
-    }
-
-    void updateMaxUniqueValueCount(long uniqueValueCount) {
-        while (true) {
-            final long current = maxUniqueValueCount.get();
-            if (current >= uniqueValueCount || maxUniqueValueCount.compareAndSet(current, uniqueValueCount)) {
-                break;
-            }
-        }
-    }
-
     public static class Builder implements IndexFieldData.Builder {
 
         private static final Set<String> BINARY_INDEX_FIELD_NAMES = ImmutableSet.of(UidFieldMapper.NAME, IdFieldMapper.NAME);
@@ -108,7 +92,7 @@ public abstract class DocValuesIndexFieldData {
             assert !numericType.isFloatingPoint();
             return new NumericDVIndexFieldData(index, fieldNames);
         } else if (numericType != null) {
-            return new SortedSetDVNumericIndexFieldData(index, fieldNames, numericType);
+            return new BinaryDVNumericIndexFieldData(index, fieldNames, numericType);
         } else {
             return new SortedSetDVBytesIndexFieldData(index, fieldNames);
         }
@@ -43,9 +43,7 @@ public class SortedSetDVBytesIndexFieldData extends DocValuesIndexFieldData impl
 
     @Override
     public SortedSetDVBytesAtomicFieldData load(AtomicReaderContext context) {
-        final SortedSetDVBytesAtomicFieldData atomicFieldData = new SortedSetDVBytesAtomicFieldData(context.reader(), fieldNames.indexName());
-        updateMaxUniqueValueCount(atomicFieldData.getNumberUniqueValues());
-        return atomicFieldData;
+        return new SortedSetDVBytesAtomicFieldData(context.reader(), fieldNames.indexName());
     }
 
     @Override
org.elasticsearch.index.fielddata.plain.SortedSetDVNumericAtomicFieldData (deleted file)
@@ -1,103 +0,0 @@

/* Apache License header, identical to the one in ByteUtils above. */

package org.elasticsearch.index.fielddata.plain;

import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;

public class SortedSetDVNumericAtomicFieldData extends SortedSetDVAtomicFieldData implements AtomicNumericFieldData {

    private final NumericType numericType;

    SortedSetDVNumericAtomicFieldData(AtomicReader reader, String field, NumericType numericType) {
        super(reader, field);
        this.numericType = numericType;
    }

    @Override
    public boolean isValuesOrdered() {
        return false;
    }

    @Override
    public ScriptDocValues getScriptValues() {
        if (numericType.isFloatingPoint()) {
            return new ScriptDocValues.Doubles(getDoubleValues());
        } else {
            return new ScriptDocValues.Longs(getLongValues());
        }
    }

    @Override
    public LongValues getLongValues() {
        final BytesValues.WithOrdinals values = super.getBytesValues(false);
        return new LongValues.WithOrdinals(values.ordinals()) {
            @Override
            public long getValueByOrd(long ord) {
                assert ord != Ordinals.MISSING_ORDINAL;
                return numericType.toLong(values.getValueByOrd(ord));
            }
        };
    }

    @Override
    public DoubleValues getDoubleValues() {
        final BytesValues.WithOrdinals values = super.getBytesValues(false);
        return new DoubleValues.WithOrdinals(values.ordinals()) {
            @Override
            public double getValueByOrd(long ord) {
                assert ord != Ordinals.MISSING_ORDINAL;
                return numericType.toDouble(values.getValueByOrd(ord));
            }
        };
    }

    @Override
    public BytesValues.WithOrdinals getBytesValues(boolean needsHashes) {
        final BytesValues.WithOrdinals values = super.getBytesValues(needsHashes);
        return new BytesValues.WithOrdinals(values.ordinals()) {

            final BytesRef spare = new BytesRef(16);

            private BytesRef convert(BytesRef input, BytesRef output) {
                if (input.length == 0) {
                    return input;
                }
                if (numericType.isFloatingPoint()) {
                    output.copyChars(Double.toString(numericType.toDouble(input)));
                } else {
                    output.copyChars(Long.toString(numericType.toLong(input)));
                }
                return output;
            }

            @Override
            public BytesRef getValueByOrd(long ord) {
                return convert(values.getValueByOrd(ord), scratch);
            }

            @Override
            public Order getOrder() {
                return Order.NUMERIC;
            }

        };
    }

}
org.elasticsearch.index.fielddata.plain.SortedSetDVNumericIndexFieldData (deleted file)
@@ -1,172 +0,0 @@

/* Apache License header, identical to the one in ByteUtils above. */

package org.elasticsearch.index.fielddata.plain;

import com.google.common.base.Preconditions;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.SortField.Type;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.fieldcomparator.*;
import org.elasticsearch.index.mapper.FieldMapper.Names;

import java.io.IOException;

public class SortedSetDVNumericIndexFieldData extends DocValuesIndexFieldData implements IndexNumericFieldData<SortedSetDVNumericAtomicFieldData> {

    private final NumericType numericType;

    public SortedSetDVNumericIndexFieldData(Index index, Names fieldNames, NumericType numericType) {
        super(index, fieldNames);
        Preconditions.checkArgument(numericType != null, "numericType must be non-null");
        this.numericType = numericType;
    }

    @Override
    public boolean valuesOrdered() {
        return false;
    }

    public org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource comparatorSource(final Object missingValue, final SortMode sortMode) {
        if (sortMode == SortMode.SUM || sortMode == SortMode.AVG) {
            // sort based on an aggregation, we can't use ordinals here so it may be slowish
            switch (numericType) {
            case FLOAT:
                return new FloatValuesComparatorSource(this, missingValue, sortMode);
            case DOUBLE:
                return new DoubleValuesComparatorSource(this, missingValue, sortMode);
            default:
                assert !numericType.isFloatingPoint();
                return new LongValuesComparatorSource(this, missingValue, sortMode);
            }
        }
        assert sortMode == SortMode.MIN || sortMode == SortMode.MAX;
        // Otherwise (MIN/MAX), use ordinal-based comparison -> fast
        final IndexFieldData.WithOrdinals<?> bytesIndexFieldData = new SortedSetDVBytesIndexFieldData(index, fieldNames);
        return new XFieldComparatorSource() {

            @Override
            public Type reducedType() {
                return numericType.sortFieldType();
            }

            @Override
            public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
                assert fieldname.equals(bytesIndexFieldData.getFieldNames().indexName());

                final Number missingNumber = (Number) missingObject(missingValue, reversed);
                final BytesRef missingBytes = new BytesRef();
                numericType.toIndexForm(missingNumber, missingBytes);

                final BytesRefOrdValComparator in = new BytesRefOrdValComparator((IndexFieldData.WithOrdinals<?>) bytesIndexFieldData, numHits, sortMode, missingBytes);
                return new NumericFieldComparator(in, numericType);
            }

        };
    }

    private static class NumericFieldComparator extends NestedWrappableComparator<Number> {

        final NestedWrappableComparator<BytesRef> in;
        final NumericType numericType;
        final BytesRef spare;

        public NumericFieldComparator(NestedWrappableComparator<BytesRef> in, NumericType numericType) {
            this.in = in;
            this.numericType = numericType;
            spare = new BytesRef();
        }

        @Override
        public int compare(int slot1, int slot2) {
            return in.compare(slot1, slot2);
        }

        @Override
        public void setBottom(int slot) {
            in.setBottom(slot);
        }

        @Override
        public int compareBottom(int doc) throws IOException {
            return in.compareBottom(doc);
        }

        @Override
        public void copy(int slot, int doc) throws IOException {
            in.copy(slot, doc);
        }

        @Override
        public FieldComparator<Number> setNextReader(AtomicReaderContext context) throws IOException {
            return new NumericFieldComparator((NestedWrappableComparator<BytesRef>) in.setNextReader(context), numericType);
        }

        @Override
        public Number value(int slot) {
            final BytesRef value = in.value(slot);
            if (value == null) {
                return null;
            }
            return numericType.toNumber(value);
        }

        @Override
        public int compareDocToValue(int doc, Number value) throws IOException {
            if (value == null) {
                return in.compareDocToValue(doc, null);
            }
            numericType.toIndexForm(value, spare);
            return in.compareDocToValue(doc, spare);
        }

        @Override
        public void missing(int slot) {
            in.missing(slot);
        }

        @Override
        public int compareBottomMissing() {
            return in.compareBottomMissing();
        }

    }

    @Override
    public SortedSetDVNumericAtomicFieldData load(AtomicReaderContext context) {
        final SortedSetDVNumericAtomicFieldData atomicFieldData = new SortedSetDVNumericAtomicFieldData(context.reader(), fieldNames.indexName(), numericType);
        updateMaxUniqueValueCount(atomicFieldData.getNumberUniqueValues());
        return atomicFieldData;
    }

    @Override
    public SortedSetDVNumericAtomicFieldData loadDirect(AtomicReaderContext context) throws Exception {
        return load(context);
    }

    @Override
    public NumericType getNumericType() {
        return numericType;
    }

}
@@ -23,7 +23,6 @@ import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.IndexableField;
@@ -474,7 +473,7 @@ public class DocumentMapper implements ToXContent {
         if (parser == null) {
             parser = XContentHelper.createParser(source.source());
         }
-        context.reset(parser, new Document(), source, listener);
+        context.reset(parser, new ParseContext.Document(), source, listener);
         // on a newly created instance of document mapper, we always consider it as new mappers that have been added
         if (initMappersAdded) {
             context.setMappingsModified();
@@ -553,7 +552,7 @@ public class DocumentMapper implements ToXContent {
         // apply doc boost
         if (context.docBoost() != 1.0f) {
             Set<String> encounteredFields = Sets.newHashSet();
-            for (Document doc : context.docs()) {
+            for (ParseContext.Document doc : context.docs()) {
                 encounteredFields.clear();
                 for (IndexableField field : doc) {
                     if (field.fieldType().indexed() && !field.fieldType().omitNorms()) {
@@ -19,9 +19,14 @@
 
 package org.elasticsearch.index.mapper;
 
+import com.carrotsearch.hppc.ObjectObjectMap;
+import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.google.common.collect.Lists;
 import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.ElasticSearchIllegalStateException;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.lucene.all.AllEntries;
@ -30,16 +35,91 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
||||||
import org.elasticsearch.index.analysis.AnalysisService;
|
import org.elasticsearch.index.analysis.AnalysisService;
|
||||||
import org.elasticsearch.index.mapper.object.RootObjectMapper;
|
import org.elasticsearch.index.mapper.object.RootObjectMapper;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.*;
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
*
|
*
|
||||||
*/
|
*/
|
||||||
public class ParseContext {
|
public class ParseContext {
|
||||||
|
|
||||||
|
/** Fork of {@link org.apache.lucene.document.Document} with additional functionality. */
|
||||||
|
public static class Document implements Iterable<IndexableField> {
|
||||||
|
|
||||||
|
private final List<IndexableField> fields;
|
||||||
|
private ObjectObjectMap<Object, IndexableField> keyedFields;
|
||||||
|
|
||||||
|
public Document() {
|
||||||
|
fields = Lists.newArrayList();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Iterator<IndexableField> iterator() {
|
||||||
|
return fields.iterator();
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<IndexableField> getFields() {
|
||||||
|
return fields;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void add(IndexableField field) {
|
||||||
|
fields.add(field);
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Add fields so that they can later be fetched using {@link #getByKey(Object)}. */
|
||||||
|
public void addWithKey(Object key, IndexableField field) {
|
||||||
|
if (keyedFields == null) {
|
||||||
|
keyedFields = new ObjectObjectOpenHashMap<Object, IndexableField>();
|
||||||
|
} else if (keyedFields.containsKey(key)) {
|
||||||
|
throw new ElasticSearchIllegalStateException("Only one field can be stored per key");
|
||||||
|
}
|
||||||
|
keyedFields.put(key, field);
|
||||||
|
add(field);
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Get back fields that have been previously added with {@link #addWithKey(Object, IndexableField)}. */
|
||||||
|
public IndexableField getByKey(Object key) {
|
||||||
|
return keyedFields == null ? null : keyedFields.get(key);
|
||||||
|
}
|
||||||
|
|
||||||
|
public IndexableField[] getFields(String name) {
|
||||||
|
List<IndexableField> f = new ArrayList<IndexableField>();
|
||||||
|
for (IndexableField field : fields) {
|
||||||
|
if (field.name().equals(name)) {
|
||||||
|
f.add(field);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return f.toArray(new IndexableField[f.size()]);
|
||||||
|
}
|
||||||
|
|
||||||
|
public IndexableField getField(String name) {
|
||||||
|
for (IndexableField field : fields) {
|
||||||
|
if (field.name().equals(name)) {
|
||||||
|
return field;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String get(String name) {
|
||||||
|
for (IndexableField f : fields) {
|
||||||
|
if (f.name().equals(name) && f.stringValue() != null) {
|
||||||
|
return f.stringValue();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
public BytesRef getBinaryValue(String name) {
|
||||||
|
for (IndexableField f : fields) {
|
||||||
|
if (f.name().equals(name) && f.binaryValue() != null) {
|
||||||
|
return f.binaryValue();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
private final DocumentMapper docMapper;
|
private final DocumentMapper docMapper;
|
||||||
|
|
||||||
private final DocumentMapperParser docMapperParser;
|
private final DocumentMapperParser docMapperParser;
|
||||||
|
@ -50,7 +130,7 @@ public class ParseContext {
|
||||||
|
|
||||||
private Document document;
|
private Document document;
|
||||||
|
|
||||||
private List<Document> documents = new ArrayList<Document>();
|
private List<Document> documents = Lists.newArrayList();
|
||||||
|
|
||||||
private Analyzer analyzer;
|
private Analyzer analyzer;
|
||||||
|
|
||||||
|
@ -95,7 +175,7 @@ public class ParseContext {
|
||||||
this.parser = parser;
|
this.parser = parser;
|
||||||
this.document = document;
|
this.document = document;
|
||||||
if (document != null) {
|
if (document != null) {
|
||||||
this.documents = new ArrayList<Document>();
|
this.documents = Lists.newArrayList();
|
||||||
this.documents.add(document);
|
this.documents.add(document);
|
||||||
} else {
|
} else {
|
||||||
this.documents = null;
|
this.documents = null;
|
||||||
|
@ -315,4 +395,5 @@ public class ParseContext {
|
||||||
stringBuilder.setLength(0);
|
stringBuilder.setLength(0);
|
||||||
return this.stringBuilder;
|
return this.stringBuilder;
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -20,9 +20,9 @@
|
||||||
package org.elasticsearch.index.mapper;
|
package org.elasticsearch.index.mapper;
|
||||||
|
|
||||||
import org.apache.lucene.analysis.Analyzer;
|
import org.apache.lucene.analysis.Analyzer;
|
||||||
import org.apache.lucene.document.Document;
|
|
||||||
import org.apache.lucene.document.Field;
|
import org.apache.lucene.document.Field;
|
||||||
import org.elasticsearch.common.bytes.BytesReference;
|
import org.elasticsearch.common.bytes.BytesReference;
|
||||||
|
import org.elasticsearch.index.mapper.ParseContext.Document;
|
||||||
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A mapper that exists only as a mapper within a root object.
|
* A mapper that exists only as a mapper within a root object.
|
||||||
*/
|
*/
|
||||||
|
@ -36,4 +37,5 @@ public interface RootMapper extends Mapper {
|
||||||
* Should the mapper be included in the root {@link org.elasticsearch.index.mapper.object.ObjectMapper}.
|
* Should the mapper be included in the root {@link org.elasticsearch.index.mapper.object.ObjectMapper}.
|
||||||
*/
|
*/
|
||||||
boolean includeInObject();
|
boolean includeInObject();
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -324,7 +324,7 @@ public class ByteFieldMapper extends NumberFieldMapper<Byte> {
|
||||||
fields.add(field);
|
fields.add(field);
|
||||||
}
|
}
|
||||||
if (hasDocValues()) {
|
if (hasDocValues()) {
|
||||||
fields.add(toDocValues((int) value));
|
addDocValue(context, value);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -434,7 +434,7 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
|
||||||
fields.add(field);
|
fields.add(field);
|
||||||
}
|
}
|
||||||
if (hasDocValues()) {
|
if (hasDocValues()) {
|
||||||
fields.add(toDocValues(value));
|
addDocValue(context, value);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -19,10 +19,12 @@
|
||||||
|
|
||||||
package org.elasticsearch.index.mapper.core;
|
package org.elasticsearch.index.mapper.core;
|
||||||
|
|
||||||
|
import com.carrotsearch.hppc.DoubleArrayList;
|
||||||
import org.apache.lucene.analysis.Analyzer;
|
import org.apache.lucene.analysis.Analyzer;
|
||||||
import org.apache.lucene.analysis.TokenStream;
|
import org.apache.lucene.analysis.TokenStream;
|
||||||
import org.apache.lucene.document.Field;
|
import org.apache.lucene.document.Field;
|
||||||
import org.apache.lucene.document.FieldType;
|
import org.apache.lucene.document.FieldType;
|
||||||
|
import org.apache.lucene.index.FieldInfo;
|
||||||
import org.apache.lucene.search.Filter;
|
import org.apache.lucene.search.Filter;
|
||||||
import org.apache.lucene.search.NumericRangeFilter;
|
import org.apache.lucene.search.NumericRangeFilter;
|
||||||
import org.apache.lucene.search.NumericRangeQuery;
|
import org.apache.lucene.search.NumericRangeQuery;
|
||||||
|
@ -34,6 +36,8 @@ import org.elasticsearch.common.Explicit;
|
||||||
import org.elasticsearch.common.Nullable;
|
import org.elasticsearch.common.Nullable;
|
||||||
import org.elasticsearch.common.Numbers;
|
import org.elasticsearch.common.Numbers;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.common.util.ByteUtils;
|
||||||
|
import org.elasticsearch.common.util.CollectionUtils;
|
||||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||||
import org.elasticsearch.common.xcontent.XContentParser;
|
import org.elasticsearch.common.xcontent.XContentParser;
|
||||||
import org.elasticsearch.index.analysis.NumericDoubleAnalyzer;
|
import org.elasticsearch.index.analysis.NumericDoubleAnalyzer;
|
||||||
|
@ -310,7 +314,13 @@ public class DoubleFieldMapper extends NumberFieldMapper<Double> {
|
||||||
fields.add(field);
|
fields.add(field);
|
||||||
}
|
}
|
||||||
if (hasDocValues()) {
|
if (hasDocValues()) {
|
||||||
fields.add(toDocValues(value));
|
CustomDoubleNumericDocValuesField field = (CustomDoubleNumericDocValuesField) context.doc().getByKey(names().indexName());
|
||||||
|
if (field != null) {
|
||||||
|
field.add(value);
|
||||||
|
} else {
|
||||||
|
field = new CustomDoubleNumericDocValuesField(names().indexName(), value);
|
||||||
|
context.doc().addWithKey(names().indexName(), field);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -374,4 +384,37 @@ public class DoubleFieldMapper extends NumberFieldMapper<Double> {
|
||||||
return Double.toString(number);
|
return Double.toString(number);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public static class CustomDoubleNumericDocValuesField extends CustomNumericDocValuesField {
|
||||||
|
|
||||||
|
public static final FieldType TYPE = new FieldType();
|
||||||
|
static {
|
||||||
|
TYPE.setDocValueType(FieldInfo.DocValuesType.BINARY);
|
||||||
|
TYPE.freeze();
|
||||||
|
}
|
||||||
|
|
||||||
|
private final DoubleArrayList values;
|
||||||
|
|
||||||
|
public CustomDoubleNumericDocValuesField(String name, double value) {
|
||||||
|
super(name);
|
||||||
|
values = new DoubleArrayList();
|
||||||
|
add(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void add(double value) {
|
||||||
|
values.add(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public BytesRef binaryValue() {
|
||||||
|
CollectionUtils.sortAndDedup(values);
|
||||||
|
|
||||||
|
final byte[] bytes = new byte[values.size() * 8];
|
||||||
|
for (int i = 0; i < values.size(); ++i) {
|
||||||
|
ByteUtils.writeDoubleLE(values.get(i), bytes, i * 8);
|
||||||
|
}
|
||||||
|
return new BytesRef(bytes);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -19,10 +19,12 @@
 package org.elasticsearch.index.mapper.core;

+import com.carrotsearch.hppc.FloatArrayList;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
+import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.NumericRangeFilter;
 import org.apache.lucene.search.NumericRangeQuery;

@@ -35,6 +37,8 @@ import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Numbers;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.ByteUtils;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.analysis.NumericFloatAnalyzer;
@@ -316,7 +320,13 @@ public class FloatFieldMapper extends NumberFieldMapper<Float> {
             fields.add(field);
         }
         if (hasDocValues()) {
-            fields.add(toDocValues(value));
+            CustomFloatNumericDocValuesField field = (CustomFloatNumericDocValuesField) context.doc().getByKey(names().indexName());
+            if (field != null) {
+                field.add(value);
+            } else {
+                field = new CustomFloatNumericDocValuesField(names().indexName(), value);
+                context.doc().addWithKey(names().indexName(), field);
+            }
         }
     }

@@ -381,4 +391,37 @@ public class FloatFieldMapper extends NumberFieldMapper<Float> {
         return Float.toString(number);
     }
 }
+
+    public static class CustomFloatNumericDocValuesField extends CustomNumericDocValuesField {
+
+        public static final FieldType TYPE = new FieldType();
+        static {
+            TYPE.setDocValueType(FieldInfo.DocValuesType.BINARY);
+            TYPE.freeze();
+        }
+
+        private final FloatArrayList values;
+
+        public CustomFloatNumericDocValuesField(String name, float value) {
+            super(name);
+            values = new FloatArrayList();
+            add(value);
+        }
+
+        public void add(float value) {
+            values.add(value);
+        }
+
+        @Override
+        public BytesRef binaryValue() {
+            CollectionUtils.sortAndDedup(values);
+
+            final byte[] bytes = new byte[values.size() * 4];
+            for (int i = 0; i < values.size(); ++i) {
+                ByteUtils.writeFloatLE(values.get(i), bytes, i * 4);
+            }
+            return new BytesRef(bytes);
+        }
+
+    }
@@ -23,7 +23,6 @@ import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.NumericRangeFilter;
 import org.apache.lucene.search.NumericRangeQuery;
@@ -313,19 +312,17 @@ public class IntegerFieldMapper extends NumberFieldMapper<Integer> {
                 }
             }
         }
-        addIntegerFields(fields, value, boost);
+        addIntegerFields(context, fields, value, boost);
     }

-    protected void addIntegerFields(List<Field> fields, int value, float boost) {
+    protected void addIntegerFields(ParseContext context, List<Field> fields, int value, float boost) {
         if (fieldType.indexed() || fieldType.stored()) {
             CustomIntegerNumericField field = new CustomIntegerNumericField(this, value, fieldType);
             field.setBoost(boost);
             fields.add(field);
         }
         if (hasDocValues()) {
-            final BytesRef bytes = new BytesRef();
-            NumericUtils.intToPrefixCoded(value, 0, bytes);
-            fields.add(new SortedSetDocValuesField(names.indexName(), bytes));
+            addDocValue(context, value);
         }
     }

@@ -308,7 +308,7 @@ public class LongFieldMapper extends NumberFieldMapper<Long> {
             fields.add(field);
         }
         if (hasDocValues()) {
-            fields.add(toDocValues(value));
+            addDocValue(context, value);
         }
     }

@@ -20,19 +20,27 @@
 package org.elasticsearch.index.mapper.core;

 import com.carrotsearch.hppc.DoubleOpenHashSet;
+import com.carrotsearch.hppc.LongArrayList;
 import com.carrotsearch.hppc.LongOpenHashSet;
+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.NumericTokenStream;
+import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.SortedSetDocValuesField;
+import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.index.IndexableFieldType;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.store.ByteArrayDataOutput;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.NumericUtils;
 import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.ByteUtils;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
@@ -186,40 +194,18 @@ public abstract class NumberFieldMapper<T extends Number> extends AbstractFieldM
         }
     }

-    /**
-     * Utility method to convert a long to a doc values field using {@link NumericUtils} encoding.
-     */
-    protected final Field toDocValues(long l) {
-        final BytesRef bytes = new BytesRef();
-        NumericUtils.longToPrefixCoded(l, 0, bytes);
-        return new SortedSetDocValuesField(names().indexName(), bytes);
-    }
-
-    /**
-     * Utility method to convert an int to a doc values field using {@link NumericUtils} encoding.
-     */
-    protected final Field toDocValues(int i) {
-        final BytesRef bytes = new BytesRef();
-        NumericUtils.intToPrefixCoded(i, 0, bytes);
-        return new SortedSetDocValuesField(names().indexName(), bytes);
-    }
-
-    /**
-     * Utility method to convert a float to a doc values field using {@link NumericUtils} encoding.
-     */
-    protected final Field toDocValues(float f) {
-        return toDocValues(NumericUtils.floatToSortableInt(f));
-    }
-
-    /**
-     * Utility method to convert a double to a doc values field using {@link NumericUtils} encoding.
-     */
-    protected final Field toDocValues(double d) {
-        return toDocValues(NumericUtils.doubleToSortableLong(d));
-    }
-
     protected abstract void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException;

+    protected final void addDocValue(ParseContext context, long value) {
+        CustomLongNumericDocValuesField field = (CustomLongNumericDocValuesField) context.doc().getByKey(names().indexName());
+        if (field != null) {
+            field.add(value);
+        } else {
+            field = new CustomLongNumericDocValuesField(names().indexName(), value);
+            context.doc().addWithKey(names().indexName(), field);
+        }
+    }
+
     /**
      * Use the field query created here when matching on numbers.
      */
@@ -386,6 +372,96 @@ public abstract class NumberFieldMapper<T extends Number> extends AbstractFieldM
         public abstract String numericAsString();
     }

+    public static abstract class CustomNumericDocValuesField implements IndexableField {
+
+        public static final FieldType TYPE = new FieldType();
+        static {
+            TYPE.setDocValueType(FieldInfo.DocValuesType.BINARY);
+            TYPE.freeze();
+        }
+
+        private final String name;
+
+        public CustomNumericDocValuesField(String name) {
+            this.name = name;
+        }
+
+        @Override
+        public String name() {
+            return name;
+        }
+
+        @Override
+        public IndexableFieldType fieldType() {
+            return TYPE;
+        }
+
+        @Override
+        public float boost() {
+            return 1f;
+        }
+
+        @Override
+        public String stringValue() {
+            return null;
+        }
+
+        @Override
+        public Reader readerValue() {
+            return null;
+        }
+
+        @Override
+        public Number numericValue() {
+            return null;
+        }
+
+        @Override
+        public TokenStream tokenStream(Analyzer analyzer) throws IOException {
+            return null;
+        }
+
+    }
+
+    public static class CustomLongNumericDocValuesField extends CustomNumericDocValuesField {
+
+        public static final FieldType TYPE = new FieldType();
+        static {
+            TYPE.setDocValueType(FieldInfo.DocValuesType.BINARY);
+            TYPE.freeze();
+        }
+
+        private final LongArrayList values;
+
+        public CustomLongNumericDocValuesField(String name, long value) {
+            super(name);
+            values = new LongArrayList();
+            add(value);
+        }
+
+        public void add(long value) {
+            values.add(value);
+        }
+
+        @Override
+        public BytesRef binaryValue() {
+            CollectionUtils.sortAndDedup(values);
+
+            // here is the trick:
+            // - the first value is zig-zag encoded so that eg. -5 would become positive and would be better compressed by vLong
+            // - for other values, we only encode deltas using vLong
+            final byte[] bytes = new byte[values.size() * ByteUtils.MAX_BYTES_VLONG];
+            final ByteArrayDataOutput out = new ByteArrayDataOutput(bytes);
+            ByteUtils.writeVLong(out, ByteUtils.zigZagEncode(values.get(0)));
+            for (int i = 1; i < values.size(); ++i) {
+                final long delta = values.get(i) - values.get(i - 1);
+                ByteUtils.writeVLong(out, delta);
+            }
+            return new BytesRef(bytes, 0, out.getPosition());
+        }
+
+    }
+
     @Override
     protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
         super.doXContentBody(builder, includeDefaults, params);
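As an aside (not part of this commit): decoding the variable-length payload written by binaryValue() above is the mirror image of the "trick" described in its comment — zig-zag decode the first vLong, then keep adding vLong deltas. A minimal sketch assuming the ByteUtils read-side helpers exercised by the tests further down in this diff; since no count is stored in the payload, the caller must know how many values to expect or read until the input is exhausted (the class name is hypothetical):

import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.util.ByteUtils;

public class LongDocValuesDecodeSketch {
    /** Decode 'count' longs from a payload produced by CustomLongNumericDocValuesField#binaryValue(). */
    public static long[] decode(BytesRef bytes, int count) {
        final ByteArrayDataInput in = new ByteArrayDataInput(bytes.bytes, bytes.offset, bytes.length);
        final long[] values = new long[count];
        // the first value is zig-zag encoded; every following value is a
        // non-negative delta from its predecessor (values are sorted and deduplicated)
        values[0] = ByteUtils.zigZagDecode(ByteUtils.readVLong(in));
        for (int i = 1; i < count; ++i) {
            values[i] = values[i - 1] + ByteUtils.readVLong(in);
        }
        return values;
    }
}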
@@ -323,7 +323,7 @@ public class ShortFieldMapper extends NumberFieldMapper<Short> {
             fields.add(field);
         }
         if (hasDocValues()) {
-            fields.add(toDocValues((int) value));
+            addDocValue(context, value);
         }
     }

@@ -136,7 +136,7 @@ public class TokenCountFieldMapper extends IntegerFieldMapper {
             } else {
                 count = countPositions(analyzer.analyzer().tokenStream(name(), valueAndBoost.value()));
             }
-            addIntegerFields(fields, count, valueAndBoost.boost());
+            addIntegerFields(context, fields, count, valueAndBoost.boost());
         }
         if (fields.isEmpty()) {
             context.ignoredValue(names.indexName(), valueAndBoost.value());
@@ -20,7 +20,6 @@
 package org.elasticsearch.index.mapper.internal;

 import org.apache.lucene.document.BinaryDocValuesField;
-import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.FieldInfo;

@@ -38,6 +37,7 @@ import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatService;
 import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.index.mapper.core.AbstractFieldMapper;

 import java.io.IOException;
@@ -19,7 +19,6 @@

 package org.elasticsearch.index.mapper.internal;

-import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.NumericDocValuesField;

@@ -30,6 +29,7 @@ import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
 import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatService;
 import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.index.mapper.core.AbstractFieldMapper;

 import java.io.IOException;
@@ -300,7 +300,7 @@ public class IpFieldMapper extends NumberFieldMapper<Long> {
             fields.add(field);
         }
         if (hasDocValues()) {
-            fields.add(toDocValues(value));
+            addDocValue(context, value);
         }
     }

@@ -20,7 +20,6 @@
 package org.elasticsearch.index.mapper.object;

 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.Term;

@@ -35,6 +34,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.index.mapper.internal.AllFieldMapper;
 import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
@@ -25,6 +25,7 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefHash;
 import org.apache.lucene.util.InPlaceMergeSorter;
 import org.elasticsearch.common.lucene.ReaderContextAware;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.index.fielddata.*;
 import org.elasticsearch.index.fielddata.AtomicFieldData.Order;
 import org.elasticsearch.script.SearchScript;

@@ -642,22 +643,12 @@ public abstract class FieldDataSource {
         @Override
         public int setDocument(int docId) {
             final int numValues = super.setDocument(docId);
-            if (numValues == 0) {
-                return numUniqueValues = 0;
-            }
             array = ArrayUtil.grow(array, numValues);
             for (int i = 0; i < numValues; ++i) {
                 array[i] = super.nextValue();
             }
-            sorter.sort(0, numValues);
-            numUniqueValues = 1;
-            for (int i = 1; i < numValues; ++i) {
-                if (array[i] != array[i-1]) {
-                    array[numUniqueValues++] = array[i];
-                }
-            }
             pos = 0;
-            return numUniqueValues;
+            return numUniqueValues = CollectionUtils.sortAndDedup(array, numValues);
         }

         @Override
@@ -699,22 +690,12 @@ public abstract class FieldDataSource {
         @Override
         public int setDocument(int docId) {
             final int numValues = super.setDocument(docId);
-            if (numValues == 0) {
-                return numUniqueValues = 0;
-            }
             array = ArrayUtil.grow(array, numValues);
             for (int i = 0; i < numValues; ++i) {
                 array[i] = super.nextValue();
             }
-            sorter.sort(0, numValues);
-            numUniqueValues = 1;
-            for (int i = 1; i < numValues; ++i) {
-                if (array[i] != array[i-1]) {
-                    array[numUniqueValues++] = array[i];
-                }
-            }
             pos = 0;
-            return numUniqueValues;
+            return numUniqueValues = CollectionUtils.sortAndDedup(array, numValues);
         }

         @Override
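As an aside (not part of this commit): both call sites above rely on CollectionUtils.sortAndDedup(array, len) behaving exactly like the inline code it replaces. A minimal sketch of that contract, reconstructed from the removed lines (the real helper may sort differently, e.g. with an InPlaceMergeSorter rather than Arrays.sort, and also has overloads for the hppc lists used by the mappers earlier in this diff):

import java.util.Arrays;

public class SortAndDedupSketch {
    /** Sort array[0..len) ascending, compact unique values to the front, return the unique count. */
    public static int sortAndDedup(double[] array, int len) {
        if (len == 0) {
            return 0;
        }
        Arrays.sort(array, 0, len);
        int uniqueCount = 1;
        for (int i = 1; i < len; ++i) {
            if (array[i] != array[i - 1]) {
                array[uniqueCount++] = array[i];
            }
        }
        return uniqueCount;
    }
}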
@@ -0,0 +1,109 @@
+/*
+ * Licensed to ElasticSearch and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. ElasticSearch licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.util;
+
+import org.apache.lucene.store.ByteArrayDataInput;
+import org.apache.lucene.store.ByteArrayDataOutput;
+import org.elasticsearch.test.ElasticsearchTestCase;
+
+import java.io.IOException;
+
+public class ByteUtilsTests extends ElasticsearchTestCase {
+
+    public void testZigZag(long l) {
+        assertEquals(l, ByteUtils.zigZagDecode(ByteUtils.zigZagEncode(l)));
+    }
+
+    public void testZigZag() {
+        testZigZag(0);
+        testZigZag(1);
+        testZigZag(-1);
+        testZigZag(Long.MAX_VALUE);
+        testZigZag(Long.MIN_VALUE);
+        for (int i = 0; i < 1000; ++i) {
+            testZigZag(randomLong());
+            assertTrue(ByteUtils.zigZagEncode(randomInt(1000)) >= 0);
+            assertTrue(ByteUtils.zigZagEncode(-randomInt(1000)) >= 0);
+        }
+    }
+
+    public void testFloat() throws IOException {
+        final float[] data = new float[atLeast(1000)];
+        final byte[] encoded = new byte[data.length * 4];
+        for (int i = 0; i < data.length; ++i) {
+            data[i] = randomFloat();
+            ByteUtils.writeFloatLE(data[i], encoded, i * 4);
+        }
+        for (int i = 0; i < data.length; ++i) {
+            assertEquals(data[i], ByteUtils.readFloatLE(encoded, i * 4), Float.MIN_VALUE);
+        }
+    }
+
+    public void testDouble() throws IOException {
+        final double[] data = new double[atLeast(1000)];
+        final byte[] encoded = new byte[data.length * 8];
+        for (int i = 0; i < data.length; ++i) {
+            data[i] = randomDouble();
+            ByteUtils.writeDoubleLE(data[i], encoded, i * 8);
+        }
+        for (int i = 0; i < data.length; ++i) {
+            assertEquals(data[i], ByteUtils.readDoubleLE(encoded, i * 8), Double.MIN_VALUE);
+        }
+    }
+
+    public void testVLong() throws IOException {
+        final long[] data = new long[atLeast(1000)];
+        for (int i = 0; i < data.length; ++i) {
+            switch (randomInt(4)) {
+            case 0:
+                data[i] = 0;
+                break;
+            case 1:
+                data[i] = Long.MAX_VALUE;
+                break;
+            case 2:
+                data[i] = Long.MIN_VALUE;
+                break;
+            case 3:
+                data[i] = randomInt(1 << randomIntBetween(2,30));
+                break;
+            case 4:
+                data[i] = randomLong();
+                break;
+            default:
+                throw new AssertionError();
+            }
+        }
+        final byte[] encoded = new byte[ByteUtils.MAX_BYTES_VLONG * data.length];
+        ByteArrayDataOutput out = new ByteArrayDataOutput(encoded);
+        for (int i = 0; i < data.length; ++i) {
+            final int pos = out.getPosition();
+            ByteUtils.writeVLong(out, data[i]);
+            if (data[i] < 0) {
+                assertEquals(ByteUtils.MAX_BYTES_VLONG, out.getPosition() - pos);
+            }
+        }
+        final ByteArrayDataInput in = new ByteArrayDataInput(encoded);
+        for (int i = 0; i < data.length; ++i) {
+            assertEquals(data[i], ByteUtils.readVLong(in));
+        }
+    }
+
+}
@@ -20,7 +20,6 @@
 package org.elasticsearch.index.engine.robin;

 import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.TextField;

@@ -44,6 +43,7 @@ import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommitExistsMatcher;
 import org.elasticsearch.index.engine.*;
 import org.elasticsearch.index.indexing.ShardIndexingService;
 import org.elasticsearch.index.indexing.slowlog.ShardSlowLogIndexingService;
+import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
@@ -19,19 +19,26 @@
 package org.elasticsearch.index.fielddata;

 import com.google.common.collect.Lists;
-import org.apache.lucene.document.*;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.SortedSetDocValuesField;
+import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.CompositeReaderContext;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.English;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.NumericUtils;
 import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.settings.ImmutableSettings;
 import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.common.unit.DistanceUnit.Distance;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.MapperTestUtils;
+import org.elasticsearch.index.mapper.ParsedDocument;
 import org.junit.Test;

 import java.util.*;

@@ -52,49 +59,35 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
         return min + random.nextInt(max - min);
     }

-    private static SortedSetDocValuesField longDV(String name, long l) {
-        final BytesRef bytes = new BytesRef();
-        NumericUtils.longToPrefixCodedBytes(l, 0, bytes);
-        return new SortedSetDocValuesField(name, bytes);
-    }
-
-    private static SortedSetDocValuesField intDV(String name, int i) {
-        final BytesRef bytes = new BytesRef();
-        NumericUtils.intToPrefixCodedBytes(i, 0, bytes);
-        return new SortedSetDocValuesField(name, bytes);
-    }
-
-    private static SortedSetDocValuesField floatDV(String name, float f) {
-        return intDV(name, NumericUtils.floatToSortableInt(f));
-    }
-
-    private static SortedSetDocValuesField doubleDV(String name, double f) {
-        return longDV(name, NumericUtils.doubleToSortableLong(f));
-    }
-
     @Test
     public void testDuelAllTypesSingleValue() throws Exception {
+        final String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties")
+                .startObject("bytes").field("type", "string").field("index", "not_analyzed").startObject("fielddata").field("format", LuceneTestCase.defaultCodecSupportsSortedSet() ? "doc_values" : "fst").endObject().endObject()
+                .startObject("byte").field("type", "byte").startObject("fielddata").field("format", "doc_values").endObject().endObject()
+                .startObject("short").field("type", "short").startObject("fielddata").field("format", "doc_values").endObject().endObject()
+                .startObject("integer").field("type", "integer").startObject("fielddata").field("format", "doc_values").endObject().endObject()
+                .startObject("long").field("type", "long").startObject("fielddata").field("format", "doc_values").endObject().endObject()
+                .startObject("float").field("type", "float").startObject("fielddata").field("format", "doc_values").endObject().endObject()
+                .startObject("double").field("type", "double").startObject("fielddata").field("format", "doc_values").endObject().endObject()
+                .endObject().endObject().endObject().string();
+        final DocumentMapper mapper = MapperTestUtils.newParser().parse(mapping);
         Random random = getRandom();
         int atLeast = atLeast(random, 1000);
         for (int i = 0; i < atLeast; i++) {
-            int v = (random.nextBoolean() ? -1 * random.nextInt(Byte.MAX_VALUE) : random.nextInt(Byte.MAX_VALUE));
-            Document d = new Document();
-            d.add(new StringField("_id", "" + i, Field.Store.NO));
-            if (random.nextInt(15) != 0) {
-                d.add(new LongField("long", v, Field.Store.NO ));
-                d.add(new IntField("integer", v, Field.Store.NO));
-                d.add(new DoubleField("double", v, Field.Store.NO));
-                d.add(new FloatField("float", v, Field.Store.NO));
-                d.add(new StringField("bytes","" + v, Field.Store.NO));
-                if (LuceneTestCase.defaultCodecSupportsSortedSet()) {
-                    d.add(longDV("long", v));
-                    d.add(intDV("integer", v));
-                    d.add(doubleDV("double", v));
-                    d.add(floatDV("float", v));
-                    d.add(new SortedSetDocValuesField("bytes", new BytesRef("" + v)));
-                }
-            }
-            writer.addDocument(d);
+            String s = Integer.toString(randomByte());
+            XContentBuilder doc = XContentFactory.jsonBuilder().startObject();
+            for (String fieldName : Arrays.asList("bytes", "byte", "short", "integer", "long", "float", "double")) {
+                doc = doc.field(fieldName, s);
+            }
+            doc = doc.endObject();
+
+            final ParsedDocument d = mapper.parse("type", Integer.toString(i), doc.bytes());
+
+            writer.addDocument(d.rootDoc());
+
             if (random.nextInt(10) == 0) {
                 refreshReader();
             }
@@ -109,14 +102,14 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
         typeMap.put(new FieldDataType("long", ImmutableSettings.builder().put("format", "array")), Type.Long);
         typeMap.put(new FieldDataType("double", ImmutableSettings.builder().put("format", "array")), Type.Double);
         typeMap.put(new FieldDataType("float", ImmutableSettings.builder().put("format", "array")), Type.Float);
+        typeMap.put(new FieldDataType("byte", ImmutableSettings.builder().put("format", "doc_values")), Type.Integer);
+        typeMap.put(new FieldDataType("short", ImmutableSettings.builder().put("format", "doc_values")), Type.Integer);
+        typeMap.put(new FieldDataType("int", ImmutableSettings.builder().put("format", "doc_values")), Type.Integer);
+        typeMap.put(new FieldDataType("long", ImmutableSettings.builder().put("format", "doc_values")), Type.Long);
+        typeMap.put(new FieldDataType("double", ImmutableSettings.builder().put("format", "doc_values")), Type.Double);
+        typeMap.put(new FieldDataType("float", ImmutableSettings.builder().put("format", "doc_values")), Type.Float);
         if (LuceneTestCase.defaultCodecSupportsSortedSet()) {
             typeMap.put(new FieldDataType("string", ImmutableSettings.builder().put("format", "doc_values")), Type.Bytes);
-            typeMap.put(new FieldDataType("byte", ImmutableSettings.builder().put("format", "doc_values")), Type.Integer);
-            typeMap.put(new FieldDataType("short", ImmutableSettings.builder().put("format", "doc_values")), Type.Integer);
-            typeMap.put(new FieldDataType("int", ImmutableSettings.builder().put("format", "doc_values")), Type.Integer);
-            typeMap.put(new FieldDataType("long", ImmutableSettings.builder().put("format", "doc_values")), Type.Long);
-            typeMap.put(new FieldDataType("double", ImmutableSettings.builder().put("format", "doc_values")), Type.Double);
-            typeMap.put(new FieldDataType("float", ImmutableSettings.builder().put("format", "doc_values")), Type.Float);
         }
         ArrayList<Entry<FieldDataType, Type>> list = new ArrayList<Entry<FieldDataType, Type>>(typeMap.entrySet());
         Preprocessor pre = new ToDoublePreprocessor();
@@ -149,23 +142,42 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {

     @Test
     public void testDuelIntegers() throws Exception {
+        final String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties")
+                .startObject("byte").field("type", "byte").startObject("fielddata").field("format", "doc_values").endObject().endObject()
+                .startObject("short").field("type", "short").startObject("fielddata").field("format", "doc_values").endObject().endObject()
+                .startObject("integer").field("type", "integer").startObject("fielddata").field("format", "doc_values").endObject().endObject()
+                .startObject("long").field("type", "long").startObject("fielddata").field("format", "doc_values").endObject().endObject()
+                .endObject().endObject().endObject().string();
+
+        final DocumentMapper mapper = MapperTestUtils.newParser().parse(mapping);
         Random random = getRandom();
         int atLeast = atLeast(random, 1000);
+        final int maxNumValues = randomBoolean() ? 1 : randomIntBetween(2, 40);
+        byte[] values = new byte[maxNumValues];
         for (int i = 0; i < atLeast; i++) {
-            Document d = new Document();
-            d.add(new StringField("_id", "" + i, Field.Store.NO));
-            if (random.nextInt(15) != 0) {
-                int[] numbers = getNumbers(random, Byte.MAX_VALUE);
-                for (int j : numbers) {
-                    d.add(new LongField("long", j, Field.Store.NO ));
-                    d.add(new IntField("integer", j, Field.Store.NO));
-                    if (LuceneTestCase.defaultCodecSupportsSortedSet()) {
-                        d.add(longDV("long", j));
-                        d.add(intDV("integer", j));
-                    }
-                }
-            }
-            writer.addDocument(d);
+            final int numValues = randomInt(maxNumValues);
+            for (int j = 0; j < numValues; ++j) {
+                if (randomBoolean()) {
+                    values[j] = 1; // test deduplication
+                } else {
+                    values[j] = randomByte();
+                }
+            }
+
+            XContentBuilder doc = XContentFactory.jsonBuilder().startObject();
+            for (String fieldName : Arrays.asList("byte", "short", "integer", "long")) {
+                doc = doc.startArray(fieldName);
+                for (int j = 0; j < numValues; ++j) {
+                    doc = doc.value(values[j]);
+                }
+                doc = doc.endArray();
+            }
+            doc = doc.endObject();
+
+            final ParsedDocument d = mapper.parse("type", Integer.toString(i), doc.bytes());
+
+            writer.addDocument(d.rootDoc());
             if (random.nextInt(10) == 0) {
                 refreshReader();
             }
@@ -176,12 +188,10 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
         typeMap.put(new FieldDataType("short", ImmutableSettings.builder().put("format", "array")), Type.Integer);
         typeMap.put(new FieldDataType("int", ImmutableSettings.builder().put("format", "array")), Type.Integer);
         typeMap.put(new FieldDataType("long", ImmutableSettings.builder().put("format", "array")), Type.Long);
-        if (LuceneTestCase.defaultCodecSupportsSortedSet()) {
-            typeMap.put(new FieldDataType("byte", ImmutableSettings.builder().put("format", "doc_values")), Type.Integer);
-            typeMap.put(new FieldDataType("short", ImmutableSettings.builder().put("format", "doc_values")), Type.Integer);
-            typeMap.put(new FieldDataType("int", ImmutableSettings.builder().put("format", "doc_values")), Type.Integer);
-            typeMap.put(new FieldDataType("long", ImmutableSettings.builder().put("format", "doc_values")), Type.Long);
-        }
+        typeMap.put(new FieldDataType("byte", ImmutableSettings.builder().put("format", "doc_values")), Type.Integer);
+        typeMap.put(new FieldDataType("short", ImmutableSettings.builder().put("format", "doc_values")), Type.Integer);
+        typeMap.put(new FieldDataType("int", ImmutableSettings.builder().put("format", "doc_values")), Type.Integer);
+        typeMap.put(new FieldDataType("long", ImmutableSettings.builder().put("format", "doc_values")), Type.Long);
         ArrayList<Entry<FieldDataType, Type>> list = new ArrayList<Entry<FieldDataType, Type>>(typeMap.entrySet());
         while (!list.isEmpty()) {
             Entry<FieldDataType, Type> left;
@@ -212,23 +222,41 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {

     @Test
     public void testDuelDoubles() throws Exception {
+        final String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties")
+                .startObject("float").field("type", "float").startObject("fielddata").field("format", "doc_values").endObject().endObject()
+                .startObject("double").field("type", "double").startObject("fielddata").field("format", "doc_values").endObject().endObject()
+                .endObject().endObject().endObject().string();
+
+        final DocumentMapper mapper = MapperTestUtils.newParser().parse(mapping);
         Random random = getRandom();
         int atLeast = atLeast(random, 1000);
+        final int maxNumValues = randomBoolean() ? 1 : randomIntBetween(2, 40);
+        float[] values = new float[maxNumValues];
         for (int i = 0; i < atLeast; i++) {
-            Document d = new Document();
-            d.add(new StringField("_id", "" + i, Field.Store.NO));
-            if (random.nextInt(15) != 0) {
-                int[] numbers = getNumbers(random, Short.MAX_VALUE);
-                for (int j : numbers) {
-                    d.add(new FloatField("float", j, Field.Store.NO ));
-                    d.add(new DoubleField("double", j, Field.Store.NO));
-                    if (LuceneTestCase.defaultCodecSupportsSortedSet()) {
-                        d.add(doubleDV("double", j));
-                        d.add(floatDV("float", j));
-                    }
-                }
-            }
-            writer.addDocument(d);
+            final int numValues = randomInt(maxNumValues);
+            float def = randomBoolean() ? randomFloat() : Float.NaN;
+            for (int j = 0; j < numValues; ++j) {
+                if (randomBoolean()) {
+                    values[j] = def;
+                } else {
+                    values[j] = randomFloat();
+                }
+            }
+
+            XContentBuilder doc = XContentFactory.jsonBuilder().startObject().startArray("float");
+            for (int j = 0; j < numValues; ++j) {
+                doc = doc.value(values[j]);
+            }
+            doc = doc.endArray().startArray("double");
+            for (int j = 0; j < numValues; ++j) {
+                doc = doc.value(values[j]);
+            }
+            doc = doc.endArray().endObject();
+
+            final ParsedDocument d = mapper.parse("type", Integer.toString(i), doc.bytes());
+
+            writer.addDocument(d.rootDoc());
             if (random.nextInt(10) == 0) {
                 refreshReader();
             }
@@ -237,10 +265,8 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
         Map<FieldDataType, Type> typeMap = new HashMap<FieldDataType, Type>();
         typeMap.put(new FieldDataType("double", ImmutableSettings.builder().put("format", "array")), Type.Double);
         typeMap.put(new FieldDataType("float", ImmutableSettings.builder().put("format", "array")), Type.Float);
-        if (LuceneTestCase.defaultCodecSupportsSortedSet()) {
-            typeMap.put(new FieldDataType("double", ImmutableSettings.builder().put("format", "doc_values")), Type.Double);
-            typeMap.put(new FieldDataType("float", ImmutableSettings.builder().put("format", "doc_values")), Type.Float);
-        }
+        typeMap.put(new FieldDataType("double", ImmutableSettings.builder().put("format", "doc_values")), Type.Double);
+        typeMap.put(new FieldDataType("float", ImmutableSettings.builder().put("format", "doc_values")), Type.Float);
         ArrayList<Entry<FieldDataType, Type>> list = new ArrayList<Entry<FieldDataType, Type>>(typeMap.entrySet());
         while (!list.isEmpty()) {
             Entry<FieldDataType, Type> left;
@@ -423,6 +449,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
         BytesValues rightBytesValues = rightData.getBytesValues(random.nextBoolean());
         BytesRef leftSpare = new BytesRef();
         BytesRef rightSpare = new BytesRef();
+
         for (int i = 0; i < numDocs; i++) {
             int numValues = 0;
             assertThat((numValues = leftBytesValues.setDocument(i)), equalTo(rightBytesValues.setDocument(i)));
@@ -462,8 +489,12 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
             assertThat((numValues = leftDoubleValues.setDocument(i)), equalTo(rightDoubleValues.setDocument(i)));
             double previous = 0;
             for (int j = 0; j < numValues; j++) {
-                double current;
-                assertThat(leftDoubleValues.nextValue(), equalTo(current = rightDoubleValues.nextValue()));
+                double current = rightDoubleValues.nextValue();
+                if (Double.isNaN(current)) {
+                    assertTrue(Double.isNaN(leftDoubleValues.nextValue()));
+                } else {
+                    assertThat(leftDoubleValues.nextValue(), closeTo(current, 0.0001));
+                }
                 if (j > 0) {
                     assertThat(Double.compare(previous,current), lessThan(0));
                 }
@@ -472,7 +503,7 @@
             }
         }
     }

-    private static void duelFieldDataLong(Random random, AtomicReaderContext context, IndexNumericFieldData left, IndexNumericFieldData right) throws Exception {
+    private static void duelFieldDataLong(Random random, AtomicReaderContext context, IndexNumericFieldData<?> left, IndexNumericFieldData right) throws Exception {
         AtomicNumericFieldData leftData = random.nextBoolean() ? left.load(context) : left.loadDirect(context);
         AtomicNumericFieldData rightData = random.nextBoolean() ? right.load(context) : right.loadDirect(context);
@@ -70,7 +70,7 @@ public class IndexFieldDataServiceTests extends ElasticsearchTestCase {
         ifdService.clear();
         fd = ifdService.getForField(mapper);
         if (docValues) {
-            assertTrue(fd instanceof SortedSetDVNumericIndexFieldData);
+            assertTrue(fd instanceof BinaryDVNumericIndexFieldData);
         } else {
             assertTrue(fd instanceof PackedArrayIndexFieldData);
         }

@@ -80,7 +80,7 @@ public class IndexFieldDataServiceTests extends ElasticsearchTestCase {
         ifdService.clear();
         fd = ifdService.getForField(floatMapper);
         if (docValues) {
-            assertTrue(fd instanceof SortedSetDVNumericIndexFieldData);
+            assertTrue(fd instanceof BinaryDVNumericIndexFieldData);
         } else {
             assertTrue(fd instanceof FloatArrayIndexFieldData);
         }

@@ -89,7 +89,7 @@ public class IndexFieldDataServiceTests extends ElasticsearchTestCase {
         ifdService.clear();
         fd = ifdService.getForField(doubleMapper);
         if (docValues) {
-            assertTrue(fd instanceof SortedSetDVNumericIndexFieldData);
+            assertTrue(fd instanceof BinaryDVNumericIndexFieldData);
         } else {
             assertTrue(fd instanceof DoubleArrayIndexFieldData);
         }
@@ -19,7 +19,6 @@

 package org.elasticsearch.index.mapper.all;

-import org.apache.lucene.document.Document;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.TermQuery;
 import org.elasticsearch.common.bytes.BytesArray;

@@ -30,13 +29,13 @@ import org.elasticsearch.common.lucene.all.AllTokenStream;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MapperTestUtils;
+import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.test.ElasticsearchTestCase;
 import org.hamcrest.Matchers;
 import org.junit.Test;

 import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath;
 import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath;
-import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.equalTo;

 /**
@@ -19,17 +19,16 @@

 package org.elasticsearch.index.mapper.boost;

-import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexableField;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.MapperTestUtils;
+import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.test.ElasticsearchTestCase;
 import org.junit.Test;

-import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.closeTo;

 /**
@@ -19,18 +19,17 @@

 package org.elasticsearch.index.mapper.dynamictemplate.genericstore;

-import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexableField;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.FieldMappers;
 import org.elasticsearch.index.mapper.MapperTestUtils;
+import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.test.ElasticsearchTestCase;
 import org.junit.Test;

 import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath;
 import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath;
-import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.equalTo;

 /**
@@ -19,18 +19,17 @@

 package org.elasticsearch.index.mapper.dynamictemplate.pathmatch;

-import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexableField;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.FieldMappers;
 import org.elasticsearch.index.mapper.MapperTestUtils;
+import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.test.ElasticsearchTestCase;
 import org.junit.Test;

 import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath;
 import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath;
-import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.equalTo;

 /**
@@ -19,22 +19,18 @@

 package org.elasticsearch.index.mapper.dynamictemplate.simple;

-import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexableField;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
-import org.elasticsearch.index.mapper.DocumentFieldMappers;
-import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.FieldMappers;
-import org.elasticsearch.index.mapper.MapperTestUtils;
+import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.test.ElasticsearchTestCase;
 import org.hamcrest.Matchers;
 import org.junit.Test;

 import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath;
 import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath;
-import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.equalTo;

 /**
@@ -19,20 +19,19 @@
 package org.elasticsearch.index.mapper.multifield;
 
-import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexableField;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.MapperTestUtils;
+import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.test.ElasticsearchTestCase;
 import org.junit.Test;
 
 import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath;
 import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath;
 import static org.elasticsearch.index.mapper.MapperBuilders.*;
-import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.equalTo;
 
 /**
@@ -19,13 +19,13 @@
 package org.elasticsearch.index.mapper.multifield.merge;
 
-import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexableField;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.MapperTestUtils;
+import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.test.ElasticsearchTestCase;
 import org.junit.Test;
 
@@ -34,7 +34,6 @@ import java.util.Arrays;
 import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath;
 import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath;
 import static org.elasticsearch.index.mapper.DocumentMapper.MergeFlags.mergeFlags;
-import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.*;
 
 /**
@@ -28,7 +28,6 @@ import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.test.ElasticsearchTestCase;
 import org.junit.Test;
 
-import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.nullValue;
 
@@ -19,17 +19,19 @@
 package org.elasticsearch.index.mapper.numeric;
 
+import org.apache.lucene.index.FieldInfo.DocValuesType;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
 import org.elasticsearch.index.mapper.core.LongFieldMapper;
 import org.elasticsearch.index.mapper.core.StringFieldMapper;
+import org.elasticsearch.index.mapper.string.SimpleStringMappingTests;
 import org.elasticsearch.test.ElasticsearchTestCase;
 import org.junit.Test;
 
 import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
-import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.*;
 
 /**
@@ -142,4 +144,83 @@ public class SimpleNumericTests extends ElasticsearchTestCase {
             assertThat(e.getCause(), instanceOf(NumberFormatException.class));
         }
     }
+
+    public void testDocValues() throws Exception {
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties")
+                    .startObject("int")
+                        .field("type", "integer")
+                        .startObject("fielddata")
+                            .field("format", "doc_values")
+                        .endObject()
+                    .endObject()
+                    .startObject("double")
+                        .field("type", "double")
+                        .startObject("fielddata")
+                            .field("format", "doc_values")
+                        .endObject()
+                    .endObject()
+                .endObject()
+                .endObject().endObject().string();
+
+        DocumentMapper defaultMapper = MapperTestUtils.newParser().parse(mapping);
+
+        ParsedDocument parsedDoc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
+                .startObject()
+                .field("int", "1234")
+                .field("double", "1234")
+                .endObject()
+                .bytes());
+        final Document doc = parsedDoc.rootDoc();
+        assertEquals(DocValuesType.BINARY, SimpleStringMappingTests.docValuesType(doc, "int"));
+        assertEquals(DocValuesType.BINARY, SimpleStringMappingTests.docValuesType(doc, "double"));
+    }
+
+    public void testDocValuesOnNested() throws Exception {
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties")
+                    .startObject("nested")
+                        .field("type", "nested")
+                        .startObject("properties")
+                            .startObject("int")
+                                .field("type", "integer")
+                                .startObject("fielddata")
+                                    .field("format", "doc_values")
+                                .endObject()
+                            .endObject()
+                            .startObject("double")
+                                .field("type", "double")
+                                .startObject("fielddata")
+                                    .field("format", "doc_values")
+                                .endObject()
+                            .endObject()
+                        .endObject()
+                    .endObject()
+                .endObject()
+                .endObject().endObject().string();
+
+        DocumentMapper defaultMapper = MapperTestUtils.newParser().parse(mapping);
+
+        ParsedDocument parsedDoc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
+                .startObject()
+                .startArray("nested")
+                    .startObject()
+                        .field("int", "1234")
+                        .field("double", "1234")
+                    .endObject()
+                    .startObject()
+                        .field("int", "-1")
+                        .field("double", "-2")
+                    .endObject()
+                .endArray()
+                .endObject()
+                .bytes());
+        for (Document doc : parsedDoc.docs()) {
+            if (doc == parsedDoc.rootDoc()) {
+                continue;
+            }
+            assertEquals(DocValuesType.BINARY, SimpleStringMappingTests.docValuesType(doc, "nested.int"));
+            assertEquals(DocValuesType.BINARY, SimpleStringMappingTests.docValuesType(doc, "nested.double"));
+        }
+    }
 }
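For readability, the mapping that testDocValues builds above is equivalent to the following JSON (a sketch derived from the XContentBuilder calls; not text from the commit). Setting the fielddata format to doc_values on numeric fields is what makes the test expect BINARY rather than SORTED_SET doc values:

    {
      "type": {
        "properties": {
          "int":    { "type": "integer", "fielddata": { "format": "doc_values" } },
          "double": { "type": "double",  "fielddata": { "format": "doc_values" } }
        }
      }
    }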
@@ -20,10 +20,10 @@
 package org.elasticsearch.index.mapper.simple;
 
 import com.google.common.base.Charsets;
-import org.apache.lucene.document.Document;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.test.ElasticsearchTestCase;
 import org.junit.Test;
 
@@ -19,7 +19,6 @@
 package org.elasticsearch.index.mapper.string;
 
-import org.apache.lucene.document.Document;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfo.DocValuesType;
 import org.apache.lucene.index.IndexableField;
@@ -29,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.mapper.Mapper.BuilderContext;
+import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.index.mapper.core.StringFieldMapper;
 import org.elasticsearch.test.ElasticsearchTestCase;
 import org.junit.Test;
@@ -239,18 +239,6 @@ public class SimpleStringMappingTests extends ElasticsearchTestCase {
                         .field("format", "doc_values")
                     .endObject()
                 .endObject()
-                .startObject("int")
-                    .field("type", "integer")
-                    .startObject("fielddata")
-                        .field("format", "doc_values")
-                    .endObject()
-                .endObject()
-                .startObject("double")
-                    .field("type", "double")
-                    .startObject("fielddata")
-                        .field("format", "doc_values")
-                    .endObject()
-                .endObject()
                 .endObject()
                 .endObject().endObject().string();
@@ -260,18 +248,14 @@ public class SimpleStringMappingTests extends ElasticsearchTestCase {
                 .startObject()
                 .field("str1", "1234")
                 .field("str2", "1234")
-                .field("int", "1234")
-                .field("double", "1234")
                 .endObject()
                 .bytes());
         final Document doc = parsedDoc.rootDoc();
         assertEquals(null, docValuesType(doc, "str1"));
         assertEquals(DocValuesType.SORTED_SET, docValuesType(doc, "str2"));
-        assertEquals(DocValuesType.SORTED_SET, docValuesType(doc, "int"));
-        assertEquals(DocValuesType.SORTED_SET, docValuesType(doc, "double"));
     }
 
-    private static DocValuesType docValuesType(Document document, String fieldName) {
+    public static DocValuesType docValuesType(Document document, String fieldName) {
         for (IndexableField field : document.getFields(fieldName)) {
             if (field.fieldType().docValueType() != null) {
                 return field.fieldType().docValueType();
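The last hunk widens docValuesType from private to public; SimpleNumericTests (earlier in this commit) relies on that to assert BINARY doc values on its numeric fields. A minimal usage sketch, reusing only names that appear in the hunks above:

    // from SimpleNumericTests: the helper reports the doc-values type Lucene recorded for a field
    final Document doc = parsedDoc.rootDoc();
    assertEquals(DocValuesType.BINARY, SimpleStringMappingTests.docValuesType(doc, "int"));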