Fielddata: Don't expose hashes anymore.
Our field data currently exposes hashes of the bytes values. That takes roughly 4 bytes per unique value, which is definitely not negligible on high-cardinality fields. These hashes have been used for 3 different purposes:
 - term-based aggregations,
 - parent/child queries,
 - the percolator _id -> Query cache.
Both aggregations and parent/child queries have been moved to ordinals, which provide a greater speedup and lower memory usage. In the case of the percolator, the hash is used in conjunction with HashedBytesRef to avoid recomputing the hash value when resolving a query given its ID. However, removing this has no impact on PercolatorStressBenchmark.

Close #6500
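To make the API shift concrete, here is a minimal sketch (not code from this commit; the class and method names are hypothetical, and only the map shape mirrors PercolatorQueriesRegistry). AtomicFieldData#getBytesValues loses its needsHashes flag, and the percolator keys its query map on BytesRef directly instead of HashedBytesRef, which works because BytesRef already implements equals() and hashCode():

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;

// Hypothetical registry sketch; assumes Lucene on the classpath and a plain
// ConcurrentHashMap standing in for ConcurrentCollections' map factory.
public class QueryRegistrySketch {

    // Before: the map was keyed on a wrapper caching the hash so lookups would
    // not recompute it, e.g. new HashedBytesRef(values.copyShared(), values.currentValueHash()).
    // After: BytesRef itself is the key and its hashCode() is computed on demand.
    private final ConcurrentMap<BytesRef, Query> queries = new ConcurrentHashMap<>();

    public void addQuery(String id, Query query) {
        queries.put(new BytesRef(id), query);
    }

    public Query getQuery(String id) {
        return queries.get(new BytesRef(id));
    }
}

The trade is a hash recomputation per lookup against roughly 4 bytes of heap per unique value; the PercolatorStressBenchmark result above suggests the recomputation cost is a non-issue.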
parent 232394e3a8
commit 7bcabf9481
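On the aggregation side the same trade shows up in the StringTermsAggregator and HashedAggregator hunks below: bucket ords are now added by bytes alone (bucketOrds.add(bytes) instead of bucketOrds.add(bytes, values.currentValueHash())), so the hash is recomputed per value rather than read from a per-ordinal int array. A toy sketch of the idea, with a HashMap standing in for BytesRefHash (illustrative only, not code from this commit):

import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.util.BytesRef;

// Toy dedup table: rather than caching one int hash per unique value
// (~4 bytes each), let BytesRef.hashCode() be recomputed on demand.
public class BucketOrdsSketch {

    private final Map<BytesRef, Long> ords = new HashMap<>();
    private long nextOrd = 0;

    public long add(BytesRef term) {
        Long ord = ords.get(term); // term.hashCode() computed here, not cached
        if (ord == null) {
            ords.put(BytesRef.deepCopyOf(term), nextOrd);
            return nextOrd++;
        }
        return -1 - ord; // negative encodes "already seen", mirroring BytesRefHash.add
    }
}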
@@ -41,7 +41,7 @@ public abstract class AbstractAtomicNumericFieldData implements AtomicNumericFie
     }

     @Override
-    public BytesValues getBytesValues(boolean needsHashes) {
+    public BytesValues getBytesValues() {
         if (isFloat) {
             final DoubleValues values = getDoubleValues();
             return new BytesValues(values.isMultiValued()) {
@@ -40,13 +40,8 @@ public interface AtomicFieldData<Script extends ScriptDocValues> extends RamUsag

     /**
      * Use a non thread safe (lightweight) view of the values as bytes.
-     *
-     * @param needsHashes if <code>true</code> the implementation will use pre-build hashes if
-     * {@link org.elasticsearch.index.fielddata.BytesValues#currentValueHash()} is used. if no hashes
-     * are used <code>false</code> should be passed instead.
-     *
      */
-    BytesValues getBytesValues(boolean needsHashes);
+    BytesValues getBytesValues();

     /**
      * Returns a "scripting" based values.
@@ -64,7 +59,7 @@ public interface AtomicFieldData<Script extends ScriptDocValues> extends RamUsag
      * Use a non thread safe (lightweight) view of the values as bytes.
      * @param needsHashes
      */
-    BytesValues.WithOrdinals getBytesValues(boolean needsHashes);
+    BytesValues.WithOrdinals getBytesValues();

     /**
      * Returns a terms enum to iterate over all the underlying values.
@@ -29,7 +29,7 @@ public abstract class AtomicGeoPointFieldData<Script extends ScriptDocValues> im
     public abstract GeoPointValues getGeoPointValues();

     @Override
-    public BytesValues getBytesValues(boolean needsHashes) {
+    public BytesValues getBytesValues() {
         final GeoPointValues values = getGeoPointValues();
         return new BytesValues(values.isMultiValued()) {
@@ -101,15 +101,6 @@ public abstract class BytesValues {
      */
     public abstract BytesRef nextValue();

-    /**
-     * Returns the hash value of the previously returned shared {@link BytesRef} instances.
-     *
-     * @return the hash value of the previously returned shared {@link BytesRef} instances.
-     */
-    public int currentValueHash() {
-        return scratch.hashCode();
-    }
-
     /**
      * Returns the order the values are returned from {@link #nextValue()}.
      * <p> Note: {@link BytesValues} have {@link AtomicFieldData.Order#BYTES} by default.</p>
@@ -180,10 +171,5 @@ public abstract class BytesValues {
             throw new ElasticsearchIllegalStateException("Empty BytesValues has no next value");
         }

-        @Override
-        public int currentValueHash() {
-            throw new ElasticsearchIllegalStateException("Empty BytesValues has no hash for the current Value");
-        }
-
     }
 }
@@ -294,7 +294,7 @@ public final class BytesRefOrdValComparator extends NestedWrappableComparator<By

     @Override
     public FieldComparator<BytesRef> setNextReader(AtomicReaderContext context) throws IOException {
-        termsIndex = indexFieldData.load(context).getBytesValues(false);
+        termsIndex = indexFieldData.load(context).getBytesValues();
         assert termsIndex.ordinals() != null;
         missingOrd = ordInCurrentReader(termsIndex, missingValue);
         assert consistentInsertedOrd(termsIndex, missingOrd, missingValue);
@@ -85,7 +85,7 @@ public final class BytesRefValComparator extends NestedWrappableComparator<Bytes

     @Override
     public FieldComparator<BytesRef> setNextReader(AtomicReaderContext context) throws IOException {
-        docTerms = indexFieldData.load(context).getBytesValues(false);
+        docTerms = indexFieldData.load(context).getBytesValues();
         return this;
     }

@@ -67,8 +67,8 @@ final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFiel
     }

     @Override
-    public BytesValues.WithOrdinals getBytesValues(boolean needsHashes) {
-        BytesValues.WithOrdinals values = afd.getBytesValues(false);
+    public BytesValues.WithOrdinals getBytesValues() {
+        BytesValues.WithOrdinals values = afd.getBytesValues();
         Ordinals.Docs segmentOrdinals = values.ordinals();
         final Ordinals.Docs globalOrdinals;
         if (segmentOrdToGlobalOrdLookup != null) {
@@ -78,7 +78,7 @@ final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFiel
         }
         final BytesValues.WithOrdinals[] bytesValues = new BytesValues.WithOrdinals[atomicReaders.length];
         for (int i = 0; i < bytesValues.length; i++) {
-            bytesValues[i] = atomicReaders[i].afd.getBytesValues(false);
+            bytesValues[i] = atomicReaders[i].afd.getBytesValues();
         }
         return new BytesValues.WithOrdinals(globalOrdinals) {

@@ -95,11 +95,6 @@ final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFiel
             public BytesRef copyShared() {
                 return bytesValues[readerIndex].copyShared();
             }
-
-            @Override
-            public int currentValueHash() {
-                return bytesValues[readerIndex].currentValueHash();
-            }
         };
     }

@@ -29,8 +29,8 @@ import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.fielddata.*;
-import org.elasticsearch.search.MultiValueMode;
 import org.elasticsearch.index.mapper.FieldMapper.Names;
+import org.elasticsearch.search.MultiValueMode;

 import java.io.IOException;

@@ -54,7 +54,7 @@ abstract class AbstractGeoPointIndexFieldData extends AbstractIndexFieldData<Ato
     }

     @Override
-    public BytesValues getBytesValues(boolean needsHashes) {
+    public BytesValues getBytesValues() {
         return BytesValues.EMPTY;
     }

@@ -45,7 +45,7 @@ public class AtomicFieldDataWithOrdinalsTermsEnum extends TermsEnum {
     private BytesRef currentTerm;

     public AtomicFieldDataWithOrdinalsTermsEnum(AtomicFieldData.WithOrdinals afd) {
-        this.bytesValues = afd.getBytesValues(false);
+        this.bytesValues = afd.getBytesValues();
         this.ordinals = bytesValues.ordinals();
         this.maxOrd = ordinals.getMaxOrd();
     }
@@ -61,9 +61,7 @@ public class BinaryDVAtomicFieldData implements AtomicFieldData<ScriptDocValues.
     }

     @Override
-    public BytesValues getBytesValues(boolean needsHashes) {
-        // if you want hashes to be cached, you should rather store them on disk alongside the values rather than loading them into memory
-        // here - not supported for now, and probably not useful since this field data only applies to _id and _uid?
+    public BytesValues getBytesValues() {
         final BinaryDocValues values;
         final Bits docsWithField;
         try {
@@ -99,7 +97,7 @@ public class BinaryDVAtomicFieldData implements AtomicFieldData<ScriptDocValues.

     @Override
     public Strings getScriptValues() {
-        return new ScriptDocValues.Strings(getBytesValues(false));
+        return new ScriptDocValues.Strings(getBytesValues());
     }

     @Override
@@ -19,7 +19,6 @@

 package org.elasticsearch.index.fielddata.plain;

-import org.apache.lucene.index.AtomicReader;
 import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.DocValues;
 import org.apache.lucene.store.ByteArrayDataInput;
@@ -33,13 +32,11 @@ import org.elasticsearch.index.fielddata.LongValues;

 final class BinaryDVNumericAtomicFieldData extends AbstractAtomicNumericFieldData {

-    private final AtomicReader reader;
     private final BinaryDocValues values;
     private final NumericType numericType;

-    BinaryDVNumericAtomicFieldData(AtomicReader reader, BinaryDocValues values, NumericType numericType) {
+    BinaryDVNumericAtomicFieldData(BinaryDocValues values, NumericType numericType) {
         super(numericType.isFloatingPoint());
-        this.reader = reader;
         this.values = values == null ? DocValues.EMPTY_BINARY : values;
         this.numericType = numericType;
     }
@@ -28,8 +28,8 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource;
 import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSource;
 import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource;
-import org.elasticsearch.search.MultiValueMode;
 import org.elasticsearch.index.mapper.FieldMapper.Names;
+import org.elasticsearch.search.MultiValueMode;

 import java.io.IOException;

@@ -63,7 +63,7 @@ public class BinaryDVNumericIndexFieldData extends DocValuesIndexFieldData imple
     @Override
     public BinaryDVNumericAtomicFieldData load(AtomicReaderContext context) {
         try {
-            return new BinaryDVNumericAtomicFieldData(context.reader(), context.reader().getBinaryDocValues(fieldNames.indexName()), numericType);
+            return new BinaryDVNumericAtomicFieldData(context.reader().getBinaryDocValues(fieldNames.indexName()), numericType);
         } catch (IOException e) {
             throw new ElasticsearchIllegalStateException("Cannot load doc values", e);
         }
@@ -19,7 +19,6 @@

 package org.elasticsearch.index.fielddata.plain;

-import org.apache.lucene.index.AtomicReader;
 import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.DocValues;
 import org.apache.lucene.store.ByteArrayDataInput;
@@ -30,12 +29,10 @@ import org.elasticsearch.index.fielddata.ScriptDocValues;

 final class BytesBinaryDVAtomicFieldData implements AtomicFieldData<ScriptDocValues> {

-    private final AtomicReader reader;
     private final BinaryDocValues values;

-    BytesBinaryDVAtomicFieldData(AtomicReader reader, BinaryDocValues values) {
+    BytesBinaryDVAtomicFieldData(BinaryDocValues values) {
         super();
-        this.reader = reader;
         this.values = values == null ? DocValues.EMPTY_BINARY : values;
     }

@@ -55,7 +52,7 @@ final class BytesBinaryDVAtomicFieldData implements AtomicFieldData<ScriptDocVal
     }

     @Override
-    public BytesValues getBytesValues(boolean needsHashes) {
+    public BytesValues getBytesValues() {
         return new BytesValues(true) {

             final BytesRef bytes = new BytesRef();
@@ -28,12 +28,12 @@ import org.elasticsearch.index.Index;
 import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldDataCache;
-import org.elasticsearch.search.MultiValueMode;
 import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsBuilder;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.FieldMapper.Names;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.indices.fielddata.breaker.CircuitBreakerService;
+import org.elasticsearch.search.MultiValueMode;

 import java.io.IOException;

@@ -56,7 +56,7 @@ public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData impleme
     @Override
     public BytesBinaryDVAtomicFieldData load(AtomicReaderContext context) {
         try {
-            return new BytesBinaryDVAtomicFieldData(context.reader(), context.reader().getBinaryDocValues(fieldNames.indexName()));
+            return new BytesBinaryDVAtomicFieldData(context.reader().getBinaryDocValues(fieldNames.indexName()));
         } catch (IOException e) {
             throw new ElasticsearchIllegalStateException("Cannot load doc values", e);
         }
@@ -75,7 +75,7 @@ public abstract class DoubleArrayAtomicFieldData extends AbstractAtomicNumericFi
     }

     @Override
-    public BytesValues getBytesValues(boolean needsHashes) {
+    public BytesValues getBytesValues() {
         return BytesValues.EMPTY;
     }

@@ -48,9 +48,4 @@ final class EmptyByteValuesWithOrdinals extends BytesValues.WithOrdinals {
         throw new ElasticsearchIllegalStateException("Empty BytesValues has no next value");
     }

-    @Override
-    public int currentValueHash() {
-        throw new ElasticsearchIllegalStateException("Empty BytesValues has no hash for the current value");
-    }
-
 }
@@ -21,16 +21,14 @@ package org.elasticsearch.index.fielddata.plain;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IntsRef;
-import org.apache.lucene.util.fst.*;
-import org.elasticsearch.common.util.BigArrays;
-import org.elasticsearch.common.util.IntArray;
+import org.apache.lucene.util.fst.FST;
+import org.apache.lucene.util.fst.FST.Arc;
+import org.apache.lucene.util.fst.FST.BytesReader;
+import org.apache.lucene.util.fst.Util;
 import org.elasticsearch.index.fielddata.AtomicFieldData;
 import org.elasticsearch.index.fielddata.ScriptDocValues;
 import org.elasticsearch.index.fielddata.ordinals.EmptyOrdinals;
 import org.elasticsearch.index.fielddata.ordinals.Ordinals;
 import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs;

 import java.io.IOException;

@@ -45,7 +43,6 @@ public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<Scr
     // 0 ordinal in values means no value (its null)
     protected final Ordinals ordinals;

-    private volatile IntArray hashes;
     private long size = -1;

     private final FST<Long> fst;
@@ -81,33 +78,15 @@ public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<Scr
     }

     @Override
-    public BytesValues.WithOrdinals getBytesValues(boolean needsHashes) {
+    public BytesValues.WithOrdinals getBytesValues() {
         assert fst != null;
-        if (needsHashes) {
-            if (hashes == null) {
-                BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst);
-                IntArray hashes = BigArrays.NON_RECYCLING_INSTANCE.newIntArray(ordinals.getMaxOrd());
-                try {
-                    for (long i = 0, maxOrd = ordinals.getMaxOrd(); i < maxOrd; ++i) {
-                        hashes.set(i, fstEnum.next().input.hashCode());
-                    }
-                    assert fstEnum.next() == null;
-                } catch (IOException e) {
-                    throw new AssertionError("Cannot happen", e);
-                }
-                this.hashes = hashes;
-            }
-            return new HashedBytesValues(fst, ordinals.ordinals(), hashes);
-        } else {
-            return new BytesValues(fst, ordinals.ordinals());
-        }
+        return new BytesValues(fst, ordinals.ordinals());
     }


     @Override
     public ScriptDocValues.Strings getScriptValues() {
         assert fst != null;
-        return new ScriptDocValues.Strings(getBytesValues(false));
+        return new ScriptDocValues.Strings(getBytesValues());
     }

     @Override
@@ -150,22 +129,6 @@ public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<Scr
         }

     }

-    static final class HashedBytesValues extends BytesValues {
-        private final IntArray hashes;
-
-        HashedBytesValues(FST<Long> fst, Docs ordinals, IntArray hashes) {
-            super(fst, ordinals);
-            this.hashes = hashes;
-        }
-
-        @Override
-        public int currentValueHash() {
-            assert ordinals.currentOrd() >= 0;
-            return hashes.get(ordinals.currentOrd());
-        }
-    }
-
-
     final static class Empty extends FSTBytesAtomicFieldData {

@@ -179,7 +142,7 @@ public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<Scr
         }

         @Override
-        public BytesValues.WithOrdinals getBytesValues(boolean needsHashes) {
+        public BytesValues.WithOrdinals getBytesValues() {
             return new EmptyByteValuesWithOrdinals(ordinals.ordinals());
         }

@@ -74,7 +74,7 @@ public abstract class FloatArrayAtomicFieldData extends AbstractAtomicNumericFie
     }

     @Override
-    public BytesValues getBytesValues(boolean needsHashes) {
+    public BytesValues getBytesValues() {
         return BytesValues.EMPTY;
     }

@@ -81,17 +81,9 @@ public class IndexIndexFieldData implements IndexFieldData.WithOrdinals<AtomicFi

     private static class IndexBytesValues extends BytesValues.WithOrdinals {

-        final int hash;
-
         protected IndexBytesValues(String index) {
             super(INDEX_ORDINALS);
             scratch.copyChars(index);
-            hash = scratch.hashCode();
         }

-        @Override
-        public int currentValueHash() {
-            return hash;
-        }
-
         @Override

@@ -125,13 +117,13 @@ public class IndexIndexFieldData implements IndexFieldData.WithOrdinals<AtomicFi
     }

     @Override
-    public BytesValues.WithOrdinals getBytesValues(boolean needsHashes) {
+    public BytesValues.WithOrdinals getBytesValues() {
         return new IndexBytesValues(index);
     }

     @Override
     public ScriptDocValues getScriptValues() {
-        return new ScriptDocValues.Strings(getBytesValues(false));
+        return new ScriptDocValues.Strings(getBytesValues());
     }

     @Override
@@ -74,7 +74,7 @@ public abstract class PackedArrayAtomicFieldData extends AbstractAtomicNumericFi
     }

     @Override
-    public BytesValues getBytesValues(boolean needsHashes) {
+    public BytesValues getBytesValues() {
         return BytesValues.EMPTY;
     }

@@ -21,15 +21,11 @@ package org.elasticsearch.index.fielddata.plain;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.PagedBytes;
-import org.apache.lucene.util.PagedBytes.Reader;
 import org.apache.lucene.util.packed.MonotonicAppendingLongBuffer;
-import org.elasticsearch.common.util.BigArrays;
-import org.elasticsearch.common.util.IntArray;
 import org.elasticsearch.index.fielddata.AtomicFieldData;
 import org.elasticsearch.index.fielddata.ScriptDocValues;
 import org.elasticsearch.index.fielddata.ordinals.EmptyOrdinals;
 import org.elasticsearch.index.fielddata.ordinals.Ordinals;
-import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs;

 /**
  */
@@ -43,7 +39,6 @@ public class PagedBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<S
     private final MonotonicAppendingLongBuffer termOrdToBytesOffset;
     protected final Ordinals ordinals;

-    private volatile IntArray hashes;
     private long size = -1;
     private final long readerBytesSize;

@@ -81,33 +76,14 @@ public class PagedBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<S
         return size;
     }

-    private final IntArray getHashes() {
-        if (hashes == null) {
-            long numberOfValues = termOrdToBytesOffset.size();
-            IntArray hashes = BigArrays.NON_RECYCLING_INSTANCE.newIntArray(numberOfValues);
-            BytesRef scratch = new BytesRef();
-            for (long i = 0; i < numberOfValues; i++) {
-                bytes.fill(scratch, termOrdToBytesOffset.get(i));
-                hashes.set(i, scratch.hashCode());
-            }
-            this.hashes = hashes;
-        }
-        return hashes;
-    }
-
     @Override
-    public BytesValues.WithOrdinals getBytesValues(boolean needsHashes) {
-        if (needsHashes) {
-            final IntArray hashes = getHashes();
-            return new BytesValues.HashedBytesValues(hashes, bytes, termOrdToBytesOffset, ordinals.ordinals());
-        } else {
-            return new BytesValues(bytes, termOrdToBytesOffset, ordinals.ordinals());
-        }
+    public BytesValues.WithOrdinals getBytesValues() {
+        return new BytesValues(bytes, termOrdToBytesOffset, ordinals.ordinals());
     }

     @Override
     public ScriptDocValues.Strings getScriptValues() {
-        return new ScriptDocValues.Strings(getBytesValues(false));
+        return new ScriptDocValues.Strings(getBytesValues());
     }

     @Override
@@ -153,22 +129,6 @@ public class PagedBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<S
             return scratch;
         }

-        static final class HashedBytesValues extends BytesValues {
-            private final IntArray hashes;
-
-
-            HashedBytesValues(IntArray hashes, Reader bytes, MonotonicAppendingLongBuffer termOrdToBytesOffset, Docs ordinals) {
-                super(bytes, termOrdToBytesOffset, ordinals);
-                this.hashes = hashes;
-            }
-
-            @Override
-            public int currentValueHash() {
-                assert ordinals.currentOrd() >= 0;
-                return hashes.get(ordinals.currentOrd());
-            }
-        }
-
     }

     private final static class Empty extends PagedBytesAtomicFieldData {
@@ -194,7 +154,7 @@ public class PagedBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<S
         }

         @Override
-        public BytesValues.WithOrdinals getBytesValues(boolean needsHashes) {
+        public BytesValues.WithOrdinals getBytesValues() {
             return new EmptyByteValuesWithOrdinals(ordinals.ordinals());
         }

@@ -64,11 +64,11 @@ public class ParentChildAtomicFieldData implements AtomicFieldData {
     }

     @Override
-    public BytesValues getBytesValues(boolean needsHashes) {
+    public BytesValues getBytesValues() {
         final BytesValues[] bytesValues = new BytesValues[typeToIds.size()];
         int index = 0;
         for (ObjectCursor<PagedBytesAtomicFieldData> cursor : typeToIds.values()) {
-            bytesValues[index++] = cursor.value.getBytesValues(needsHashes);
+            bytesValues[index++] = cursor.value.getBytesValues();
         }
         return new BytesValues(true) {

@@ -116,7 +116,7 @@ public class ParentChildAtomicFieldData implements AtomicFieldData {
     public BytesValues.WithOrdinals getBytesValues(String type) {
         WithOrdinals atomicFieldData = typeToIds.get(type);
         if (atomicFieldData != null) {
-            return atomicFieldData.getBytesValues(true);
+            return atomicFieldData.getBytesValues();
         } else {
             return null;
         }
@@ -128,7 +128,7 @@ public class ParentChildAtomicFieldData implements AtomicFieldData {

     @Override
     public ScriptDocValues getScriptValues() {
-        return new ScriptDocValues.Strings(getBytesValues(false));
+        return new ScriptDocValues.Strings(getBytesValues());
     }

     @Override
@@ -19,12 +19,13 @@

 package org.elasticsearch.index.fielddata.plain;

-import org.apache.lucene.index.*;
+import org.apache.lucene.index.AtomicReader;
+import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchIllegalStateException;
-import org.elasticsearch.common.util.BigArrays;
-import org.elasticsearch.common.util.IntArray;
 import org.elasticsearch.index.fielddata.AtomicFieldData;
 import org.elasticsearch.index.fielddata.BytesValues;
 import org.elasticsearch.index.fielddata.ordinals.Ordinals;
@@ -40,7 +41,6 @@ abstract class SortedSetDVAtomicFieldData {

     private final AtomicReader reader;
     private final String field;
-    private volatile IntArray hashes;

     SortedSetDVAtomicFieldData(AtomicReader reader, String field) {
         this.reader = reader;
@@ -68,30 +68,11 @@ abstract class SortedSetDVAtomicFieldData {
         // no-op
     }

-    public org.elasticsearch.index.fielddata.BytesValues.WithOrdinals getBytesValues(boolean needsHashes) {
+    public org.elasticsearch.index.fielddata.BytesValues.WithOrdinals getBytesValues() {
         final SortedSetDocValues values = getValuesNoException(reader, field);
         return new SortedSetValues(reader, field, values);
     }

-    public org.elasticsearch.index.fielddata.BytesValues.WithOrdinals getHashedBytesValues() {
-        final SortedSetDocValues values = getValuesNoException(reader, field);
-        if (hashes == null) {
-            synchronized (this) {
-                if (hashes == null) {
-                    final long valueCount = values.getValueCount();
-                    final IntArray hashes = BigArrays.NON_RECYCLING_INSTANCE.newIntArray(valueCount);
-                    BytesRef scratch = new BytesRef(16);
-                    for (long i = 0; i < valueCount; ++i) {
-                        values.lookupOrd(i, scratch);
-                        hashes.set(i, scratch.hashCode());
-                    }
-                    this.hashes = hashes;
-                }
-            }
-        }
-        return new SortedSetHashedValues(reader, field, values, hashes);
-    }
-
     public TermsEnum getTermsEnum() {
         return getValuesNoException(reader, field).termsEnum();
     }
@@ -133,22 +114,6 @@ abstract class SortedSetDVAtomicFieldData {
         }
     }

-    static final class SortedSetHashedValues extends SortedSetValues {
-
-        private final IntArray hashes;
-
-        SortedSetHashedValues(AtomicReader reader, String field, SortedSetDocValues values, IntArray hashes) {
-            super(reader, field, values);
-            this.hashes = hashes;
-        }
-
-        @Override
-        public int currentValueHash() {
-            assert ordinals.currentOrd() >= 0;
-            return hashes.get(ordinals.currentOrd());
-        }
-    }
-
     static final class SortedSetOrdinals implements Ordinals {

         // We don't store SortedSetDocValues as a member because Ordinals must be thread-safe
@@ -39,7 +39,7 @@ public final class SortedSetDVBytesAtomicFieldData extends SortedSetDVAtomicFiel

     @Override
     public Strings getScriptValues() {
-        return new ScriptDocValues.Strings(getBytesValues(false));
+        return new ScriptDocValues.Strings(getBytesValues());
     }

 }
@@ -26,7 +26,6 @@ import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.lucene.HashedBytesRef;
 import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
@@ -76,7 +75,7 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent {
     private final ShardIndexingService indexingService;
     private final ShardPercolateService shardPercolateService;

-    private final ConcurrentMap<HashedBytesRef, Query> percolateQueries = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency();
+    private final ConcurrentMap<BytesRef, Query> percolateQueries = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency();
     private final ShardLifecycleListener shardLifecycleListener = new ShardLifecycleListener();
     private final RealTimePercolatorOperationListener realTimePercolatorOperationListener = new RealTimePercolatorOperationListener();
     private final PercolateTypeListener percolateTypeListener = new PercolateTypeListener();
@@ -99,7 +98,7 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent {
         mapperService.addTypeListener(percolateTypeListener);
     }

-    public ConcurrentMap<HashedBytesRef, Query> percolateQueries() {
+    public ConcurrentMap<BytesRef, Query> percolateQueries() {
         return percolateQueries;
     }

@@ -128,13 +127,13 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent {

     public void addPercolateQuery(String idAsString, BytesReference source) {
         Query newquery = parsePercolatorDocument(idAsString, source);
-        HashedBytesRef id = new HashedBytesRef(new BytesRef(idAsString));
+        BytesRef id = new BytesRef(idAsString);
         Query previousQuery = percolateQueries.put(id, newquery);
         shardPercolateService.addedQuery(id, previousQuery, newquery);
     }

     public void removePercolateQuery(String idAsString) {
-        HashedBytesRef id = new HashedBytesRef(idAsString);
+        BytesRef id = new BytesRef(idAsString);
         Query query = percolateQueries.remove(id);
         if (query != null) {
             shardPercolateService.removedQuery(id, query);
@@ -265,8 +264,8 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent {
                 );
                 QueriesLoaderCollector queryCollector = new QueriesLoaderCollector(PercolatorQueriesRegistry.this, logger, mapperService, indexFieldDataService);
                 searcher.searcher().search(query, queryCollector);
-                Map<HashedBytesRef, Query> queries = queryCollector.queries();
-                for (Map.Entry<HashedBytesRef, Query> entry : queries.entrySet()) {
+                Map<BytesRef, Query> queries = queryCollector.queries();
+                for (Map.Entry<BytesRef, Query> entry : queries.entrySet()) {
                     Query previousQuery = percolateQueries.put(entry.getKey(), entry.getValue());
                     shardPercolateService.addedQuery(entry.getKey(), previousQuery, entry.getValue());
                 }
@@ -26,7 +26,6 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.logging.ESLogger;
-import org.elasticsearch.common.lucene.HashedBytesRef;
 import org.elasticsearch.index.fielddata.BytesValues;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldDataService;
@@ -42,7 +41,7 @@ import java.util.Map;
  */
 final class QueriesLoaderCollector extends Collector {

-    private final Map<HashedBytesRef, Query> queries = Maps.newHashMap();
+    private final Map<BytesRef, Query> queries = Maps.newHashMap();
     private final JustSourceFieldsVisitor fieldsVisitor = new JustSourceFieldsVisitor();
     private final PercolatorQueriesRegistry percolator;
     private final IndexFieldData idFieldData;
@@ -58,7 +57,7 @@ final class QueriesLoaderCollector extends Collector {
         this.idFieldData = indexFieldDataService.getForField(idMapper);
     }

-    public Map<HashedBytesRef, Query> queries() {
+    public Map<BytesRef, Query> queries() {
         return this.queries;
     }

@@ -75,7 +74,7 @@ final class QueriesLoaderCollector extends Collector {
                 // id is only used for logging, if we fail we log the id in the catch statement
                 final Query parseQuery = percolator.parsePercolatorDocument(null, fieldsVisitor.source());
                 if (parseQuery != null) {
-                    queries.put(new HashedBytesRef(idValues.copyShared(), idValues.currentValueHash()), parseQuery);
+                    queries.put(idValues.copyShared(), parseQuery);
                 } else {
                     logger.warn("failed to add query [{}] - parser returned null", id);
                 }
@@ -89,7 +88,7 @@ final class QueriesLoaderCollector extends Collector {
     @Override
     public void setNextReader(AtomicReaderContext context) throws IOException {
         reader = context.reader();
-        idValues = idFieldData.load(context).getBytesValues(true);
+        idValues = idFieldData.load(context).getBytesValues();
     }

     @Override
@@ -20,8 +20,8 @@
 package org.elasticsearch.index.percolator.stats;

 import org.apache.lucene.search.Query;
+import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.lucene.HashedBytesRef;
 import org.elasticsearch.common.metrics.CounterMetric;
 import org.elasticsearch.common.metrics.MeanMetric;
 import org.elasticsearch.common.settings.Settings;
@@ -60,11 +60,11 @@ public class ShardPercolateService extends AbstractIndexShardComponent {
         percolateMetric.inc(tookInNanos);
     }

-    public void addedQuery(HashedBytesRef id, Query previousQuery, Query newQuery) {
+    public void addedQuery(BytesRef id, Query previousQuery, Query newQuery) {
         numberOfQueries.inc();
     }

-    public void removedQuery(HashedBytesRef id, Query query) {
+    public void removedQuery(BytesRef id, Query query) {
         numberOfQueries.dec();
     }

@@ -129,7 +129,7 @@ public abstract class FieldDataTermsFilter extends Filter {
         // make sure there are terms to filter on
         if (terms == null || terms.isEmpty()) return null;

-        final BytesValues values = fieldData.load(context).getBytesValues(false); // load fielddata
+        final BytesValues values = fieldData.load(context).getBytesValues(); // load fielddata
         return new MatchDocIdSet(context.reader().maxDoc(), acceptDocs) {
             @Override
             protected boolean matchDoc(int doc) {
@@ -106,7 +106,7 @@ public class ChildrenConstantScoreQuery extends Query {
             return Queries.newMatchNoDocsQuery().createWeight(searcher);
         } else {
             AtomicFieldData.WithOrdinals afd = globalIfd.load(leaves.get(0));
-            BytesValues.WithOrdinals globalValues = afd.getBytesValues(false);
+            BytesValues.WithOrdinals globalValues = afd.getBytesValues();
             Ordinals.Docs globalOrdinals = globalValues.ordinals();
             maxOrd = globalOrdinals.getMaxOrd();
         }
@@ -240,7 +240,7 @@ public class ChildrenConstantScoreQuery extends Query {
             DocIdSetIterator innerIterator = parentDocIdSet.iterator();
             if (innerIterator != null) {
                 LongBitSet parentOrds = collector.parentOrds;
-                BytesValues.WithOrdinals globalValues = globalIfd.load(context).getBytesValues(false);
+                BytesValues.WithOrdinals globalValues = globalIfd.load(context).getBytesValues();
                 if (globalValues != null) {
                     Ordinals.Docs globalOrdinals = globalValues.ordinals();
                     DocIdSetIterator parentIdIterator = new ParentOrdIterator(innerIterator, parentOrds, globalOrdinals, this);
@@ -279,7 +279,7 @@ public class ChildrenConstantScoreQuery extends Query {

         @Override
         public void setNextReader(AtomicReaderContext context) throws IOException {
-            values = indexFieldData.load(context).getBytesValues(false);
+            values = indexFieldData.load(context).getBytesValues();
             if (values != null) {
                 globalOrdinals = values.ordinals();
             } else {
@@ -284,7 +284,7 @@ public class ChildrenQuery extends Query {
         DocIdSetIterator parents = BitsFilteredDocIdSet.wrap(parentsSet, context.reader().getLiveDocs()).iterator();

         if (parents != null) {
-            BytesValues.WithOrdinals bytesValues = collector.globalIfd.load(context).getBytesValues(false);
+            BytesValues.WithOrdinals bytesValues = collector.globalIfd.load(context).getBytesValues();
             if (bytesValues == null) {
                 return null;
             }
@@ -360,7 +360,7 @@ public class ChildrenQuery extends Query {

         @Override
         public void setNextReader(AtomicReaderContext context) throws IOException {
-            values = globalIfd.load(context).getBytesValues(false);
+            values = globalIfd.load(context).getBytesValues();
             if (values != null) {
                 globalOrdinals = values.ordinals();
             }
@@ -95,7 +95,7 @@ public class ParentConstantScoreQuery extends Query {
             return Queries.newMatchNoDocsQuery().createWeight(searcher);
         } else {
             AtomicFieldData.WithOrdinals afd = globalIfd.load(leaves.get(0));
-            BytesValues.WithOrdinals globalValues = afd.getBytesValues(false);
+            BytesValues.WithOrdinals globalValues = afd.getBytesValues();
             Ordinals.Docs globalOrdinals = globalValues.ordinals();
             maxOrd = globalOrdinals.getMaxOrd();
         }
@@ -196,7 +196,7 @@ public class ParentConstantScoreQuery extends Query {
                 return null;
             }

-            BytesValues.WithOrdinals globalValues = globalIfd.load(context).getBytesValues(false);
+            BytesValues.WithOrdinals globalValues = globalIfd.load(context).getBytesValues();
             if (globalValues != null) {
                 DocIdSetIterator innerIterator = childrenDocIdSet.iterator();
                 if (innerIterator != null) {
@@ -259,7 +259,7 @@ public class ParentConstantScoreQuery extends Query {

         @Override
         public void setNextReader(AtomicReaderContext readerContext) throws IOException {
-            BytesValues.WithOrdinals values = globalIfd.load(readerContext).getBytesValues(false);
+            BytesValues.WithOrdinals values = globalIfd.load(readerContext).getBytesValues();
             if (values != null) {
                 globalOrdinals = values.ordinals();
             }
@@ -203,7 +203,7 @@ public class ParentQuery extends Query {

         @Override
         public void setNextReader(AtomicReaderContext context) throws IOException {
-            values = globalIfd.load(context).getBytesValues(false);
+            values = globalIfd.load(context).getBytesValues();
             if (values != null) {
                 globalOrdinals = values.ordinals();
             }
@@ -263,7 +263,7 @@ public class ParentQuery extends Query {
             if (DocIdSets.isEmpty(childrenDocSet)) {
                 return null;
             }
-            BytesValues.WithOrdinals bytesValues = globalIfd.load(context).getBytesValues(false);
+            BytesValues.WithOrdinals bytesValues = globalIfd.load(context).getBytesValues();
             if (bytesValues == null) {
                 return null;
             }
@@ -23,13 +23,13 @@ import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.search.*;
+import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.action.percolate.PercolateShardRequest;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.cache.recycler.CacheRecycler;
 import org.elasticsearch.cache.recycler.PageCacheRecycler;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
-import org.elasticsearch.common.lucene.HashedBytesRef;
 import org.elasticsearch.common.text.StringText;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.index.analysis.AnalysisService;
@@ -92,7 +92,7 @@ public class PercolateContext extends SearchContext {
     private final PageCacheRecycler pageCacheRecycler;
     private final BigArrays bigArrays;
     private final ScriptService scriptService;
-    private final ConcurrentMap<HashedBytesRef, Query> percolateQueries;
+    private final ConcurrentMap<BytesRef, Query> percolateQueries;
     private final int numberOfShards;
     private String[] types;

@@ -162,7 +162,7 @@ public class PercolateContext extends SearchContext {
         return indexService;
     }

-    public ConcurrentMap<HashedBytesRef, Query> percolateQueries() {
+    public ConcurrentMap<BytesRef, Query> percolateQueries() {
         return percolateQueries;
     }

@@ -43,7 +43,6 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.lucene.HashedBytesRef;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.search.XCollector;
 import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
@@ -72,7 +71,10 @@ import org.elasticsearch.index.query.ParsedQuery;
 import org.elasticsearch.index.service.IndexService;
 import org.elasticsearch.index.shard.service.IndexShard;
 import org.elasticsearch.indices.IndicesService;
-import org.elasticsearch.percolator.QueryCollector.*;
+import org.elasticsearch.percolator.QueryCollector.Count;
+import org.elasticsearch.percolator.QueryCollector.Match;
+import org.elasticsearch.percolator.QueryCollector.MatchAndScore;
+import org.elasticsearch.percolator.QueryCollector.MatchAndSort;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.SearchParseElement;
 import org.elasticsearch.search.SearchShardTarget;
@@ -444,7 +446,7 @@ public class PercolatorService extends AbstractComponent {
         public PercolateShardResponse doPercolate(PercolateShardRequest request, PercolateContext context) {
             long count = 0;
             Lucene.ExistsCollector collector = new Lucene.ExistsCollector();
-            for (Map.Entry<HashedBytesRef, Query> entry : context.percolateQueries().entrySet()) {
+            for (Map.Entry<BytesRef, Query> entry : context.percolateQueries().entrySet()) {
                 collector.reset();
                 try {
                     context.docSearcher().search(entry.getValue(), collector);
@@ -538,7 +540,7 @@ public class PercolatorService extends AbstractComponent {
             List<Map<String, HighlightField>> hls = new ArrayList<>();
             Lucene.ExistsCollector collector = new Lucene.ExistsCollector();

-            for (Map.Entry<HashedBytesRef, Query> entry : context.percolateQueries().entrySet()) {
+            for (Map.Entry<BytesRef, Query> entry : context.percolateQueries().entrySet()) {
                 collector.reset();
                 if (context.highlight() != null) {
                     context.parsedQuery(new ParsedQuery(entry.getValue(), ImmutableMap.<String, Filter>of()));
@@ -553,7 +555,7 @@ public class PercolatorService extends AbstractComponent {

                 if (collector.exists()) {
                     if (!context.limit || count < context.size()) {
-                        matches.add(entry.getKey().bytes);
+                        matches.add(entry.getKey());
                         if (context.highlight() != null) {
                             highlightPhase.hitExecute(context, context.hitContext());
                             hls.add(context.hitContext().hit().getHighlightFields());
@@ -745,19 +747,17 @@ public class PercolatorService extends AbstractComponent {
             final FieldMapper<?> idMapper = context.mapperService().smartNameFieldMapper(IdFieldMapper.NAME);
             final IndexFieldData<?> idFieldData = context.fieldData().getForField(idMapper);
             int i = 0;
-            final HashedBytesRef spare = new HashedBytesRef(new BytesRef());
             for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
                 int segmentIdx = ReaderUtil.subIndex(scoreDoc.doc, percolatorSearcher.reader().leaves());
                 AtomicReaderContext atomicReaderContext = percolatorSearcher.reader().leaves().get(segmentIdx);
-                BytesValues values = idFieldData.load(atomicReaderContext).getBytesValues(true);
+                BytesValues values = idFieldData.load(atomicReaderContext).getBytesValues();
                 final int localDocId = scoreDoc.doc - atomicReaderContext.docBase;
                 final int numValues = values.setDocument(localDocId);
                 assert numValues == 1;
-                spare.bytes = values.nextValue();
-                spare.hash = values.currentValueHash();
+                BytesRef bytes = values.nextValue();
                 matches.add(values.copyShared());
                 if (hls != null) {
-                    Query query = context.percolateQueries().get(spare);
+                    Query query = context.percolateQueries().get(bytes);
                     context.parsedQuery(new ParsedQuery(query, ImmutableMap.<String, Filter>of()));
                     context.hitContext().cache().clear();
                     highlightPhase.hitExecute(context, context.hitContext());
@@ -25,7 +25,6 @@ import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.search.*;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.logging.ESLogger;
-import org.elasticsearch.common.lucene.HashedBytesRef;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.search.FilteredCollector;
 import org.elasticsearch.index.fielddata.BytesValues;
@@ -54,11 +53,11 @@ abstract class QueryCollector extends Collector {

     final IndexFieldData<?> idFieldData;
     final IndexSearcher searcher;
-    final ConcurrentMap<HashedBytesRef, Query> queries;
+    final ConcurrentMap<BytesRef, Query> queries;
     final ESLogger logger;

     final Lucene.ExistsCollector collector = new Lucene.ExistsCollector();
-    final HashedBytesRef spare = new HashedBytesRef(new BytesRef());
+    BytesRef current;

     BytesValues values;

@@ -128,7 +127,7 @@ abstract class QueryCollector extends Collector {
     @Override
     public void setNextReader(AtomicReaderContext context) throws IOException {
         // we use the UID because id might not be indexed
-        values = idFieldData.load(context).getBytesValues(true);
+        values = idFieldData.load(context).getBytesValues();
         for (Collector collector : facetAndAggregatorCollector) {
             collector.setNextReader(context);
         }
@@ -163,8 +162,8 @@ abstract class QueryCollector extends Collector {
             return null;
         }
         assert numValues == 1;
-        spare.reset(values.nextValue(), values.currentValueHash());
-        return queries.get(spare);
+        current = values.nextValue();
+        return queries.get(current);
     }


@@ -216,7 +215,7 @@ abstract class QueryCollector extends Collector {
                     postMatch(doc);
                 }
             } catch (IOException e) {
-                logger.warn("[" + spare.bytes.utf8ToString() + "] failed to execute query", e);
+                logger.warn("[" + current.utf8ToString() + "] failed to execute query", e);
             }
         }

@@ -259,7 +258,7 @@ abstract class QueryCollector extends Collector {
                     postMatch(doc);
                 }
             } catch (IOException e) {
-                logger.warn("[" + spare.bytes.utf8ToString() + "] failed to execute query", e);
+                logger.warn("[" + current.utf8ToString() + "] failed to execute query", e);
             }
         }

@@ -331,7 +330,7 @@ abstract class QueryCollector extends Collector {
                     postMatch(doc);
                 }
             } catch (IOException e) {
-                logger.warn("[" + spare.bytes.utf8ToString() + "] failed to execute query", e);
+                logger.warn("[" + current.utf8ToString() + "] failed to execute query", e);
             }
         }

@@ -381,7 +380,7 @@ abstract class QueryCollector extends Collector {
                     postMatch(doc);
                 }
             } catch (IOException e) {
-                logger.warn("[" + spare.bytes.utf8ToString() + "] failed to execute query", e);
+                logger.warn("[" + current.utf8ToString() + "] failed to execute query", e);
             }
         }

@@ -80,9 +80,7 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator {
                 if (includeExclude != null && !includeExclude.accept(bytes)) {
                     continue;
                 }
-                final int hash = values.currentValueHash();
-                assert hash == bytes.hashCode();
-                long bucketOrdinal = bucketOrds.add(bytes, hash);
+                long bucketOrdinal = bucketOrds.add(bytes);
                 if (bucketOrdinal < 0) { // already seen
                     bucketOrdinal = - 1 - bucketOrdinal;
                     collectExistingBucket(doc, bucketOrdinal);
@@ -151,7 +149,7 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator {
                 for (int i = 0; i < valueCount; ++i) {
                     final BytesRef term = values.nextValue();
                     if (includeExclude == null || includeExclude.accept(term)) {
-                        bucketOrds.add(term, values.currentValueHash());
+                        bucketOrds.add(term);
                     }
                 }
             }
@@ -176,9 +176,6 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
                 readerAwares.add((ReaderContextAware) dataSource);
             }
         }
-        if (config.needsHashes) {
-            dataSource.setNeedsHashes(true);
-        }
         return dataSource;
     }

@@ -214,10 +211,6 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
             dataSource = new ValuesSource.Bytes.SortedAndUnique(dataSource);
             readerAwares.add((ReaderContextAware) dataSource);
         }
-
-        if (config.needsHashes) { // the data source needs hash if at least one consumer needs hashes
-            dataSource.setNeedsHashes(true);
-        }
         return dataSource;
     }

@@ -244,9 +237,6 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
             readerAwares.add(dataSource);
             fieldDataSources.put(cacheKey, dataSource);
         }
-        if (config.needsHashes) {
-            dataSource.setNeedsHashes(true);
-        }
         return dataSource;
     }

@@ -23,13 +23,15 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexReaderContext;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.util.*;
+import org.apache.lucene.util.ArrayUtil;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefArray;
+import org.apache.lucene.util.Counter;
 import org.elasticsearch.common.lucene.ReaderContextAware;
 import org.elasticsearch.common.lucene.TopReaderContextAware;
 import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.index.fielddata.*;
 import org.elasticsearch.index.fielddata.AtomicFieldData.Order;
-import org.elasticsearch.index.fielddata.LongValues;
 import org.elasticsearch.index.fielddata.ordinals.Ordinals;
 import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Bytes.SortedAndUnique.SortedUniqueBytesValues;
@@ -142,11 +144,6 @@ public abstract class ValuesSource {
      */
     public abstract BytesValues bytesValues();

-    /**
-     * Ask the underlying data source to provide pre-computed hashes, optional operation.
-     */
-    public void setNeedsHashes(boolean needsHashes) {}
-
     public void setNeedsGlobalOrdinals(boolean needsGlobalOrdinals) {}

     public abstract MetaData metaData();
@@ -167,7 +164,6 @@ public abstract class ValuesSource {

     public static class FieldData extends WithOrdinals implements ReaderContextAware {

-        protected boolean needsHashes;
         protected final IndexFieldData.WithOrdinals<?> indexFieldData;
         protected final MetaData metaData;
         private boolean needsGlobalOrdinals;
@@ -184,7 +180,6 @@ public abstract class ValuesSource {
         public FieldData(IndexFieldData.WithOrdinals<?> indexFieldData, MetaData metaData) {
             this.indexFieldData = indexFieldData;
             this.metaData = metaData;
-            needsHashes = false;
         }

         @Override
@@ -192,10 +187,6 @@ public abstract class ValuesSource {
             return metaData;
         }

-        public final void setNeedsHashes(boolean needsHashes) {
-            this.needsHashes = needsHashes;
-        }
-
         @Override
         public void setNeedsGlobalOrdinals(boolean needsGlobalOrdinals) {
             this.needsGlobalOrdinals = needsGlobalOrdinals;
@@ -205,12 +196,12 @@ public abstract class ValuesSource {
         public void setNextReader(AtomicReaderContext reader) {
             atomicFieldData = indexFieldData.load(reader);
             if (bytesValues != null) {
-                bytesValues = atomicFieldData.getBytesValues(needsHashes);
+                bytesValues = atomicFieldData.getBytesValues();
             }
             if (globalFieldData != null) {
                 globalAtomicFieldData = globalFieldData.load(reader);
                 if (globalBytesValues != null) {
-                    globalBytesValues = globalAtomicFieldData.getBytesValues(needsHashes);
+                    globalBytesValues = globalAtomicFieldData.getBytesValues();
                 }
             }
         }
@@ -218,7 +209,7 @@ public abstract class ValuesSource {
         @Override
         public BytesValues.WithOrdinals bytesValues() {
             if (bytesValues == null) {
-                bytesValues = atomicFieldData.getBytesValues(needsHashes);
+                bytesValues = atomicFieldData.getBytesValues();
             }
             return bytesValues;
         }
@@ -233,7 +224,7 @@ public abstract class ValuesSource {
         @Override
         public BytesValues.WithOrdinals globalBytesValues() {
             if (globalBytesValues == null) {
-                globalBytesValues = globalAtomicFieldData.getBytesValues(needsHashes);
+                globalBytesValues = globalAtomicFieldData.getBytesValues();
             }
             return globalBytesValues;
         }
@@ -251,7 +242,7 @@ public abstract class ValuesSource {
             AtomicReaderContext atomicReaderContext = indexReader.leaves().get(0);
             IndexFieldData.WithOrdinals<?> globalFieldData = indexFieldData.loadGlobal(indexReader);
             AtomicFieldData.WithOrdinals afd = globalFieldData.load(atomicReaderContext);
-            BytesValues.WithOrdinals values = afd.getBytesValues(false);
+            BytesValues.WithOrdinals values = afd.getBytesValues();
             Ordinals.Docs ordinals = values.ordinals();
             return maxOrd = ordinals.getMaxOrd();
         }
@@ -292,14 +283,14 @@ public abstract class ValuesSource {
         public void setNextReader(AtomicReaderContext reader) {
             atomicFieldData = indexFieldData.load(reader);
             if (bytesValues != null) {
-                bytesValues = atomicFieldData.getBytesValues(needsHashes);
+                bytesValues = atomicFieldData.getBytesValues();
             }
         }

         @Override
         public org.elasticsearch.index.fielddata.BytesValues bytesValues() {
             if (bytesValues == null) {
-                bytesValues = atomicFieldData.getBytesValues(needsHashes);
+                bytesValues = atomicFieldData.getBytesValues();
             }
             return bytesValues;
         }
@@ -526,16 +517,11 @@ public abstract class ValuesSource {
             return indexFieldData.getNumericType().isFloatingPoint();
         }

-        @Override
-        public final void setNeedsHashes(boolean needsHashes) {
-            this.needsHashes = needsHashes;
-        }
-
         @Override
         public void setNextReader(AtomicReaderContext reader) {
             atomicFieldData = indexFieldData.load(reader);
             if (bytesValues != null) {
-                bytesValues = atomicFieldData.getBytesValues(needsHashes);
+                bytesValues = atomicFieldData.getBytesValues();
             }
             if (longValues != null) {
                 longValues = atomicFieldData.getLongValues();
@@ -548,7 +534,7 @@ public abstract class ValuesSource {
         @Override
         public org.elasticsearch.index.fielddata.BytesValues bytesValues() {
             if (bytesValues == null) {
-                bytesValues = atomicFieldData.getBytesValues(needsHashes);
+                bytesValues = atomicFieldData.getBytesValues();
             }
             return bytesValues;
         }
@ -798,7 +784,6 @@ public abstract class ValuesSource {
|
|||
|
||||
public static class GeoPoint extends ValuesSource implements ReaderContextAware {
|
||||
|
||||
protected boolean needsHashes;
|
||||
protected final IndexGeoPointFieldData<?> indexFieldData;
|
||||
private final MetaData metaData;
|
||||
protected AtomicGeoPointFieldData<?> atomicFieldData;
|
||||
|
@@ -808,7 +793,6 @@ public abstract class ValuesSource {
         public GeoPoint(IndexGeoPointFieldData<?> indexFieldData, MetaData metaData) {
            this.indexFieldData = indexFieldData;
            this.metaData = metaData;
-           needsHashes = false;
        }

        @Override
@@ -816,16 +800,11 @@ public abstract class ValuesSource {
            return metaData;
        }

-       @Override
-       public final void setNeedsHashes(boolean needsHashes) {
-           this.needsHashes = needsHashes;
-       }
-
        @Override
        public void setNextReader(AtomicReaderContext reader) {
            atomicFieldData = indexFieldData.load(reader);
            if (bytesValues != null) {
-               bytesValues = atomicFieldData.getBytesValues(needsHashes);
+               bytesValues = atomicFieldData.getBytesValues();
            }
            if (geoPointValues != null) {
                geoPointValues = atomicFieldData.getGeoPointValues();
@@ -835,7 +814,7 @@ public abstract class ValuesSource {
        @Override
        public org.elasticsearch.index.fielddata.BytesValues bytesValues() {
            if (bytesValues == null) {
-               bytesValues = atomicFieldData.getBytesValues(needsHashes);
+               bytesValues = atomicFieldData.getBytesValues();
            }
            return bytesValues;
        }
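
Every ValuesSource variant touched above repeats the same two-method idiom, so the change is purely mechanical. A condensed sketch of that idiom, using only names that appear in the hunks above (illustrative, not the complete class):

    // Load the per-segment field data and refresh the cached view only if one exists.
    public void setNextReader(AtomicReaderContext reader) {
        atomicFieldData = indexFieldData.load(reader);
        if (bytesValues != null) {
            bytesValues = atomicFieldData.getBytesValues();
        }
    }

    // Create the lightweight per-segment view lazily on first request.
    public BytesValues bytesValues() {
        if (bytesValues == null) {
            bytesValues = atomicFieldData.getBytesValues();
        }
        return bytesValues;
    }

The only difference from the old code is the dropped boolean: callers no longer have to predict whether hashes will be needed when they ask for a view.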

@@ -33,7 +33,6 @@ public class ValuesSourceConfig<VS extends ValuesSource> {
     SearchScript script;
     ValueType scriptValueType;
     boolean unmapped = false;
-    boolean needsHashes = false;
     boolean ensureUnique = false;
     boolean ensureSorted = false;
     String formatPattern;
@@ -78,11 +77,6 @@ public class ValuesSourceConfig<VS extends ValuesSource> {
         return this;
     }

-    public ValuesSourceConfig<VS> needsHashes(boolean needsHashes) {
-        this.needsHashes = needsHashes;
-        return this;
-    }
-
     public ValueFormat format() {
         return format;
     }

@@ -107,7 +107,7 @@ public class FieldsTermsStringFacetExecutor extends FacetExecutor {
     @Override
     public void setNextReader(AtomicReaderContext context) throws IOException {
         for (int i = 0; i < indexFieldDatas.length; i++) {
-            values[i] = indexFieldDatas[i].load(context).getBytesValues(true);
+            values[i] = indexFieldDatas[i].load(context).getBytesValues();
         }
         if (script != null) {
             script.setNextReader(context);

@@ -49,7 +49,7 @@ public class HashedAggregator {
         total += length;
         for (int i = 0; i < length; i++) {
             final BytesRef value = values.nextValue();
-            onValue(docId, value, values.currentValueHash(), values);
+            onValue(docId, value, value.hashCode(), values);
             pendingMissing = 0;
         }
         missing += pendingMissing;
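
This hunk shows the replacement strategy used everywhere below: instead of reading a precomputed hash out of the field data, consumers hash the value they just received. A minimal illustration with Lucene's BytesRef (the behavior, not the exact hash function, is the point):

    import org.apache.lucene.util.BytesRef;

    // BytesRef.hashCode() is computed from the current bytes on each call, so
    // dropping the cached per-term hashes trades a little CPU per value for the
    // roughly 4 bytes per unique value that the hash arrays used to occupy.
    BytesRef value = new BytesRef("elasticsearch");
    int hash = value.hashCode(); // stands in for the removed values.currentValueHash()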

@@ -107,7 +107,7 @@ public class TermsStringFacetExecutor extends FacetExecutor {

     @Override
     public void setNextReader(AtomicReaderContext context) throws IOException {
-        values = indexFieldData.load(context).getBytesValues(true);
+        values = indexFieldData.load(context).getBytesValues();
         if (script != null) {
             script.setNextReader(context);
         }
@@ -130,7 +130,7 @@ public class TermsStringFacetExecutor extends FacetExecutor {
         for (AtomicReaderContext readerContext : context.searcher().getTopReaderContext().leaves()) {
             int maxDoc = readerContext.reader().maxDoc();
             if (indexFieldData instanceof IndexFieldData.WithOrdinals) {
-                BytesValues.WithOrdinals values = ((IndexFieldData.WithOrdinals) indexFieldData).load(readerContext).getBytesValues(false);
+                BytesValues.WithOrdinals values = ((IndexFieldData.WithOrdinals) indexFieldData).load(readerContext).getBytesValues();
                 Ordinals.Docs ordinals = values.ordinals();
                 // 0 = docs with no value for field, so start from 1 instead
                 for (long ord = Ordinals.MIN_ORDINAL; ord < ordinals.getMaxOrd(); ord++) {
@@ -138,12 +138,12 @@ public class TermsStringFacetExecutor extends FacetExecutor {
                     aggregator.addValue(value, value.hashCode(), values);
                 }
             } else {
-                BytesValues values = indexFieldData.load(readerContext).getBytesValues(true);
+                BytesValues values = indexFieldData.load(readerContext).getBytesValues();
                 for (int docId = 0; docId < maxDoc; docId++) {
                     final int size = values.setDocument(docId);
                     for (int i = 0; i < size; i++) {
                         final BytesRef value = values.nextValue();
-                        aggregator.addValue(value, values.currentValueHash(), values);
+                        aggregator.addValue(value, value.hashCode(), values);
                     }
                 }
             }
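
Both branches above use the same per-document iteration protocol; only the hash source changed. A sketch of the non-ordinals traversal, built from calls visible in this diff (the deep copy is only needed when bytes must outlive the iteration):

    final int size = values.setDocument(docId);          // how many values this doc has
    for (int i = 0; i < size; i++) {
        final BytesRef value = values.nextValue();       // shared instance, valid until the next call
        final int hash = value.hashCode();               // computed from the bytes, replaces currentValueHash()
        final BytesRef copy = BytesRef.deepCopyOf(value); // copy if the bytes are kept around
    }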

@@ -213,7 +213,7 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {
                 Releasables.close(current);
             }
         }
-        values = indexFieldData.load(context).getBytesValues(false);
+        values = indexFieldData.load(context).getBytesValues();
         current = new ReaderAggregator(values, ordinalsCacheAbove, cacheRecycler);
         ordinals = values.ordinals();
     }

@@ -131,7 +131,7 @@ public class TermsStatsStringFacetExecutor extends FacetExecutor {

     @Override
     public void setNextReader(AtomicReaderContext context) throws IOException {
-        keyValues = keyIndexFieldData.load(context).getBytesValues(true);
+        keyValues = keyIndexFieldData.load(context).getBytesValues();
         if (script != null) {
             script.setNextReader(context);
         } else {

@@ -66,7 +66,7 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
         IndexFieldData indexFieldData = getForField("value");
         AtomicReaderContext readerContext = refreshReader();
         AtomicFieldData fieldData = indexFieldData.load(readerContext);
-        BytesValues values = fieldData.getBytesValues(randomBoolean());
+        BytesValues values = fieldData.getBytesValues();
         for (int i = 0; i < readerContext.reader().maxDoc(); ++i) {
             assertThat(values.setDocument(i), greaterThanOrEqualTo(1));
         }
@@ -80,7 +80,7 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
         AtomicFieldData fieldData = indexFieldData.load(readerContext);
         assertThat(fieldData.getMemorySizeInBytes(), greaterThan(0l));

-        BytesValues bytesValues = fieldData.getBytesValues(randomBoolean());
+        BytesValues bytesValues = fieldData.getBytesValues();

         assertThat(bytesValues.isMultiValued(), equalTo(false));

@@ -95,7 +95,7 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
         assertValues(bytesValues, 1, one());
         assertValues(bytesValues, 2, three());

-        BytesValues hashedBytesValues = fieldData.getBytesValues(randomBoolean());
+        BytesValues hashedBytesValues = fieldData.getBytesValues();

         assertThat(convert(hashedBytesValues, 0), equalTo(new HashedBytesRef(two())));
         assertThat(convert(hashedBytesValues, 1), equalTo(new HashedBytesRef(one())));
@@ -127,7 +127,7 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests

     private HashedBytesRef convert(BytesValues values, int doc) {
         if (values.setDocument(doc) > 0) {
-            return new HashedBytesRef(BytesRef.deepCopyOf(values.nextValue()), values.currentValueHash());
+            return new HashedBytesRef(BytesRef.deepCopyOf(values.nextValue()));
         } else {
             return new HashedBytesRef(new BytesRef());
         }
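
The helper above can drop the explicit hash argument because HashedBytesRef's single-argument constructor derives the hash from the bytes itself; that is inferred here from the two constructor shapes visible in this diff and from the unchanged test expectations:

    BytesRef bytes = new BytesRef("foo");
    HashedBytesRef implicit = new HashedBytesRef(bytes);                   // hash derived from bytes
    HashedBytesRef explicit = new HashedBytesRef(bytes, bytes.hashCode()); // old two-argument form

For the surrounding assertions to keep passing, both forms must produce equal objects for equal byte content.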

@@ -151,11 +151,8 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests

     public void assertHashedValues(BytesValues values, int docId, BytesRef... actualValues) {
         assertThat(values.setDocument(docId), equalTo(actualValues.length));
-        BytesRef r = new BytesRef();
         for (int i = 0; i < actualValues.length; i++) {
             assertThat(values.nextValue(), equalTo(new HashedBytesRef(actualValues[i]).bytes));
-            assertThat(values.currentValueHash(), equalTo(new HashedBytesRef(actualValues[i]).hash));
-
         }
     }

@@ -163,7 +160,6 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
         assertThat(values.setDocument(docId), equalTo(actualValues.length));
         for (int i = 0; i < actualValues.length; i++) {
             assertThat(values.nextValue(), equalTo(new HashedBytesRef(actualValues[i]).bytes));
-            assertThat(values.currentValueHash(), equalTo(new HashedBytesRef(actualValues[i]).hash));
         }
     }

@@ -176,7 +172,7 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
         assertThat(fieldData.getMemorySizeInBytes(), greaterThan(0l));

         BytesValues bytesValues = fieldData
-                .getBytesValues(randomBoolean());
+                .getBytesValues();

         assertThat(bytesValues.isMultiValued(), equalTo(false));

@@ -184,7 +180,7 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
         assertValues(bytesValues, 1, Strings.EMPTY_ARRAY);
         assertValues(bytesValues, 2, three());

-        BytesValues hashedBytesValues = fieldData.getBytesValues(randomBoolean());
+        BytesValues hashedBytesValues = fieldData.getBytesValues();
         assertThat(convert(hashedBytesValues, 0), equalTo(new HashedBytesRef(two())));
         assertThat(convert(hashedBytesValues, 1), equalTo(new HashedBytesRef(new BytesRef())));
         assertThat(convert(hashedBytesValues, 2), equalTo(new HashedBytesRef(three())));
@@ -205,7 +201,7 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
         AtomicFieldData fieldData = indexFieldData.load(refreshReader());
         assertThat(fieldData.getMemorySizeInBytes(), greaterThan(0l));

-        BytesValues bytesValues = fieldData.getBytesValues(randomBoolean());
+        BytesValues bytesValues = fieldData.getBytesValues();

         assertThat(bytesValues.isMultiValued(), equalTo(true));

@@ -214,7 +210,7 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
         assertValues(bytesValues, 2, three());


-        BytesValues hashedBytesValues = fieldData.getBytesValues(randomBoolean());
+        BytesValues hashedBytesValues = fieldData.getBytesValues();

         assertThat(convert(hashedBytesValues, 0), equalTo(new HashedBytesRef(two())));
         assertThat(convert(hashedBytesValues, 1), equalTo(new HashedBytesRef(one())));
@@ -247,14 +243,14 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
         AtomicFieldData fieldData = indexFieldData.load(refreshReader());
         assertThat(fieldData.getMemorySizeInBytes(), greaterThan(0l));

-        BytesValues bytesValues = fieldData.getBytesValues(randomBoolean());
+        BytesValues bytesValues = fieldData.getBytesValues();

         assertThat(bytesValues.isMultiValued(), equalTo(true));

         assertValues(bytesValues, 0, two(), four());
         assertValues(bytesValues, 1, Strings.EMPTY_ARRAY);

-        BytesValues hashedBytesValues = fieldData.getBytesValues(randomBoolean());
+        BytesValues hashedBytesValues = fieldData.getBytesValues();


         assertThat(convert(hashedBytesValues, 0), equalTo(new HashedBytesRef(two())));
@@ -278,14 +274,14 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
         // Some impls (FST) return size 0 and some (PagedBytes) do take size in the case no actual data is loaded
         assertThat(fieldData.getMemorySizeInBytes(), greaterThanOrEqualTo(0l));

-        BytesValues bytesValues = fieldData.getBytesValues(randomBoolean());
+        BytesValues bytesValues = fieldData.getBytesValues();

         assertThat(bytesValues.isMultiValued(), equalTo(false));

         assertValues(bytesValues, 0, Strings.EMPTY_ARRAY);
         assertValues(bytesValues, 1, Strings.EMPTY_ARRAY);
         assertValues(bytesValues, 2, Strings.EMPTY_ARRAY);
-        BytesValues hashedBytesValues = fieldData.getBytesValues(randomBoolean());
+        BytesValues hashedBytesValues = fieldData.getBytesValues();

         assertValues(hashedBytesValues, 0, Strings.EMPTY_ARRAY);
         assertValues(hashedBytesValues, 1, Strings.EMPTY_ARRAY);

@@ -42,10 +42,10 @@ import org.elasticsearch.common.lucene.search.XFilteredQuery;
 import org.elasticsearch.common.settings.ImmutableSettings;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource;
 import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
-import org.elasticsearch.search.MultiValueMode;
 import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsIndexFieldData;
 import org.elasticsearch.index.fielddata.ordinals.Ordinals;
 import org.elasticsearch.index.search.nested.NestedFieldComparatorSource;
+import org.elasticsearch.search.MultiValueMode;
 import org.junit.Test;

 import java.io.IOException;
@@ -435,8 +435,8 @@ public abstract class AbstractStringFieldDataTests extends AbstractFieldDataImpl
         // First segment
         assertThat(globalOrdinals, instanceOf(GlobalOrdinalsIndexFieldData.class));
         AtomicFieldData.WithOrdinals afd = globalOrdinals.load(topLevelReader.leaves().get(0));
-        BytesValues.WithOrdinals values = afd.getBytesValues(randomBoolean());
-        Ordinals.Docs ordinals = afd.getBytesValues(randomBoolean()).ordinals();
+        BytesValues.WithOrdinals values = afd.getBytesValues();
+        Ordinals.Docs ordinals = afd.getBytesValues().ordinals();
         assertThat(ordinals.setDocument(0), equalTo(2));
         long ord = ordinals.nextOrd();
         assertThat(ord, equalTo(3l));
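
For reference, the access pattern these assertions exercise, resolving a document's terms through (global) ordinals, assembled only from calls that appear in this test and in the FilterFieldDataTest hunks further down (a sketch, not part of the actual test):

    BytesValues.WithOrdinals values = afd.getBytesValues();
    Ordinals.Docs ordinals = values.ordinals();
    int count = ordinals.setDocument(0);           // number of ordinals for document 0
    for (int i = 0; i < count; i++) {
        long ord = ordinals.nextOrd();             // comparable across segments when global
        BytesRef term = values.getValueByOrd(ord); // resolve the ordinal back to its bytes
    }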

@@ -452,8 +452,8 @@ public abstract class AbstractStringFieldDataTests extends AbstractFieldDataImpl

         // Second segment
         afd = globalOrdinals.load(topLevelReader.leaves().get(1));
-        values = afd.getBytesValues(randomBoolean());
-        ordinals = afd.getBytesValues(randomBoolean()).ordinals();
+        values = afd.getBytesValues();
+        ordinals = afd.getBytesValues().ordinals();
         assertThat(ordinals.setDocument(0), equalTo(3));
         ord = ordinals.nextOrd();
         assertThat(ord, equalTo(5l));
@@ -488,8 +488,8 @@ public abstract class AbstractStringFieldDataTests extends AbstractFieldDataImpl

         // Third segment
         afd = globalOrdinals.load(topLevelReader.leaves().get(2));
-        values = afd.getBytesValues(randomBoolean());
-        ordinals = afd.getBytesValues(randomBoolean()).ordinals();
+        values = afd.getBytesValues();
+        ordinals = afd.getBytesValues().ordinals();
         assertThat(ordinals.setDocument(0), equalTo(3));
         ord = ordinals.nextOrd();
         assertThat(ord, equalTo(0l));

@@ -80,7 +80,7 @@ public class BinaryDVFieldDataTests extends AbstractFieldDataTests {
         IndexFieldData indexFieldData = getForField("field");
         AtomicFieldData fieldData = indexFieldData.load(reader);

-        BytesValues bytesValues = fieldData.getBytesValues(randomBoolean());
+        BytesValues bytesValues = fieldData.getBytesValues();

         CollectionUtils.sortAndDedup(bytesList1);
         assertThat(bytesValues.setDocument(0), equalTo(2));

@@ -401,9 +401,9 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
         for (Map.Entry<FieldDataType, Type> entry : typeMap.entrySet()) {
             ifdService.clear();
             IndexFieldData.WithOrdinals<?> fieldData = getForField(entry.getKey(), entry.getValue().name().toLowerCase(Locale.ROOT));
-            BytesValues.WithOrdinals left = fieldData.load(readerContext).getBytesValues(randomBoolean());
+            BytesValues.WithOrdinals left = fieldData.load(readerContext).getBytesValues();
             fieldData.clear();
-            BytesValues.WithOrdinals right = fieldData.loadGlobal(topLevelReader).load(topLevelReader.leaves().get(0)).getBytesValues(randomBoolean());
+            BytesValues.WithOrdinals right = fieldData.loadGlobal(topLevelReader).load(topLevelReader.leaves().get(0)).getBytesValues();
             Docs leftOrds = left.ordinals();
             Docs rightOrds = right.ordinals();
             assertEquals(leftOrds.getMaxOrd(), rightOrds.getMaxOrd());
@@ -483,7 +483,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {

     private void assertOrder(AtomicFieldData.Order order, IndexFieldData<?> data, AtomicReaderContext context) throws Exception {
         AtomicFieldData<?> leftData = randomBoolean() ? data.load(context) : data.loadDirect(context);
-        assertThat(leftData.getBytesValues(randomBoolean()).getOrder(), is(order));
+        assertThat(leftData.getBytesValues().getOrder(), is(order));
     }

     private int[] getNumbers(Random random, int margin) {
@@ -504,8 +504,8 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
         AtomicFieldData<?> rightData = random.nextBoolean() ? right.load(context) : right.loadDirect(context);

         int numDocs = context.reader().maxDoc();
-        BytesValues leftBytesValues = leftData.getBytesValues(random.nextBoolean());
-        BytesValues rightBytesValues = rightData.getBytesValues(random.nextBoolean());
+        BytesValues leftBytesValues = leftData.getBytesValues();
+        BytesValues rightBytesValues = rightData.getBytesValues();
         BytesRef leftSpare = new BytesRef();
         BytesRef rightSpare = new BytesRef();

@@ -517,8 +517,6 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {

                 rightSpare.copyBytes(rightBytesValues.nextValue());
                 leftSpare.copyBytes(leftBytesValues.nextValue());
-                assertThat(rightSpare.hashCode(), equalTo(rightBytesValues.currentValueHash()));
-                assertThat(leftSpare.hashCode(), equalTo(leftBytesValues.currentValueHash()));
                 if (previous != null && leftBytesValues.getOrder() == rightBytesValues.getOrder()) { // we can only compare the
                     assertThat(pre.compare(previous, rightSpare), lessThan(0));
                 }
@@ -526,9 +524,6 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
                 pre.toString(rightSpare);
                 pre.toString(leftSpare);
                 assertThat(pre.toString(leftSpare), equalTo(pre.toString(rightSpare)));
-                if (leftSpare.equals(rightSpare)) {
-                    assertThat(leftBytesValues.currentValueHash(), equalTo(rightBytesValues.currentValueHash()));
-                }
             }
         }
     }
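
The hash assertions removed above were redundant with Java's equals/hashCode contract, which BytesRef honors with or without field-data hash caching; a minimal restatement of what they checked:

    BytesRef left = new BytesRef("same");
    BytesRef right = new BytesRef("same");
    if (left.equals(right)) {
        // guaranteed by the Object contract; no field-data support required
        assert left.hashCode() == right.hashCode();
    }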

@@ -71,7 +71,7 @@ public class FilterFieldDataTest extends AbstractFieldDataTests {
                 .put("filter.frequency.min_segment_size", 100).put("filter.frequency.min", 0.0d).put("filter.frequency.max", random.nextBoolean() ? 100 : 0.5d));
         IndexFieldData<?> fieldData = getForField(fieldDataType, "high_freq");
         AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> loadDirect = (WithOrdinals<Strings>) fieldData.loadDirect(context);
-        BytesValues.WithOrdinals bytesValues = loadDirect.getBytesValues(randomBoolean());
+        BytesValues.WithOrdinals bytesValues = loadDirect.getBytesValues();
         Docs ordinals = bytesValues.ordinals();
         assertThat(2L, equalTo(ordinals.getMaxOrd()));
         assertThat(bytesValues.getValueByOrd(0).utf8ToString(), equalTo("10"));
@@ -83,7 +83,7 @@ public class FilterFieldDataTest extends AbstractFieldDataTests {
                 .put("filter.frequency.min_segment_size", 100).put("filter.frequency.min", random.nextBoolean() ? 101 : 101d/200.0d).put("filter.frequency.max", 201));
         IndexFieldData<?> fieldData = getForField(fieldDataType, "high_freq");
         AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> loadDirect = (WithOrdinals<Strings>) fieldData.loadDirect(context);
-        BytesValues.WithOrdinals bytesValues = loadDirect.getBytesValues(randomBoolean());
+        BytesValues.WithOrdinals bytesValues = loadDirect.getBytesValues();
         Docs ordinals = bytesValues.ordinals();
         assertThat(1L, equalTo(ordinals.getMaxOrd()));
         assertThat(bytesValues.getValueByOrd(0).utf8ToString(), equalTo("5"));
@@ -95,7 +95,7 @@ public class FilterFieldDataTest extends AbstractFieldDataTests {
                 .put("filter.frequency.min_segment_size", 101).put("filter.frequency.min", random.nextBoolean() ? 101 : 101d/200.0d));
         IndexFieldData<?> fieldData = getForField(fieldDataType, "med_freq");
         AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> loadDirect = (WithOrdinals<Strings>) fieldData.loadDirect(context);
-        BytesValues.WithOrdinals bytesValues = loadDirect.getBytesValues(randomBoolean());
+        BytesValues.WithOrdinals bytesValues = loadDirect.getBytesValues();
         Docs ordinals = bytesValues.ordinals();
         assertThat(2L, equalTo(ordinals.getMaxOrd()));
         assertThat(bytesValues.getValueByOrd(0).utf8ToString(), equalTo("10"));
@@ -108,7 +108,7 @@ public class FilterFieldDataTest extends AbstractFieldDataTests {
                 .put("filter.frequency.min_segment_size", 101).put("filter.frequency.min", random.nextBoolean() ? 101 : 101d/200.0d));
         IndexFieldData<?> fieldData = getForField(fieldDataType, "med_freq");
         AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> loadDirect = (WithOrdinals<Strings>) fieldData.loadDirect(context);
-        BytesValues.WithOrdinals bytesValues = loadDirect.getBytesValues(randomBoolean());
+        BytesValues.WithOrdinals bytesValues = loadDirect.getBytesValues();
         Docs ordinals = bytesValues.ordinals();
         assertThat(2L, equalTo(ordinals.getMaxOrd()));
         assertThat(bytesValues.getValueByOrd(0).utf8ToString(), equalTo("10"));
@@ -124,7 +124,7 @@ public class FilterFieldDataTest extends AbstractFieldDataTests {
                 .put("filter.frequency.max", random.nextBoolean() ? 99 : 99d/200.0d)); // 100
         IndexFieldData<?> fieldData = getForField(fieldDataType, "high_freq");
         AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> loadDirect = (WithOrdinals<Strings>) fieldData.loadDirect(context);
-        BytesValues.WithOrdinals bytesValues = loadDirect.getBytesValues(randomBoolean());
+        BytesValues.WithOrdinals bytesValues = loadDirect.getBytesValues();
         Docs ordinals = bytesValues.ordinals();
         assertThat(1L, equalTo(ordinals.getMaxOrd()));
         assertThat(bytesValues.getValueByOrd(0).utf8ToString(), equalTo("100"));
@@ -168,7 +168,7 @@ public class FilterFieldDataTest extends AbstractFieldDataTests {
                 .put("filter.regex.pattern", "\\d"));
         IndexFieldData<?> fieldData = getForField(fieldDataType, "high_freq");
         AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> loadDirect = (WithOrdinals<Strings>) fieldData.loadDirect(context);
-        BytesValues.WithOrdinals bytesValues = loadDirect.getBytesValues(randomBoolean());
+        BytesValues.WithOrdinals bytesValues = loadDirect.getBytesValues();
         Docs ordinals = bytesValues.ordinals();
         assertThat(1L, equalTo(ordinals.getMaxOrd()));
         assertThat(bytesValues.getValueByOrd(0).utf8ToString(), equalTo("5"));
@@ -179,7 +179,7 @@ public class FilterFieldDataTest extends AbstractFieldDataTests {
                 .put("filter.regex.pattern", "\\d{1,2}"));
         IndexFieldData<?> fieldData = getForField(fieldDataType, "high_freq");
         AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> loadDirect = (WithOrdinals<Strings>) fieldData.loadDirect(context);
-        BytesValues.WithOrdinals bytesValues = loadDirect.getBytesValues(randomBoolean());
+        BytesValues.WithOrdinals bytesValues = loadDirect.getBytesValues();
         Docs ordinals = bytesValues.ordinals();
         assertThat(2L, equalTo(ordinals.getMaxOrd()));
         assertThat(bytesValues.getValueByOrd(0).utf8ToString(), equalTo("10"));

@@ -28,13 +28,13 @@ import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
 import org.elasticsearch.common.compress.CompressedString;
 import org.elasticsearch.common.settings.ImmutableSettings;
-import org.elasticsearch.search.MultiValueMode;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.MapperTestUtils;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.index.service.IndexService;
+import org.elasticsearch.search.MultiValueMode;
 import org.elasticsearch.test.index.service.StubIndexService;
 import org.junit.Before;
 import org.junit.Test;
@@ -107,7 +107,7 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTests {
         AtomicFieldData fieldData = indexFieldData.load(refreshReader());
         assertThat(fieldData.getMemorySizeInBytes(), greaterThan(0l));

-        BytesValues bytesValues = fieldData.getBytesValues(randomBoolean());
+        BytesValues bytesValues = fieldData.getBytesValues();
         assertThat(bytesValues.setDocument(0), equalTo(1));
         assertThat(bytesValues.nextValue().utf8ToString(), equalTo("1"));


@@ -117,7 +117,6 @@ public class FieldDataSourceTests extends ElasticsearchTestCase {
             final int valueCount = values.setDocument(i);
             for (int j = 0; j < valueCount; ++j) {
                 final BytesRef term = values.nextValue();
-                assertEquals(term.hashCode(), values.currentValueHash());
                 assertTrue(term.bytesEquals(values.copyShared()));
             }
         }