Refactor FieldData iterations
This commit primarily folds [Double|Bytes|Long|GeoPoint]Values.Iter into [Double|Bytes|Long|GeoPoint]Values. Iterations no longer require an auxiliary class (Iter) but are instead driven by native for loops. All [Double|Bytes|Long|GeoPoint]Values are stateful and provide `setDocument` and `nextValue` methods to iterate over all values in a document. This has several advantages:
* The number of specialized classes is reduced.
* Iteration is clearly stateful, i.e. Iters can't be mistaken for local state.
* All iterations are size bounded, which avoids runtime checks and allows JIT optimizations / loop unrolling; most iterations are branch free.
* Due to the bounded iteration the need for a `hasNext` method call is removed.
* Value iteration feels more natural.
This commit also adds consistent documentation and unifies the calculation when a SortMode is involved. It also changes the runtime behavior of BytesValues#getValue() so that it never returns `null` anymore. If a document has no value in a field, this method still returns a `BytesRef` with a `length` of 0. To identify documents with no values, #hasValue() or #setDocument(int) should be used; the latter should be preferred if the value will be consumed in the case the document has a value.
parent 7bd1a55f6e
commit 7867de4f5b
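Before the diff, a minimal sketch of the iteration pattern described above, written against the BytesValues API introduced in this commit (setDocument(int), nextValue(), getValue(int)). The wrapper class and helper method names (BytesValuesIterationExample, collectValues, hasNonEmptyValue) are hypothetical and purely illustrative; the sketch assumes a BytesValues instance has already been obtained from the atomic field data of the current segment.

import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.fielddata.BytesValues;

import java.util.ArrayList;
import java.util.List;

// Illustrative helper (hypothetical names) showing the bounded iteration style
// introduced by this commit.
public class BytesValuesIterationExample {

    // Collects stable copies of all values a document has in the field.
    // setDocument(int) returns the number of values, so the loop is size
    // bounded and needs no hasNext() call.
    public static List<BytesRef> collectValues(BytesValues values, int docId) {
        final int numValues = values.setDocument(docId);
        final List<BytesRef> result = new ArrayList<BytesRef>(numValues);
        for (int i = 0; i < numValues; i++) {
            // nextValue() may return a shared/reused instance, so take a deep
            // copy before keeping a reference beyond this loop iteration.
            result.add(BytesRef.deepCopyOf(values.nextValue()));
        }
        return result;
    }

    // Demonstrates the changed getValue(int) contract: it never returns null;
    // a document without a value yields a BytesRef of length 0.
    public static boolean hasNonEmptyValue(BytesValues values, int docId) {
        return values.getValue(docId).length > 0; // no null check required
    }
}

Because setDocument returns the value count up front, the loop bound is known before iteration starts, which is what makes the hasNext call unnecessary and the loop body branch free.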
@ -31,10 +31,6 @@ public abstract class AbstractAtomicNumericFieldData implements AtomicNumericFie
|
||||||
this.isFloat = isFloat;
|
this.isFloat = isFloat;
|
||||||
}
|
}
|
||||||
|
|
||||||
public abstract LongValues getLongValues();
|
|
||||||
|
|
||||||
public abstract DoubleValues getDoubleValues();
|
|
||||||
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public ScriptDocValues getScriptValues() {
|
public ScriptDocValues getScriptValues() {
|
||||||
|
@ -50,45 +46,31 @@ public abstract class AbstractAtomicNumericFieldData implements AtomicNumericFie
|
||||||
if (isFloat) {
|
if (isFloat) {
|
||||||
final DoubleValues values = getDoubleValues();
|
final DoubleValues values = getDoubleValues();
|
||||||
return new BytesValues(values.isMultiValued()) {
|
return new BytesValues(values.isMultiValued()) {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public boolean hasValue(int docId) {
|
public boolean hasValue(int docId) {
|
||||||
return values.hasValue(docId);
|
return values.hasValue(docId);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public BytesRef getValueScratch(int docId, BytesRef ret) {
|
public BytesRef getValue(int docId) {
|
||||||
if (values.hasValue(docId)) {
|
if (values.hasValue(docId)) {
|
||||||
ret.copyChars(Double.toString(values.getValue(docId)));
|
scratch.copyChars(Double.toString(values.getValue(docId)));
|
||||||
} else {
|
} else {
|
||||||
ret.length = 0;
|
scratch.length = 0;
|
||||||
}
|
}
|
||||||
return ret;
|
return scratch;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int setDocument(int docId) {
|
||||||
|
this.docId = docId;
|
||||||
|
return values.setDocument(docId);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Iter getIter(int docId) {
|
public BytesRef nextValue() {
|
||||||
final DoubleValues.Iter iter = values.getIter(docId);
|
scratch.copyChars(Double.toString(values.nextValue()));
|
||||||
return new BytesValues.Iter() {
|
return scratch;
|
||||||
private final BytesRef spare = new BytesRef();
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean hasNext() {
|
|
||||||
return iter.hasNext();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public BytesRef next() {
|
|
||||||
spare.copyChars(Double.toString(iter.next()));
|
|
||||||
return spare;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public int hash() {
|
|
||||||
return spare.hashCode();
|
|
||||||
}
|
|
||||||
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
} else {
|
} else {
|
||||||
|
@ -101,38 +83,25 @@ public abstract class AbstractAtomicNumericFieldData implements AtomicNumericFie
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public BytesRef getValueScratch(int docId, BytesRef ret) {
|
public BytesRef getValue(int docId) {
|
||||||
if (values.hasValue(docId)) {
|
if (values.hasValue(docId)) {
|
||||||
ret.copyChars(Long.toString(values.getValue(docId)));
|
scratch.copyChars(Long.toString(values.getValue(docId)));
|
||||||
} else {
|
} else {
|
||||||
ret.length = 0;
|
scratch.length = 0;
|
||||||
}
|
}
|
||||||
return ret;
|
return scratch;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Iter getIter(int docId) {
|
public int setDocument(int docId) {
|
||||||
final LongValues.Iter iter = values.getIter(docId);
|
this.docId = docId;
|
||||||
return new BytesValues.Iter() {
|
return values.setDocument(docId);
|
||||||
private final BytesRef spare = new BytesRef();
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public boolean hasNext() {
|
public BytesRef nextValue() {
|
||||||
return iter.hasNext();
|
scratch.copyChars(Long.toString(values.nextValue()));
|
||||||
}
|
return scratch;
|
||||||
|
|
||||||
@Override
|
|
||||||
public BytesRef next() {
|
|
||||||
spare.copyChars(Long.toString(iter.next()));
|
|
||||||
return spare;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public int hash() {
|
|
||||||
return spare.hashCode();
|
|
||||||
}
|
|
||||||
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -142,5 +111,4 @@ public abstract class AbstractAtomicNumericFieldData implements AtomicNumericFie
|
||||||
public BytesValues getHashedBytesValues() {
|
public BytesValues getHashedBytesValues() {
|
||||||
return getBytesValues();
|
return getBytesValues();
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -22,7 +22,6 @@ package org.elasticsearch.index.fielddata;
|
||||||
import org.apache.lucene.util.BytesRef;
|
import org.apache.lucene.util.BytesRef;
|
||||||
import org.elasticsearch.common.geo.GeoHashUtils;
|
import org.elasticsearch.common.geo.GeoHashUtils;
|
||||||
import org.elasticsearch.common.geo.GeoPoint;
|
import org.elasticsearch.common.geo.GeoPoint;
|
||||||
import org.elasticsearch.index.fielddata.BytesValues.Iter;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
*/
|
*/
|
||||||
|
@ -41,41 +40,30 @@ public abstract class AtomicGeoPointFieldData<Script extends ScriptDocValues> im
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public BytesRef getValueScratch(int docId, BytesRef ret) {
|
public BytesRef getValue(int docId) {
|
||||||
GeoPoint value = values.getValue(docId);
|
GeoPoint value = values.getValue(docId);
|
||||||
if (value != null) {
|
if (value != null) {
|
||||||
ret.copyChars(GeoHashUtils.encode(value.lat(), value.lon()));
|
scratch.copyChars(GeoHashUtils.encode(value.lat(), value.lon()));
|
||||||
} else {
|
} else {
|
||||||
ret.length = 0;
|
scratch.length = 0;
|
||||||
|
return scratch;
|
||||||
}
|
}
|
||||||
return ret;
|
return scratch;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Iter getIter(int docId) {
|
public int setDocument(int docId) {
|
||||||
final GeoPointValues.Iter iter = values.getIter(docId);
|
this.docId = docId;
|
||||||
return new BytesValues.Iter() {
|
return values.setDocument(docId);
|
||||||
private final BytesRef spare = new BytesRef();
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean hasNext() {
|
|
||||||
return iter.hasNext();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public BytesRef next() {
|
|
||||||
GeoPoint value = iter.next();
|
|
||||||
spare.copyChars(GeoHashUtils.encode(value.lat(), value.lon()));
|
|
||||||
return spare;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public int hash() {
|
|
||||||
return spare.hashCode();
|
|
||||||
}
|
|
||||||
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public BytesRef nextValue() {
|
||||||
|
GeoPoint value = values.nextValue();
|
||||||
|
scratch.copyChars(GeoHashUtils.encode(value.lat(), value.lon()));
|
||||||
|
return scratch;
|
||||||
|
}
|
||||||
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -25,13 +25,35 @@ import org.elasticsearch.index.fielddata.ordinals.Ordinals;
|
||||||
import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs;
|
import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
* A stateful lightweight per document set of <code>byte[]</code> values.
|
||||||
|
*
|
||||||
|
* To iterate over values in a document use the following pattern:
|
||||||
|
* <pre>
|
||||||
|
* BytesValues values = ..;
|
||||||
|
* final int numValues = values.setDocument(docId);
|
||||||
|
* for (int i = 0; i < numValues; i++) {
|
||||||
|
* BytesRef value = values.nextValue();
|
||||||
|
* // process value
|
||||||
|
* }
|
||||||
|
* </pre>
|
||||||
*/
|
*/
|
||||||
public abstract class BytesValues {
|
public abstract class BytesValues {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An empty {@link BytesValues} instance
|
||||||
|
*/
|
||||||
public static final BytesValues EMPTY = new Empty();
|
public static final BytesValues EMPTY = new Empty();
|
||||||
|
|
||||||
private boolean multiValued;
|
private boolean multiValued;
|
||||||
|
|
||||||
protected final BytesRef scratch = new BytesRef();
|
protected final BytesRef scratch = new BytesRef();
|
||||||
|
|
||||||
|
protected int docId = -1;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a new {@link BytesValues} instance
|
||||||
|
* @param multiValued <code>true</code> iff this instance is multivalued. Otherwise <code>false</code>.
|
||||||
|
*/
|
||||||
protected BytesValues(boolean multiValued) {
|
protected BytesValues(boolean multiValued) {
|
||||||
this.multiValued = multiValued;
|
this.multiValued = multiValued;
|
||||||
}
|
}
|
||||||
|
@ -44,191 +66,70 @@ public abstract class BytesValues {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Is there a value for this doc?
|
* Returns <code>true</code> if the given document ID has a value in this. Otherwise <code>false</code>.
|
||||||
*/
|
*/
|
||||||
public abstract boolean hasValue(int docId);
|
public abstract boolean hasValue(int docId);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Converts the provided bytes to "safe" ones from a "non" safe call made (if needed). Note,
|
* Converts the current shared {@link BytesRef} to a stable instance. Note,
|
||||||
* this calls makes the bytes safe for *reads*, not writes (into the same BytesRef). For example,
|
* this calls makes the bytes safe for *reads*, not writes (into the same BytesRef). For example,
|
||||||
* it makes it safe to be placed in a map.
|
* it makes it safe to be placed in a map.
|
||||||
*/
|
*/
|
||||||
public BytesRef makeSafe(BytesRef bytes) {
|
public BytesRef copyShared() {
|
||||||
return BytesRef.deepCopyOf(bytes);
|
return BytesRef.deepCopyOf(scratch);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns a bytes value for a docId. Note, the content of it might be shared across invocation.
|
* Returns a value for the given document id. If the document
|
||||||
|
* has more than one value the returned value is one of the values
|
||||||
|
* associated with the document.
|
||||||
|
*
|
||||||
|
* Note: the {@link BytesRef} might be shared across invocations.
|
||||||
|
*
|
||||||
|
* @param docId the documents id.
|
||||||
|
* @return a value for the given document id or a {@link BytesRef} with a length of <tt>0</tt>if the document
|
||||||
|
* has no value.
|
||||||
*/
|
*/
|
||||||
public BytesRef getValue(int docId) {
|
public abstract BytesRef getValue(int docId);
|
||||||
if (hasValue(docId)) {
|
|
||||||
return getValueScratch(docId, scratch);
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns the bytes value for the docId, with the provided "ret" which will be filled with the
|
* Sets iteration to the specified docID and returns the number of
|
||||||
* result which will also be returned. If there is no value for this docId, the length will be 0.
|
* values for this document ID,
|
||||||
* Implementations can either change the {@link BytesRef#bytes bytes reference} of the {@link BytesRef}
|
* @param docId document ID
|
||||||
* to point to an internal structure or modify the content of the {@link BytesRef} but should
|
*
|
||||||
* always do it in a consistent way. For example, it is illegal to change the bytes content in
|
* @see #nextValue()
|
||||||
* some call and to change the reference to point to an internal structure in another call, this
|
|
||||||
* will lead to bugs. It is also illegal for callers to write into the {@link BytesRef#bytes bytes}
|
|
||||||
* after this method has returned.
|
|
||||||
*/
|
*/
|
||||||
public abstract BytesRef getValueScratch(int docId, BytesRef ret);
|
public int setDocument(int docId) {
|
||||||
|
this.docId = docId;
|
||||||
|
return hasValue(docId) ? 1 : 0;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Fills the given spare for the given doc ID and returns the hashcode of the reference as defined by
|
* Returns the next value for the current docID set to {@link #setDocument(int)}.
|
||||||
* {@link BytesRef#hashCode()}
|
* This method should only be called <tt>N</tt> times where <tt>N</tt> is the number
|
||||||
|
* returned from {@link #setDocument(int)}. If called more than <tt>N</tt> times the behavior
|
||||||
|
* is undefined.
|
||||||
|
*
|
||||||
|
* Note: the returned {@link BytesRef} might be shared across invocations.
|
||||||
|
*
|
||||||
|
* @return the next value for the current docID set to {@link #setDocument(int)}.
|
||||||
*/
|
*/
|
||||||
public int getValueHashed(int docId, BytesRef spare) {
|
public BytesRef nextValue() {
|
||||||
return getValueScratch(docId, spare).hashCode();
|
assert docId != -1;
|
||||||
|
return getValue(docId);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns a bytes value iterator for a docId. Note, the content of it might be shared across invocation.
|
* Returns the hash value of the previously returned shared {@link BytesRef} instances.
|
||||||
|
*
|
||||||
|
* @return the hash value of the previously returned shared {@link BytesRef} instances.
|
||||||
*/
|
*/
|
||||||
public abstract Iter getIter(int docId); // TODO: maybe this should return null for no values so we can safe one call?
|
public int currentValueHash() {
|
||||||
|
return scratch.hashCode();
|
||||||
|
|
||||||
public static interface Iter {
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns whether this iterator still contains elements.
|
|
||||||
*/
|
|
||||||
boolean hasNext();
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the next element of this iterator. Please note that the returned bytes may be
|
|
||||||
* reused across invocations so they should be copied for later reference. The behavior of
|
|
||||||
* this method is undefined if the iterator is exhausted.
|
|
||||||
*/
|
|
||||||
BytesRef next();
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the hash value of the last {@link BytesRef} returned by {@link #next()}. The
|
|
||||||
* behavior is undefined if this iterator is not positioned or exhausted.
|
|
||||||
*/
|
|
||||||
int hash();
|
|
||||||
|
|
||||||
public static class Empty implements Iter {
|
|
||||||
|
|
||||||
public static final Empty INSTANCE = new Empty();
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean hasNext() {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public BytesRef next() {
|
|
||||||
throw new ElasticSearchIllegalStateException();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public int hash() {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public static class Single implements Iter {
|
|
||||||
|
|
||||||
protected BytesRef value;
|
|
||||||
protected long ord;
|
|
||||||
protected boolean done;
|
|
||||||
|
|
||||||
public Single reset(BytesRef value, long ord) {
|
|
||||||
this.value = value;
|
|
||||||
this.ord = ord;
|
|
||||||
this.done = false;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean hasNext() {
|
|
||||||
return !done;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public BytesRef next() {
|
|
||||||
assert !done;
|
|
||||||
done = true;
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
|
|
||||||
public int hash() {
|
|
||||||
return value.hashCode();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static class Multi implements Iter {
|
|
||||||
|
|
||||||
protected long innerOrd;
|
|
||||||
protected long ord;
|
|
||||||
protected BytesValues.WithOrdinals withOrds;
|
|
||||||
protected Ordinals.Docs.Iter ordsIter;
|
|
||||||
protected final BytesRef scratch = new BytesRef();
|
|
||||||
|
|
||||||
public Multi(WithOrdinals withOrds) {
|
|
||||||
this.withOrds = withOrds;
|
|
||||||
assert withOrds.isMultiValued();
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
public Multi reset(Ordinals.Docs.Iter ordsIter) {
|
|
||||||
this.ordsIter = ordsIter;
|
|
||||||
innerOrd = ord = ordsIter.next();
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean hasNext() {
|
|
||||||
return innerOrd != 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public BytesRef next() {
|
|
||||||
withOrds.getValueScratchByOrd(innerOrd, scratch);
|
|
||||||
ord = innerOrd;
|
|
||||||
innerOrd = ordsIter.next();
|
|
||||||
return scratch;
|
|
||||||
}
|
|
||||||
|
|
||||||
public int hash() {
|
|
||||||
return scratch.hashCode();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public static class Empty extends BytesValues {
|
|
||||||
|
|
||||||
public Empty() {
|
|
||||||
super(false);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean hasValue(int docId) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Iter getIter(int docId) {
|
|
||||||
return Iter.Empty.INSTANCE;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public BytesRef getValueScratch(int docId, BytesRef ret) {
|
|
||||||
ret.length = 0;
|
|
||||||
return ret;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Bytes values that are based on ordinals.
|
* Ordinal based {@link BytesValues}.
|
||||||
*/
|
*/
|
||||||
public static abstract class WithOrdinals extends BytesValues {
|
public static abstract class WithOrdinals extends BytesValues {
|
||||||
|
|
||||||
|
@ -239,76 +140,86 @@ public abstract class BytesValues {
|
||||||
this.ordinals = ordinals;
|
this.ordinals = ordinals;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the associated ordinals instance.
|
||||||
|
* @return the associated ordinals instance.
|
||||||
|
*/
|
||||||
public Ordinals.Docs ordinals() {
|
public Ordinals.Docs ordinals() {
|
||||||
return ordinals;
|
return ordinals;
|
||||||
}
|
}
|
||||||
|
|
||||||
public BytesRef getValueByOrd(long ord) {
|
/**
|
||||||
return getValueScratchByOrd(ord, scratch);
|
* Returns the value for the given ordinal.
|
||||||
}
|
* @param ord the ordinal to lookup.
|
||||||
|
* @return a shared {@link BytesRef} instance holding the value associated
|
||||||
protected Iter.Multi newMultiIter() {
|
* with the given ordinal or <code>null</code> if ordinal is <tt>0</tt>
|
||||||
assert this.isMultiValued();
|
*/
|
||||||
return new Iter.Multi(this);
|
public abstract BytesRef getValueByOrd(long ord);
|
||||||
}
|
|
||||||
|
|
||||||
protected Iter.Single newSingleIter() {
|
|
||||||
assert !this.isMultiValued();
|
|
||||||
return new Iter.Single();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public boolean hasValue(int docId) {
|
public boolean hasValue(int docId) {
|
||||||
return ordinals.getOrd(docId) != 0;
|
return ordinals.getOrd(docId) != Ordinals.MISSING_ORDINAL;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public BytesRef getValue(int docId) {
|
public BytesRef getValue(int docId) {
|
||||||
final long ord = ordinals.getOrd(docId);
|
final long ord = ordinals.getOrd(docId);
|
||||||
if (ord == 0) {
|
if (ord == Ordinals.MISSING_ORDINAL) {
|
||||||
return null;
|
scratch.length = 0;
|
||||||
|
return scratch;
|
||||||
}
|
}
|
||||||
return getValueScratchByOrd(ord, scratch);
|
return getValueByOrd(ord);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public BytesRef getValueScratch(int docId, BytesRef ret) {
|
public int setDocument(int docId) {
|
||||||
return getValueScratchByOrd(ordinals.getOrd(docId), ret);
|
this.docId = docId;
|
||||||
|
int length = ordinals.setDocument(docId);
|
||||||
|
assert hasValue(docId) == length > 0 : "Doc: [" + docId + "] hasValue: [" + hasValue(docId) + "] but length is [" + length + "]";
|
||||||
|
return length;
|
||||||
}
|
}
|
||||||
|
|
||||||
public BytesRef getSafeValueByOrd(int ord) {
|
@Override
|
||||||
return getValueScratchByOrd(ord, new BytesRef());
|
public BytesRef nextValue() {
|
||||||
}
|
assert docId != -1;
|
||||||
|
return getValueByOrd(ordinals.nextOrd());
|
||||||
/**
|
|
||||||
* Returns the bytes value for the docId, with the provided "ret" which will be filled with the
|
|
||||||
* result which will also be returned. If there is no value for this docId, the length will be 0.
|
|
||||||
* Implementations can either change the {@link BytesRef#bytes bytes reference} of the {@link BytesRef}
|
|
||||||
* to point to an internal structure or modify the content of the {@link BytesRef} but should
|
|
||||||
* always do it in a consistent way. For example, it is illegal to change the bytes content in
|
|
||||||
* some call and to change the reference to point to an internal structure in another call, this
|
|
||||||
* will lead to bugs. It is also illegal for callers to write into the {@link BytesRef#bytes bytes}
|
|
||||||
* after this method has returned.
|
|
||||||
*/
|
|
||||||
public abstract BytesRef getValueScratchByOrd(long ord, BytesRef ret);
|
|
||||||
|
|
||||||
public static class Empty extends WithOrdinals {
|
|
||||||
|
|
||||||
public Empty(Ordinals.Docs ordinals) {
|
|
||||||
super(ordinals);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public BytesRef getValueScratchByOrd(long ord, BytesRef ret) {
|
|
||||||
ret.length = 0;
|
|
||||||
return ret;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Iter getIter(int docId) {
|
|
||||||
return Iter.Empty.INSTANCE;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An empty {@link BytesValues} implementation
|
||||||
|
*/
|
||||||
|
private final static class Empty extends BytesValues {
|
||||||
|
|
||||||
|
Empty() {
|
||||||
|
super(false);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean hasValue(int docId) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public BytesRef getValue(int docId) {
|
||||||
|
scratch.length = 0;
|
||||||
|
return scratch;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int setDocument(int docId) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public BytesRef nextValue() {
|
||||||
|
throw new ElasticSearchIllegalStateException("Empty BytesValues has no next value");
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int currentValueHash() {
|
||||||
|
throw new ElasticSearchIllegalStateException("Empty BytesValues has no hash for the current Value");
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -24,14 +24,33 @@ import org.elasticsearch.index.fielddata.ordinals.Ordinals;
|
||||||
import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs;
|
import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
* A stateful lightweight per document set of <code>double</code> values.
|
||||||
|
*
|
||||||
|
* To iterate over values in a document use the following pattern:
|
||||||
|
* <pre>
|
||||||
|
* DoubleValues values = ..;
|
||||||
|
* final int numValues = values.setDocument(docId);
|
||||||
|
* for (int i = 0; i < numValues; i++) {
|
||||||
|
* double value = values.nextValue();
|
||||||
|
* // process value
|
||||||
|
* }
|
||||||
|
* </pre>
|
||||||
*/
|
*/
|
||||||
public abstract class DoubleValues {
|
public abstract class DoubleValues {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An empty {@link DoubleValues} instance
|
||||||
|
*/
|
||||||
public static final DoubleValues EMPTY = new Empty();
|
public static final DoubleValues EMPTY = new Empty();
|
||||||
|
|
||||||
private final boolean multiValued;
|
private final boolean multiValued;
|
||||||
protected final Iter.Single iter = new Iter.Single();
|
|
||||||
|
|
||||||
|
protected int docId;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a new {@link DoubleValues} instance
|
||||||
|
* @param multiValued <code>true</code> iff this instance is multivalued. Otherwise <code>false</code>.
|
||||||
|
*/
|
||||||
protected DoubleValues(boolean multiValued) {
|
protected DoubleValues(boolean multiValued) {
|
||||||
this.multiValued = multiValued;
|
this.multiValued = multiValued;
|
||||||
}
|
}
|
||||||
|
@ -44,12 +63,31 @@ public abstract class DoubleValues {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Is there a value for this doc?
|
* Returns <code>true</code> if the given document ID has a value in this. Otherwise <code>false</code>.
|
||||||
*/
|
*/
|
||||||
public abstract boolean hasValue(int docId);
|
public abstract boolean hasValue(int docId);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a value for the given document id. If the document
|
||||||
|
* has more than one value the returned value is one of the values
|
||||||
|
* associated with the document.
|
||||||
|
* @param docId the documents id.
|
||||||
|
* @return a value for the given document id.
|
||||||
|
*/
|
||||||
public abstract double getValue(int docId);
|
public abstract double getValue(int docId);
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a value for the given document id or the given missing value if
|
||||||
|
* {@link #hasValue(int)} returns <code>false</code> ie. the document has no
|
||||||
|
* value associated with it.
|
||||||
|
*
|
||||||
|
* @param docId the documents id.
|
||||||
|
* @param missingValue the missing value
|
||||||
|
* @return a value for the given document id or the given missing value if
|
||||||
|
* {@link #hasValue(int)} returns <code>false</code> ie. the document has no
|
||||||
|
* value associated with it.
|
||||||
|
*/
|
||||||
public double getValueMissing(int docId, double missingValue) {
|
public double getValueMissing(int docId, double missingValue) {
|
||||||
if (hasValue(docId)) {
|
if (hasValue(docId)) {
|
||||||
return getValue(docId);
|
return getValue(docId);
|
||||||
|
@ -57,164 +95,99 @@ public abstract class DoubleValues {
|
||||||
return missingValue;
|
return missingValue;
|
||||||
}
|
}
|
||||||
|
|
||||||
public Iter getIter(int docId) {
|
/**
|
||||||
assert !isMultiValued();
|
* Sets iteration to the specified docID and returns the number of
|
||||||
if (hasValue(docId)) {
|
* values for this document ID,
|
||||||
return iter.reset(getValue(docId));
|
* @param docId document ID
|
||||||
} else {
|
*
|
||||||
return Iter.Empty.INSTANCE;
|
* @see #nextValue()
|
||||||
}
|
*/
|
||||||
|
public int setDocument(int docId) {
|
||||||
|
this.docId = docId;
|
||||||
|
return hasValue(docId) ? 1 : 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
public static abstract class Dense extends DoubleValues {
|
* Returns the next value for the current docID set to {@link #setDocument(int)}.
|
||||||
|
* This method should only be called <tt>N</tt> times where <tt>N</tt> is the number
|
||||||
|
* returned from {@link #setDocument(int)}. If called more than <tt>N</tt> times the behavior
|
||||||
protected Dense(boolean multiValued) {
|
* is undefined.
|
||||||
super(multiValued);
|
*
|
||||||
}
|
* @return the next value for the current docID set to {@link #setDocument(int)}.
|
||||||
|
*/
|
||||||
@Override
|
public double nextValue() {
|
||||||
public final boolean hasValue(int docId) {
|
return getValue(docId);
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
public final double getValueMissing(int docId, double missingValue) {
|
|
||||||
assert hasValue(docId);
|
|
||||||
assert !isMultiValued();
|
|
||||||
return getValue(docId);
|
|
||||||
}
|
|
||||||
|
|
||||||
public final Iter getIter(int docId) {
|
|
||||||
assert hasValue(docId);
|
|
||||||
assert !isMultiValued();
|
|
||||||
return iter.reset(getValue(docId));
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Ordinal based {@link DoubleValues}.
|
||||||
|
*/
|
||||||
public static abstract class WithOrdinals extends DoubleValues {
|
public static abstract class WithOrdinals extends DoubleValues {
|
||||||
|
|
||||||
protected final Docs ordinals;
|
protected final Docs ordinals;
|
||||||
private final Iter.Multi iter;
|
|
||||||
|
|
||||||
protected WithOrdinals(Ordinals.Docs ordinals) {
|
protected WithOrdinals(Ordinals.Docs ordinals) {
|
||||||
super(ordinals.isMultiValued());
|
super(ordinals.isMultiValued());
|
||||||
this.ordinals = ordinals;
|
this.ordinals = ordinals;
|
||||||
iter = new Iter.Multi(this);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the associated ordinals instance.
|
||||||
|
* @return the associated ordinals instance.
|
||||||
|
*/
|
||||||
public Docs ordinals() {
|
public Docs ordinals() {
|
||||||
return ordinals;
|
return ordinals;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the value for the given ordinal.
|
||||||
|
* @param ord the ordinal to lookup.
|
||||||
|
* @return a double value associated with the given ordinal.
|
||||||
|
*/
|
||||||
|
public abstract double getValueByOrd(long ord);
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public final boolean hasValue(int docId) {
|
public final boolean hasValue(int docId) {
|
||||||
return ordinals.getOrd(docId) != 0;
|
return ordinals.getOrd(docId) != Ordinals.MISSING_ORDINAL;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public final double getValue(int docId) {
|
public final double getValue(int docId) {
|
||||||
return getValueByOrd(ordinals.getOrd(docId));
|
final long ord = ordinals.getOrd(docId);
|
||||||
|
if (ord == Ordinals.MISSING_ORDINAL) {
|
||||||
|
return 0d;
|
||||||
|
}
|
||||||
|
return getValueByOrd(ord);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int setDocument(int docId) {
|
||||||
|
this.docId = docId;
|
||||||
|
return ordinals.setDocument(docId);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public double nextValue() {
|
||||||
|
return getValueByOrd(ordinals.nextOrd());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public final double getValueMissing(int docId, double missingValue) {
|
public final double getValueMissing(int docId, double missingValue) {
|
||||||
final long ord = ordinals.getOrd(docId);
|
final long ord = ordinals.getOrd(docId);
|
||||||
if (ord == 0) {
|
if (ord == Ordinals.MISSING_ORDINAL) {
|
||||||
return missingValue;
|
return missingValue;
|
||||||
} else {
|
} else {
|
||||||
return getValueByOrd(ord);
|
return getValueByOrd(ord);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public abstract double getValueByOrd(long ord);
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public final Iter getIter(int docId) {
|
|
||||||
return iter.reset(ordinals.getIter(docId));
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
/**
|
||||||
|
* An empty {@link DoubleValues} implementation
|
||||||
|
*/
|
||||||
|
private static class Empty extends DoubleValues {
|
||||||
|
|
||||||
public static interface Iter {
|
Empty() {
|
||||||
|
|
||||||
boolean hasNext();
|
|
||||||
|
|
||||||
double next();
|
|
||||||
|
|
||||||
public static class Empty implements Iter {
|
|
||||||
|
|
||||||
public static final Empty INSTANCE = new Empty();
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean hasNext() {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public double next() {
|
|
||||||
throw new ElasticSearchIllegalStateException();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static class Single implements Iter {
|
|
||||||
|
|
||||||
public double value;
|
|
||||||
public boolean done;
|
|
||||||
|
|
||||||
public Single reset(double value) {
|
|
||||||
this.value = value;
|
|
||||||
this.done = false;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean hasNext() {
|
|
||||||
return !done;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public double next() {
|
|
||||||
assert !done;
|
|
||||||
done = true;
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static class Multi implements Iter {
|
|
||||||
|
|
||||||
private Ordinals.Docs.Iter ordsIter;
|
|
||||||
private long ord;
|
|
||||||
private WithOrdinals values;
|
|
||||||
|
|
||||||
public Multi(WithOrdinals values) {
|
|
||||||
this.values = values;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Multi reset(Ordinals.Docs.Iter ordsIter) {
|
|
||||||
this.ordsIter = ordsIter;
|
|
||||||
this.ord = ordsIter.next();
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean hasNext() {
|
|
||||||
return ord != 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public double next() {
|
|
||||||
double value = values.getValueByOrd(ord);
|
|
||||||
ord = ordsIter.next();
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static class Empty extends DoubleValues {
|
|
||||||
|
|
||||||
public Empty() {
|
|
||||||
super(false);
|
super(false);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -228,33 +201,15 @@ public abstract class DoubleValues {
|
||||||
// conforms with all other impls when there is no value
|
// conforms with all other impls when there is no value
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Iter getIter(int docId) {
|
public int setDocument(int docId) {
|
||||||
return Iter.Empty.INSTANCE;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
@Override
|
||||||
|
public double nextValue() {
|
||||||
public static class Filtered extends DoubleValues {
|
throw new ElasticSearchIllegalStateException("Empty DoubleValues has no next value");
|
||||||
|
|
||||||
protected final DoubleValues delegate;
|
|
||||||
|
|
||||||
public Filtered(DoubleValues delegate) {
|
|
||||||
super(delegate.isMultiValued());
|
|
||||||
this.delegate = delegate;
|
|
||||||
}
|
|
||||||
|
|
||||||
public boolean hasValue(int docId) {
|
|
||||||
return delegate.hasValue(docId);
|
|
||||||
}
|
|
||||||
|
|
||||||
public double getValue(int docId) {
|
|
||||||
return delegate.getValue(docId);
|
|
||||||
}
|
|
||||||
|
|
||||||
public Iter getIter(int docId) {
|
|
||||||
return delegate.getIter(docId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,66 @@
|
||||||
|
/*
|
||||||
|
* Licensed to ElasticSearch and Shay Banon under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. ElasticSearch licenses this
|
||||||
|
* file to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
package org.elasticsearch.index.fielddata;
|
||||||
|
|
||||||
|
import org.apache.lucene.util.BytesRef;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* <code>FilterBytesValues</code> contains another {@link BytesValues}, which it
|
||||||
|
* uses as its basic source of data, possibly transforming the data along the
|
||||||
|
* way or providing additional functionality.
|
||||||
|
*/
|
||||||
|
public abstract class FilterBytesValues extends BytesValues {
|
||||||
|
|
||||||
|
protected final BytesValues delegate;
|
||||||
|
|
||||||
|
protected FilterBytesValues(BytesValues delegate) {
|
||||||
|
super(delegate.isMultiValued());
|
||||||
|
this.delegate = delegate;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean hasValue(int docId) {
|
||||||
|
return delegate.hasValue(docId);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public BytesRef copyShared() {
|
||||||
|
return delegate.copyShared();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int setDocument(int docId) {
|
||||||
|
return delegate.setDocument(docId);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public BytesRef nextValue() {
|
||||||
|
return delegate.nextValue();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int currentValueHash() {
|
||||||
|
return delegate.currentValueHash();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public BytesRef getValue(int docId) {
|
||||||
|
return delegate.getValue(docId);
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,58 @@
|
||||||
|
/*
|
||||||
|
* Licensed to ElasticSearch and Shay Banon under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. ElasticSearch licenses this
|
||||||
|
* file to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
package org.elasticsearch.index.fielddata;
|
||||||
|
/**
|
||||||
|
* <code>FilterDoubleValues</code> contains another {@link DoubleValues}, which it
|
||||||
|
* uses as its basic source of data, possibly transforming the data along the
|
||||||
|
* way or providing additional functionality.
|
||||||
|
*/
|
||||||
|
public abstract class FilterDoubleValues extends DoubleValues {
|
||||||
|
|
||||||
|
protected final DoubleValues delegate;
|
||||||
|
|
||||||
|
protected FilterDoubleValues(DoubleValues delegate) {
|
||||||
|
super(delegate.isMultiValued());
|
||||||
|
this.delegate = delegate;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean hasValue(int docId) {
|
||||||
|
return delegate.hasValue(docId);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public double getValue(int docId) {
|
||||||
|
return delegate.getValue(docId);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int setDocument(int docId) {
|
||||||
|
return delegate.setDocument(docId);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public double nextValue() {
|
||||||
|
return delegate.nextValue();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public double getValueMissing(int docId, double missingValue) {
|
||||||
|
return delegate.getValueMissing(docId, missingValue);
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,59 @@
|
||||||
|
/*
|
||||||
|
* Licensed to ElasticSearch and Shay Banon under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. ElasticSearch licenses this
|
||||||
|
* file to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
package org.elasticsearch.index.fielddata;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* <code>FilterLongValues</code> contains another {@link LongValues}, which it
|
||||||
|
* uses as its basic source of data, possibly transforming the data along the
|
||||||
|
* way or providing additional functionality.
|
||||||
|
*/
|
||||||
|
public class FilterLongValues extends LongValues {
|
||||||
|
|
||||||
|
protected final LongValues delegate;
|
||||||
|
|
||||||
|
protected FilterLongValues(LongValues delegate) {
|
||||||
|
super(delegate.isMultiValued());
|
||||||
|
this.delegate = delegate;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean hasValue(int docId) {
|
||||||
|
return delegate.hasValue(docId);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public long getValue(int docId) {
|
||||||
|
return delegate.getValue(docId);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int setDocument(int docId) {
|
||||||
|
return delegate.setDocument(docId);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public long nextValue() {
|
||||||
|
return delegate.nextValue();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public long getValueMissing(int docId, long missingValue) {
|
||||||
|
return delegate.getValueMissing(docId, missingValue);
|
||||||
|
}
|
||||||
|
}
|
|
@ -23,13 +23,35 @@ import org.elasticsearch.ElasticSearchIllegalStateException;
|
||||||
import org.elasticsearch.common.geo.GeoPoint;
|
import org.elasticsearch.common.geo.GeoPoint;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
* A stateful lightweight per document set of {@link GeoPoint} values.
|
||||||
|
* To iterate over values in a document use the following pattern:
|
||||||
|
* <pre>
|
||||||
|
* GeoPointValues values = ..;
|
||||||
|
* final int numValues = values.setDocument(docId);
|
||||||
|
* for (int i = 0; i < numValues; i++) {
|
||||||
|
* GeoPoint value = values.nextValue();
|
||||||
|
* // process value
|
||||||
|
* }
|
||||||
|
* </pre>
|
||||||
*/
|
*/
|
||||||
public abstract class GeoPointValues {
|
public abstract class GeoPointValues {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An empty {@link GeoPointValues} instance
|
||||||
|
*/
|
||||||
public static final GeoPointValues EMPTY = new Empty();
|
public static final GeoPointValues EMPTY = new Empty();
|
||||||
|
|
||||||
private final boolean multiValued;
|
private final boolean multiValued;
|
||||||
|
|
||||||
|
protected int docId = -1;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a new {@link GeoPointValues} instance
|
||||||
|
* @param multiValued <code>true</code> iff this instance is multivalued. Otherwise <code>false</code>.
|
||||||
|
*/
|
||||||
|
protected GeoPointValues(boolean multiValued) {
|
||||||
|
this.multiValued = multiValued;
|
||||||
|
}
|
||||||
/**
|
/**
|
||||||
* Is one of the documents in this field data values is multi valued?
|
* Is one of the documents in this field data values is multi valued?
|
||||||
*/
|
*/
|
||||||
|
@ -38,100 +60,93 @@ public abstract class GeoPointValues {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Is there a value for this doc?
|
* Returns <code>true</code> if the given document ID has a value in this. Otherwise <code>false</code>.
|
||||||
*/
|
*/
|
||||||
public abstract boolean hasValue(int docId);
|
public abstract boolean hasValue(int docId);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a value for the given document id. If the document
|
||||||
|
* has more than one value the returned value is one of the values
|
||||||
|
* associated with the document.
|
||||||
|
*
|
||||||
|
* Note: the {@link GeoPoint} might be shared across invocations.
|
||||||
|
*
|
||||||
|
* @param docId the documents id.
|
||||||
|
* @return a value for the given document id.
|
||||||
|
*/
|
||||||
public abstract GeoPoint getValue(int docId);
|
public abstract GeoPoint getValue(int docId);
|
||||||
|
|
||||||
public abstract GeoPoint getValueSafe(int docId);
|
/**
|
||||||
|
* Sets iteration to the specified docID and returns the number of
|
||||||
public abstract Iter getIter(int docId);
|
* values for this document ID,
|
||||||
|
* @param docId document ID
|
||||||
public abstract Iter getIterSafe(int docId);
|
*
|
||||||
|
* @see #nextValue()
|
||||||
protected GeoPointValues(boolean multiValued) {
|
*/
|
||||||
this.multiValued = multiValued;
|
public int setDocument(int docId) {
|
||||||
|
this.docId = docId;
|
||||||
|
return hasValue(docId) ? 1 : 0;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the next value for the current docID set to {@link #setDocument(int)}.
|
||||||
|
* This method should only be called <tt>N</tt> times where <tt>N</tt> is the number
|
||||||
|
* returned from {@link #setDocument(int)}. If called more than <tt>N</tt> times the behavior
|
||||||
|
* is undefined.
|
||||||
|
*
|
||||||
|
* Note: the returned {@link GeoPoint} might be shared across invocations.
|
||||||
|
*
|
||||||
|
* @return the next value for the current docID set to {@link #setDocument(int)}.
|
||||||
|
*/
|
||||||
|
public GeoPoint nextValue() {
|
||||||
|
assert docId != -1;
|
||||||
|
return getValue(docId);
|
||||||
}
|
}
|
||||||
|
|
||||||
public GeoPoint getValueMissing(int docId, GeoPoint defaultGeoPoint) {
|
/**
|
||||||
|
* Returns a value for the given document id or the given missing value if
|
||||||
|
* {@link #hasValue(int)} returns <code>false</code> ie. the document has no
|
||||||
|
* value associated with it.
|
||||||
|
*
|
||||||
|
* @param docId the documents id.
|
||||||
|
* @param missingValue the missing value
|
||||||
|
* @return a value for the given document id or the given missing value if
|
||||||
|
* {@link #hasValue(int)} returns <code>false</code> ie. the document has no
|
||||||
|
* value associated with it.
|
||||||
|
*/
|
||||||
|
public GeoPoint getValueMissing(int docId, GeoPoint missingValue) {
|
||||||
if (hasValue(docId)) {
|
if (hasValue(docId)) {
|
||||||
return getValue(docId);
|
return getValue(docId);
|
||||||
}
|
}
|
||||||
return defaultGeoPoint;
|
return missingValue;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
public static interface Iter {
|
* An empty {@link GeoPointValues} implementation
|
||||||
|
*/
|
||||||
boolean hasNext();
|
private static final class Empty extends GeoPointValues {
|
||||||
|
|
||||||
GeoPoint next();
|
|
||||||
|
|
||||||
static class Empty implements Iter {
|
|
||||||
|
|
||||||
public static final Empty INSTANCE = new Empty();
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean hasNext() {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public GeoPoint next() {
|
|
||||||
throw new ElasticSearchIllegalStateException();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static class Single implements Iter {
|
|
||||||
|
|
||||||
public GeoPoint value;
|
|
||||||
public boolean done;
|
|
||||||
|
|
||||||
public Single reset(GeoPoint value) {
|
|
||||||
this.value = value;
|
|
||||||
this.done = false;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean hasNext() {
|
|
||||||
return !done;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public GeoPoint next() {
|
|
||||||
assert !done;
|
|
||||||
done = true;
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static class Empty extends GeoPointValues {
|
|
||||||
protected Empty() {
|
protected Empty() {
|
||||||
super(false);
|
super(false);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
public boolean hasValue(int docId) {
|
public boolean hasValue(int docId) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
public GeoPoint getValueSafe(int docId) {
|
@Override
|
||||||
return getValue(docId);
|
|
||||||
}
|
|
||||||
|
|
||||||
public Iter getIterSafe(int docId) {
|
|
||||||
return getIter(docId);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public GeoPoint getValue(int docId) {
|
public GeoPoint getValue(int docId) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
public Iter getIter(int docId) {
|
@Override
|
||||||
return Iter.Empty.INSTANCE;
|
public int setDocument(int docId) {
|
||||||
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public GeoPoint nextValue() {
|
||||||
|
throw new ElasticSearchIllegalStateException("Empty GeoPointValues has no next value");
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -24,14 +24,34 @@ import org.elasticsearch.index.fielddata.ordinals.Ordinals;
|
||||||
import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs;
|
import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
* A stateful lightweight per document set of <code>long</code> values.
|
||||||
|
*
|
||||||
|
* To iterate over values in a document use the following pattern:
|
||||||
|
* <pre>
|
||||||
|
* LongValues values = ..;
|
||||||
|
* final int numValues = values.setDocument(docId);
|
||||||
|
* for (int i = 0; i < numValues; i++) {
|
||||||
|
* long value = values.nextValue();
|
||||||
|
* // process value
|
||||||
|
* }
|
||||||
|
* </pre>
|
||||||
|
*
|
||||||
*/
|
*/
|
||||||
public abstract class LongValues {
|
public abstract class LongValues {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An empty {@link LongValues} instance
|
||||||
|
*/
|
||||||
public static final LongValues EMPTY = new Empty();
|
public static final LongValues EMPTY = new Empty();
|
||||||
|
|
||||||
private final boolean multiValued;
|
private final boolean multiValued;
|
||||||
protected final Iter.Single iter = new Iter.Single();
|
|
||||||
|
|
||||||
|
protected int docId;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a new {@link LongValues} instance
|
||||||
|
* @param multiValued <code>true</code> iff this instance is multivalued. Otherwise <code>false</code>.
|
||||||
|
*/
|
||||||
protected LongValues(boolean multiValued) {
|
protected LongValues(boolean multiValued) {
|
||||||
this.multiValued = multiValued;
|
this.multiValued = multiValued;
|
||||||
}
|
}
|
||||||
|
@ -44,12 +64,30 @@ public abstract class LongValues {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Is there a value for this doc?
|
* Returns <code>true</code> if the given document ID has a value in this. Otherwise <code>false</code>.
|
||||||
*/
|
*/
|
||||||
public abstract boolean hasValue(int docId);
|
public abstract boolean hasValue(int docId);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a value for the given document id. If the document
|
||||||
|
* has more than one value the returned value is one of the values
|
||||||
|
* associated with the document.
|
||||||
|
* @param docId the documents id.
|
||||||
|
* @return a value for the given document id.
|
||||||
|
*/
|
||||||
public abstract long getValue(int docId);
|
public abstract long getValue(int docId);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a value for the given document id or the given missing value if
|
||||||
|
* {@link #hasValue(int)} returns <code>false</code> ie. the document has no
|
||||||
|
* value associated with it.
|
||||||
|
*
|
||||||
|
* @param docId the documents id.
|
||||||
|
* @param missingValue the missing value
|
||||||
|
* @return a value for the given document id or the given missing value if
|
||||||
|
* {@link #hasValue(int)} returns <code>false</code> ie. the document has no
|
||||||
|
* value associated with it.
|
||||||
|
*/
|
||||||
public long getValueMissing(int docId, long missingValue) {
|
public long getValueMissing(int docId, long missingValue) {
|
||||||
if (hasValue(docId)) {
|
if (hasValue(docId)) {
|
||||||
return getValue(docId);
|
return getValue(docId);
|
||||||
|
@ -57,162 +95,85 @@ public abstract class LongValues {
|
||||||
return missingValue;
|
return missingValue;
|
||||||
}
|
}
|
||||||
|
|
||||||
public Iter getIter(int docId) {
|
/**
|
||||||
assert !isMultiValued();
|
* Sets iteration to the specified docID and returns the number of
|
||||||
if (hasValue(docId)) {
|
* values for this document ID,
|
||||||
return iter.reset(getValue(docId));
|
* @param docId document ID
|
||||||
} else {
|
*
|
||||||
return Iter.Empty.INSTANCE;
|
* @see #nextValue()
|
||||||
}
|
*/
|
||||||
|
public int setDocument(int docId) {
|
||||||
|
this.docId = docId;
|
||||||
|
return hasValue(docId) ? 1 : 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
public static abstract class Dense extends LongValues {
|
* Returns the next value for the current docID set to {@link #setDocument(int)}.
|
||||||
|
* This method should only be called <tt>N</tt> times where <tt>N</tt> is the number
|
||||||
|
* returned from {@link #setDocument(int)}. If called more than <tt>N</tt> times the behavior
|
||||||
protected Dense(boolean multiValued) {
|
* is undefined.
|
||||||
super(multiValued);
|
*
|
||||||
}
|
* @return the next value for the current docID set to {@link #setDocument(int)}.
|
||||||
|
*/
|
||||||
@Override
|
public long nextValue() {
|
||||||
public final boolean hasValue(int docId) {
|
return getValue(docId);
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
public final long getValueMissing(int docId, long missingValue) {
|
|
||||||
assert hasValue(docId);
|
|
||||||
assert !isMultiValued();
|
|
||||||
return getValue(docId);
|
|
||||||
}
|
|
||||||
|
|
||||||
public final Iter getIter(int docId) {
|
|
||||||
assert hasValue(docId);
|
|
||||||
assert !isMultiValued();
|
|
||||||
return iter.reset(getValue(docId));
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Ordinal based {@link LongValues}.
|
||||||
|
*/
|
||||||
public static abstract class WithOrdinals extends LongValues {
|
public static abstract class WithOrdinals extends LongValues {
|
||||||
|
|
||||||
protected final Docs ordinals;
|
protected final Docs ordinals;
|
||||||
private final Iter.Multi iter;
|
|
||||||
|
|
||||||
protected WithOrdinals(Ordinals.Docs ordinals) {
|
protected WithOrdinals(Ordinals.Docs ordinals) {
|
||||||
super(ordinals.isMultiValued());
|
super(ordinals.isMultiValued());
|
||||||
this.ordinals = ordinals;
|
this.ordinals = ordinals;
|
||||||
iter = new Iter.Multi(this);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the associated ordinals instance.
|
||||||
|
* @return the associated ordinals instance.
|
||||||
|
*/
|
||||||
public Docs ordinals() {
|
public Docs ordinals() {
|
||||||
return this.ordinals;
|
return this.ordinals;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the value for the given ordinal.
|
||||||
|
* @param ord the ordinal to lookup.
|
||||||
|
* @return a long value associated with the given ordinal.
|
||||||
|
*/
|
||||||
|
public abstract long getValueByOrd(long ord);
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public final boolean hasValue(int docId) {
|
public final boolean hasValue(int docId) {
|
||||||
return ordinals.getOrd(docId) != 0;
|
return ordinals.getOrd(docId) != Ordinals.MISSING_ORDINAL;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public final long getValue(int docId) {
|
public final long getValue(int docId) {
|
||||||
return getValueByOrd(ordinals.getOrd(docId));
|
long ord = ordinals.getOrd(docId);
|
||||||
}
|
if (ord == Ordinals.MISSING_ORDINAL) {
|
||||||
|
return 0l;
|
||||||
public abstract long getValueByOrd(long ord);
|
}
|
||||||
|
return getValueByOrd(ord);
|
||||||
@Override
|
|
||||||
public final Iter getIter(int docId) {
|
|
||||||
return iter.reset(ordinals.getIter(docId));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public final long getValueMissing(int docId, long missingValue) {
|
public int setDocument(int docId) {
|
||||||
final long ord = ordinals.getOrd(docId);
|
this.docId = docId;
|
||||||
if (ord == 0) {
|
return ordinals.setDocument(docId);
|
||||||
return missingValue;
|
|
||||||
} else {
|
|
||||||
return getValueByOrd(ord);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
@Override
|
||||||
|
public long nextValue() {
|
||||||
public static interface Iter {
|
return getValueByOrd(ordinals.nextOrd());
|
||||||
|
|
||||||
boolean hasNext();
|
|
||||||
|
|
||||||
long next();
|
|
||||||
|
|
||||||
public static class Empty implements Iter {
|
|
||||||
|
|
||||||
public static final Empty INSTANCE = new Empty();
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean hasNext() {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public long next() {
|
|
||||||
throw new ElasticSearchIllegalStateException();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static class Single implements Iter {
|
|
||||||
|
|
||||||
public long value;
|
|
||||||
public boolean done;
|
|
||||||
|
|
||||||
public Single reset(long value) {
|
|
||||||
this.value = value;
|
|
||||||
this.done = false;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean hasNext() {
|
|
||||||
return !done;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public long next() {
|
|
||||||
assert !done;
|
|
||||||
done = true;
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static class Multi implements Iter {
|
|
||||||
|
|
||||||
private org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs.Iter ordsIter;
|
|
||||||
private long ord;
|
|
||||||
private WithOrdinals values;
|
|
||||||
|
|
||||||
public Multi(WithOrdinals values) {
|
|
||||||
this.values = values;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Multi reset(Ordinals.Docs.Iter ordsIter) {
|
|
||||||
this.ordsIter = ordsIter;
|
|
||||||
this.ord = ordsIter.next();
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean hasNext() {
|
|
||||||
return ord != 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public long next() {
|
|
||||||
long value = values.getValueByOrd(ord);
|
|
||||||
ord = ordsIter.next();
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
static class Empty extends LongValues {
|
|
||||||
|
private static final class Empty extends LongValues {
|
||||||
|
|
||||||
public Empty() {
|
public Empty() {
|
||||||
super(false);
|
super(false);
|
||||||
|
@ -228,34 +189,16 @@ public abstract class LongValues {
|
||||||
// conforms with all other impls when there is no value
|
// conforms with all other impls when there is no value
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Iter getIter(int docId) {
|
public int setDocument(int docId) {
|
||||||
return Iter.Empty.INSTANCE;
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public long nextValue() {
|
||||||
|
throw new ElasticSearchIllegalStateException("Empty LongValues has no next value");
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public static class Filtered extends LongValues {
|
|
||||||
|
|
||||||
protected final LongValues delegate;
|
|
||||||
|
|
||||||
public Filtered(LongValues delegate) {
|
|
||||||
super(delegate.isMultiValued());
|
|
||||||
this.delegate = delegate;
|
|
||||||
}
|
|
||||||
|
|
||||||
public boolean hasValue(int docId) {
|
|
||||||
return delegate.hasValue(docId);
|
|
||||||
}
|
|
||||||
|
|
||||||
public long getValue(int docId) {
|
|
||||||
return delegate.getValue(docId);
|
|
||||||
}
|
|
||||||
|
|
||||||
public Iter getIter(int docId) {
|
|
||||||
return delegate.getIter(docId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -26,7 +26,6 @@ import org.elasticsearch.common.unit.DistanceUnit;
|
||||||
import org.elasticsearch.common.util.SlicedDoubleList;
|
import org.elasticsearch.common.util.SlicedDoubleList;
|
||||||
import org.elasticsearch.common.util.SlicedLongList;
|
import org.elasticsearch.common.util.SlicedLongList;
|
||||||
import org.elasticsearch.common.util.SlicedObjectList;
|
import org.elasticsearch.common.util.SlicedObjectList;
|
||||||
import org.elasticsearch.index.fielddata.BytesValues.Iter;
|
|
||||||
import org.joda.time.DateTimeZone;
|
import org.joda.time.DateTimeZone;
|
||||||
import org.joda.time.MutableDateTime;
|
import org.joda.time.MutableDateTime;
|
||||||
|
|
||||||
|
@ -102,33 +101,29 @@ public abstract class ScriptDocValues {
|
||||||
return this.values;
|
return this.values;
|
||||||
}
|
}
|
||||||
|
|
||||||
public Iter getBytesIter() {
|
|
||||||
return values.getIter(docId);
|
|
||||||
}
|
|
||||||
|
|
||||||
public BytesRef getBytesValue() {
|
public BytesRef getBytesValue() {
|
||||||
return values.getValue(docId);
|
return values.getValue(docId);
|
||||||
}
|
}
|
||||||
|
|
||||||
public String getValue() {
|
public String getValue() {
|
||||||
final BytesRef value = values.getValue(docId);
|
String value = null;
|
||||||
if (value != null) {
|
if (values.setDocument(docId) > 0) {
|
||||||
UnicodeUtil.UTF8toUTF16(value, spare);
|
UnicodeUtil.UTF8toUTF16(values.nextValue(), spare);
|
||||||
return spare.toString();
|
value = spare.toString();
|
||||||
}
|
}
|
||||||
return null;
|
return value;
|
||||||
}
|
}
|
||||||
|
|
||||||
public List<String> getValues() {
|
public List<String> getValues() {
|
||||||
if (!listLoaded) {
|
if (!listLoaded) {
|
||||||
|
final int numValues = values.setDocument(docId);
|
||||||
list.offset = 0;
|
list.offset = 0;
|
||||||
list.length = 0;
|
list.grow(numValues);
|
||||||
Iter iter = values.getIter(docId);
|
list.length = numValues;
|
||||||
while (iter.hasNext()) {
|
for (int i = 0; i < numValues; i++) {
|
||||||
BytesRef next = iter.next();
|
BytesRef next = values.nextValue();
|
||||||
list.grow(list.length + 1);
|
|
||||||
UnicodeUtil.UTF8toUTF16(next, spare);
|
UnicodeUtil.UTF8toUTF16(next, spare);
|
||||||
list.values[list.length++] = spare.toString();
|
list.values[i] = spare.toString();
|
||||||
}
|
}
|
||||||
listLoaded = true;
|
listLoaded = true;
|
||||||
}
|
}
|
||||||
|
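getValue() and getValues() above now share the same setDocument()/nextValue() driving pattern. A self-contained sketch (assumed helper, not from this commit) of collecting every value of a document as Strings, the way the getValues() above does internally:

    static java.util.List<String> toStrings(org.elasticsearch.index.fielddata.BytesValues values, int docId) {
        final org.apache.lucene.util.CharsRef spare = new org.apache.lucene.util.CharsRef();
        final java.util.List<String> result = new java.util.ArrayList<String>();
        final int numValues = values.setDocument(docId);
        for (int i = 0; i < numValues; i++) {
            org.apache.lucene.util.UnicodeUtil.UTF8toUTF16(values.nextValue(), spare);
            result.add(spare.toString());
        }
        return result;
    }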
@ -158,22 +153,18 @@ public abstract class ScriptDocValues {
|
||||||
return !values.hasValue(docId);
|
return !values.hasValue(docId);
|
||||||
}
|
}
|
||||||
|
|
||||||
public LongValues.Iter getIter() {
|
|
||||||
return values.getIter(docId);
|
|
||||||
}
|
|
||||||
|
|
||||||
public long getValue() {
|
public long getValue() {
|
||||||
return values.getValue(docId);
|
return values.getValue(docId);
|
||||||
}
|
}
|
||||||
|
|
||||||
public List<Long> getValues() {
|
public List<Long> getValues() {
|
||||||
if (!listLoaded) {
|
if (!listLoaded) {
|
||||||
final LongValues.Iter iter = values.getIter(docId);
|
final int numValues = values.setDocument(docId);
|
||||||
list.offset = 0;
|
list.offset = 0;
|
||||||
list.length = 0;
|
list.grow(numValues);
|
||||||
while (iter.hasNext()) {
|
list.length = numValues;
|
||||||
list.grow(list.length + 1);
|
for (int i = 0; i < numValues; i++) {
|
||||||
list.values[list.length++] = iter.next();
|
list.values[i] = values.nextValue();
|
||||||
}
|
}
|
||||||
listLoaded = true;
|
listLoaded = true;
|
||||||
}
|
}
|
||||||
|
@ -207,9 +198,6 @@ public abstract class ScriptDocValues {
|
||||||
return !values.hasValue(docId);
|
return !values.hasValue(docId);
|
||||||
}
|
}
|
||||||
|
|
||||||
public DoubleValues.Iter getIter() {
|
|
||||||
return values.getIter(docId);
|
|
||||||
}
|
|
||||||
|
|
||||||
public double getValue() {
|
public double getValue() {
|
||||||
return values.getValue(docId);
|
return values.getValue(docId);
|
||||||
|
@ -217,12 +205,12 @@ public abstract class ScriptDocValues {
|
||||||
|
|
||||||
public List<Double> getValues() {
|
public List<Double> getValues() {
|
||||||
if (!listLoaded) {
|
if (!listLoaded) {
|
||||||
final DoubleValues.Iter iter = values.getIter(docId);
|
int numValues = values.setDocument(docId);
|
||||||
list.offset = 0;
|
list.offset = 0;
|
||||||
list.length = 0;
|
list.grow(numValues);
|
||||||
while (iter.hasNext()) {
|
list.length = numValues;
|
||||||
list.grow(list.length + 1);
|
for (int i = 0; i < numValues; i++) {
|
||||||
list.values[list.length++] = iter.next();
|
list.values[i] = values.nextValue();
|
||||||
}
|
}
|
||||||
listLoaded = true;
|
listLoaded = true;
|
||||||
}
|
}
|
||||||
|
@ -291,19 +279,18 @@ public abstract class ScriptDocValues {
|
||||||
|
|
||||||
public List<GeoPoint> getValues() {
|
public List<GeoPoint> getValues() {
|
||||||
if (!listLoaded) {
|
if (!listLoaded) {
|
||||||
GeoPointValues.Iter iter = values.getIter(docId);
|
int numValues = values.setDocument(docId);
|
||||||
list.offset = 0;
|
list.offset = 0;
|
||||||
list.length = 0;
|
list.grow(numValues);
|
||||||
while (iter.hasNext()) {
|
list.length = numValues;
|
||||||
int index = list.length;
|
for (int i = 0; i < numValues; i++) {
|
||||||
list.grow(index + 1);
|
GeoPoint next = values.nextValue();
|
||||||
GeoPoint next = iter.next();
|
GeoPoint point = list.values[i];
|
||||||
GeoPoint point = list.values[index];
|
|
||||||
if (point == null) {
|
if (point == null) {
|
||||||
point = list.values[index] = new GeoPoint();
|
point = list.values[i] = new GeoPoint();
|
||||||
}
|
}
|
||||||
point.reset(next.lat(), next.lon());
|
point.reset(next.lat(), next.lon());
|
||||||
list.values[list.length++] = point;
|
list.values[i] = point;
|
||||||
}
|
}
|
||||||
listLoaded = true;
|
listLoaded = true;
|
||||||
}
|
}
|
||||||
|
|
|
@ -188,7 +188,7 @@ public final class BytesRefOrdValComparator extends NestedWrappableComparator<By
|
||||||
@Override
|
@Override
|
||||||
public int compareDocToValue(int doc, BytesRef value) {
|
public int compareDocToValue(int doc, BytesRef value) {
|
||||||
final long ord = getOrd(doc);
|
final long ord = getOrd(doc);
|
||||||
final BytesRef docValue = ord == 0 ? missingValue : termsIndex.getValueByOrd(ord);
|
final BytesRef docValue = ord == Ordinals.MISSING_ORDINAL ? missingValue : termsIndex.getValueByOrd(ord);
|
||||||
return compareValues(docValue, value);
|
return compareValues(docValue, value);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -200,7 +200,7 @@ public final class BytesRefOrdValComparator extends NestedWrappableComparator<By
|
||||||
public int compareBottom(int doc) {
|
public int compareBottom(int doc) {
|
||||||
assert bottomSlot != -1;
|
assert bottomSlot != -1;
|
||||||
final long docOrd = getOrd(doc);
|
final long docOrd = getOrd(doc);
|
||||||
final long comparableOrd = docOrd == 0 ? missingOrd : docOrd << 2;
|
final long comparableOrd = docOrd == Ordinals.MISSING_ORDINAL ? missingOrd : docOrd << 2;
|
||||||
return LongValuesComparator.compare(bottomOrd, comparableOrd);
|
return LongValuesComparator.compare(bottomOrd, comparableOrd);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -213,7 +213,7 @@ public final class BytesRefOrdValComparator extends NestedWrappableComparator<By
|
||||||
@Override
|
@Override
|
||||||
public void copy(int slot, int doc) {
|
public void copy(int slot, int doc) {
|
||||||
final long ord = getOrd(doc);
|
final long ord = getOrd(doc);
|
||||||
if (ord == 0) {
|
if (ord == Ordinals.MISSING_ORDINAL) {
|
||||||
ords[slot] = missingOrd;
|
ords[slot] = missingOrd;
|
||||||
values[slot] = missingValue;
|
values[slot] = missingValue;
|
||||||
} else {
|
} else {
|
||||||
|
@ -222,7 +222,7 @@ public final class BytesRefOrdValComparator extends NestedWrappableComparator<By
|
||||||
if (values[slot] == null || values[slot] == missingValue) {
|
if (values[slot] == null || values[slot] == missingValue) {
|
||||||
values[slot] = new BytesRef();
|
values[slot] = new BytesRef();
|
||||||
}
|
}
|
||||||
termsIndex.getValueScratchByOrd(ord, values[slot]);
|
values[slot].copyBytes(termsIndex.getValueByOrd(ord));
|
||||||
}
|
}
|
||||||
readerGen[slot] = currentReaderGen;
|
readerGen[slot] = currentReaderGen;
|
||||||
}
|
}
|
||||||
|
@ -274,7 +274,7 @@ public final class BytesRefOrdValComparator extends NestedWrappableComparator<By
|
||||||
termsIndex = indexFieldData.load(context).getBytesValues();
|
termsIndex = indexFieldData.load(context).getBytesValues();
|
||||||
assert termsIndex.ordinals() != null && termsIndex.ordinals().ordinals() != null;
|
assert termsIndex.ordinals() != null && termsIndex.ordinals().ordinals() != null;
|
||||||
if (missingValue == null) {
|
if (missingValue == null) {
|
||||||
missingOrd = 0;
|
missingOrd = Ordinals.MISSING_ORDINAL;
|
||||||
} else {
|
} else {
|
||||||
missingOrd = ordInCurrentReader(termsIndex, missingValue);
|
missingOrd = ordInCurrentReader(termsIndex, missingValue);
|
||||||
assert consistentInsertedOrd(termsIndex, missingOrd, missingValue);
|
assert consistentInsertedOrd(termsIndex, missingOrd, missingValue);
|
||||||
|
@ -304,7 +304,7 @@ public final class BytesRefOrdValComparator extends NestedWrappableComparator<By
|
||||||
final BytesRef bottomValue = values[bottomSlot];
|
final BytesRef bottomValue = values[bottomSlot];
|
||||||
|
|
||||||
if (bottomValue == null) {
|
if (bottomValue == null) {
|
||||||
bottomOrd = 0;
|
bottomOrd = Ordinals.MISSING_ORDINAL;
|
||||||
} else if (currentReaderGen == readerGen[bottomSlot]) {
|
} else if (currentReaderGen == readerGen[bottomSlot]) {
|
||||||
bottomOrd = ords[bottomSlot];
|
bottomOrd = ords[bottomSlot];
|
||||||
} else {
|
} else {
|
||||||
|
@ -336,7 +336,8 @@ public final class BytesRefOrdValComparator extends NestedWrappableComparator<By
|
||||||
}
|
}
|
||||||
|
|
||||||
final protected static long binarySearch(BytesValues.WithOrdinals a, BytesRef key, long low, long high) {
|
final protected static long binarySearch(BytesValues.WithOrdinals a, BytesRef key, long low, long high) {
|
||||||
assert a.getValueByOrd(high) == null | a.getValueByOrd(high) != null; // make sure we actually can get these values
|
assert low != Ordinals.MISSING_ORDINAL;
|
||||||
|
assert high == Ordinals.MISSING_ORDINAL || (a.getValueByOrd(high) == null | a.getValueByOrd(high) != null); // make sure we actually can get these values
|
||||||
assert low == high + 1 || a.getValueByOrd(low) == null | a.getValueByOrd(low) != null;
|
assert low == high + 1 || a.getValueByOrd(low) == null | a.getValueByOrd(low) != null;
|
||||||
while (low <= high) {
|
while (low <= high) {
|
||||||
long mid = (low + high) >>> 1;
|
long mid = (low + high) >>> 1;
|
||||||
|
@ -358,57 +359,17 @@ public final class BytesRefOrdValComparator extends NestedWrappableComparator<By
|
||||||
return -(low + 1);
|
return -(low + 1);
|
||||||
}
|
}
|
||||||
|
|
||||||
-    static BytesRef getRelevantValue(BytesValues.WithOrdinals readerValues, int docId, SortMode sortMode) {
-        BytesValues.Iter iter = readerValues.getIter(docId);
-        if (!iter.hasNext()) {
-            return null;
-        }
-
-        BytesRef currentVal = iter.next();
-        BytesRef relevantVal = currentVal;
-        while (true) {
-            int cmp = currentVal.compareTo(relevantVal);
-            if (sortMode == SortMode.MAX) {
-                if (cmp > 0) {
-                    relevantVal = currentVal;
-                }
-            } else {
-                if (cmp < 0) {
-                    relevantVal = currentVal;
-                }
-            }
-            if (!iter.hasNext()) {
-                break;
-            }
-            currentVal = iter.next();
-        }
-        return relevantVal;
-    }
-
     static long getRelevantOrd(Ordinals.Docs readerOrds, int docId, SortMode sortMode) {
-        Ordinals.Docs.Iter iter = readerOrds.getIter(docId);
-        long currentVal = iter.next();
-        if (currentVal == 0) {
-            return 0;
-        }
-        long relevantVal = currentVal;
-        while (true) {
-            if (sortMode == SortMode.MAX) {
-                if (currentVal > relevantVal) {
-                    relevantVal = currentVal;
-                }
-            } else {
-                if (currentVal < relevantVal) {
-                    relevantVal = currentVal;
-                }
-            }
-            currentVal = iter.next();
-            if (currentVal == 0) {
-                break;
-            }
-        }
-        return relevantVal;
+        int length = readerOrds.setDocument(docId);
+        long relevantVal = sortMode.startLong();
+        long result = 0;
+        assert sortMode == SortMode.MAX || sortMode == SortMode.MIN;
+        for (int i = 0; i < length; i++) {
+            result = relevantVal = sortMode.apply(readerOrds.nextOrd(), relevantVal);
+        }
+        assert result >= 0;
+        assert result <= readerOrds.getMaxOrd();
+        return result;
         // Enable this when the api can tell us that the ords per doc are ordered
         /*if (reversed) {
             IntArrayRef ref = readerOrds.getOrds(docId);
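A worked example of the rewritten selection loop (ordinal values assumed for illustration):

    // With ordinals {3, 7, 2} for one document and SortMode.MAX:
    long relevant = SortMode.MAX.startLong();          // Long.MIN_VALUE
    for (long ord : new long[]{3, 7, 2}) {
        relevant = SortMode.MAX.apply(ord, relevant);  // 3, then 7, then 7
    }
    // relevant == 7; SortMode.MIN would yield 2 the same way.
    // A document with no ordinals never enters the loop, so 0 (the missing ordinal) is returned.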
|
||||||
|
|
|
@ -25,6 +25,7 @@ import org.apache.lucene.util.ArrayUtil;
|
||||||
import org.apache.lucene.util.BytesRef;
|
import org.apache.lucene.util.BytesRef;
|
||||||
import org.apache.lucene.util.RamUsageEstimator;
|
import org.apache.lucene.util.RamUsageEstimator;
|
||||||
import org.elasticsearch.index.fielddata.BytesValues;
|
import org.elasticsearch.index.fielddata.BytesValues;
|
||||||
|
import org.elasticsearch.index.fielddata.FilterBytesValues;
|
||||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
@ -61,22 +62,21 @@ public final class BytesRefValComparator extends NestedWrappableComparator<Bytes
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public int compareBottom(int doc) throws IOException {
|
public int compareBottom(int doc) throws IOException {
|
||||||
BytesRef val2 = docTerms.getValue(doc);
|
int length = docTerms.setDocument(doc); // safes one hasValue lookup
|
||||||
if (val2 == null) {
|
BytesRef val2 = length == 0 ? missingValue : docTerms.nextValue();
|
||||||
val2 = missingValue;
|
|
||||||
}
|
|
||||||
return compareValues(bottom, val2);
|
return compareValues(bottom, val2);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void copy(int slot, int doc) throws IOException {
|
public void copy(int slot, int doc) throws IOException {
|
||||||
if (!docTerms.hasValue(doc)) {
|
int length = docTerms.setDocument(doc); // safes one hasValue lookup
|
||||||
|
if (length == 0) {
|
||||||
values[slot] = missingValue;
|
values[slot] = missingValue;
|
||||||
} else {
|
} else {
|
||||||
if (values[slot] == null || values[slot] == missingValue) {
|
if (values[slot] == null || values[slot] == missingValue) {
|
||||||
values[slot] = new BytesRef();
|
values[slot] = new BytesRef();
|
||||||
}
|
}
|
||||||
docTerms.getValueScratch(doc, values[slot]);
|
values[slot].copyBytes(docTerms.nextValue());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -114,39 +114,14 @@ public final class BytesRefValComparator extends NestedWrappableComparator<Bytes
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public int compareDocToValue(int doc, BytesRef value) {
|
public int compareDocToValue(int doc, BytesRef value) {
|
||||||
return docTerms.getValue(doc).compareTo(value);
|
final int length = docTerms.setDocument(doc); // safes one hasValue lookup
|
||||||
|
return (length == 0 ? missingValue : docTerms.nextValue()).compareTo(value);
|
||||||
}
|
}
|
||||||
|
|
||||||
public static class FilteredByteValues extends BytesValues {
|
private static final class MultiValuedBytesWrapper extends FilterBytesValues {
|
||||||
|
|
||||||
protected final BytesValues delegate;
|
|
||||||
|
|
||||||
public FilteredByteValues(BytesValues delegate) {
|
|
||||||
super(delegate.isMultiValued());
|
|
||||||
this.delegate = delegate;
|
|
||||||
}
|
|
||||||
|
|
||||||
public boolean hasValue(int docId) {
|
|
||||||
return delegate.hasValue(docId);
|
|
||||||
}
|
|
||||||
|
|
||||||
public BytesRef makeSafe(BytesRef bytes) {
|
|
||||||
return delegate.makeSafe(bytes);
|
|
||||||
}
|
|
||||||
|
|
||||||
public BytesRef getValueScratch(int docId, BytesRef ret) {
|
|
||||||
return delegate.getValueScratch(docId, ret);
|
|
||||||
}
|
|
||||||
|
|
||||||
public Iter getIter(int docId) {
|
|
||||||
return delegate.getIter(docId);
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
private static final class MultiValuedBytesWrapper extends FilteredByteValues {
|
|
||||||
|
|
||||||
private final SortMode sortMode;
|
private final SortMode sortMode;
|
||||||
|
private int numValues;
|
||||||
|
|
||||||
public MultiValuedBytesWrapper(BytesValues delegate, SortMode sortMode) {
|
public MultiValuedBytesWrapper(BytesValues delegate, SortMode sortMode) {
|
||||||
super(delegate);
|
super(delegate);
|
||||||
|
@ -154,40 +129,47 @@ public final class BytesRefValComparator extends NestedWrappableComparator<Bytes
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public BytesRef getValueScratch(int docId, BytesRef relevantVal) {
|
public BytesRef getValue(int docId) {
|
||||||
BytesValues.Iter iter = delegate.getIter(docId);
|
numValues = delegate.setDocument(docId);
|
||||||
if (!iter.hasNext()) {
|
scratch.length = 0;
|
||||||
relevantVal.length = 0;
|
if (numValues == 0) {
|
||||||
return relevantVal;
|
scratch.length = 0;
|
||||||
|
return scratch;
|
||||||
}
|
}
|
||||||
|
return nextValue();
|
||||||
|
}
|
||||||
|
|
||||||
BytesRef currentVal = iter.next();
|
public int setDocument(int docId) {
|
||||||
|
// either 0 or 1
|
||||||
|
return Math.min(1, (numValues = delegate.setDocument(docId)));
|
||||||
|
}
|
||||||
|
|
||||||
|
public BytesRef nextValue() {
|
||||||
|
BytesRef currentVal = delegate.nextValue();
|
||||||
// We MUST allocate a new byte[] since relevantVal might have been filled by reference by a PagedBytes instance
|
// We MUST allocate a new byte[] since relevantVal might have been filled by reference by a PagedBytes instance
|
||||||
// meaning that the BytesRef.bytes are shared and shouldn't be overwritten. We can't use the bytes of the iterator
|
// meaning that the BytesRef.bytes are shared and shouldn't be overwritten. We can't use the bytes of the iterator
|
||||||
// either because they will be overwritten by subsequent calls in the current thread
|
// either because they will be overwritten by subsequent calls in the current thread
|
||||||
relevantVal.bytes = new byte[ArrayUtil.oversize(currentVal.length, RamUsageEstimator.NUM_BYTES_BYTE)];
|
scratch.bytes = new byte[ArrayUtil.oversize(currentVal.length, RamUsageEstimator.NUM_BYTES_BYTE)];
|
||||||
relevantVal.offset = 0;
|
scratch.offset = 0;
|
||||||
relevantVal.length = 0;
|
scratch.length = currentVal.length;
|
||||||
relevantVal.append(currentVal);
|
System.arraycopy(currentVal.bytes, currentVal.offset, scratch.bytes, 0, currentVal.length);
|
||||||
while (true) {
|
for (int i = 1; i < numValues; i++) {
|
||||||
int cmp = currentVal.compareTo(relevantVal);
|
currentVal = delegate.nextValue();
|
||||||
if (sortMode == SortMode.MAX) {
|
if (sortMode == SortMode.MAX) {
|
||||||
if (cmp > 0) {
|
if (currentVal.compareTo(scratch) > 0) {
|
||||||
relevantVal.length = 0;
|
scratch.grow(currentVal.length);
|
||||||
relevantVal.append(currentVal);
|
scratch.length = currentVal.length;
|
||||||
|
System.arraycopy(currentVal.bytes, currentVal.offset, scratch.bytes, 0, currentVal.length);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
if (cmp < 0) {
|
if (currentVal.compareTo(scratch) < 0) {
|
||||||
relevantVal.length = 0;
|
scratch.grow(currentVal.length);
|
||||||
relevantVal.append(currentVal);
|
scratch.length = currentVal.length;
|
||||||
|
System.arraycopy(currentVal.bytes, currentVal.offset, scratch.bytes, 0, currentVal.length);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (!iter.hasNext()) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
currentVal = iter.next();
|
|
||||||
}
|
}
|
||||||
return relevantVal;
|
return scratch;
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.index.fielddata.fieldcomparator;
|
||||||
import org.apache.lucene.index.AtomicReaderContext;
|
import org.apache.lucene.index.AtomicReaderContext;
|
||||||
import org.apache.lucene.search.FieldComparator;
|
import org.apache.lucene.search.FieldComparator;
|
||||||
import org.elasticsearch.index.fielddata.DoubleValues;
|
import org.elasticsearch.index.fielddata.DoubleValues;
|
||||||
|
import org.elasticsearch.index.fielddata.FilterDoubleValues;
|
||||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
@ -70,7 +71,7 @@ abstract class DoubleValuesComparatorBase<T extends Number> extends NumberCompar
|
||||||
return Double.compare(left, right);
|
return Double.compare(left, right);
|
||||||
}
|
}
|
||||||
|
|
||||||
static final class MultiValueWrapper extends DoubleValues.Filtered {
|
static final class MultiValueWrapper extends FilterDoubleValues {
|
||||||
|
|
||||||
private final SortMode sortMode;
|
private final SortMode sortMode;
|
||||||
|
|
||||||
|
@ -81,42 +82,15 @@ abstract class DoubleValuesComparatorBase<T extends Number> extends NumberCompar
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public double getValueMissing(int docId, double missing) {
|
public double getValueMissing(int docId, double missing) {
|
||||||
DoubleValues.Iter iter = delegate.getIter(docId);
|
int numValues = delegate.setDocument(docId);
|
||||||
if (!iter.hasNext()) {
|
if (numValues == 0) {
|
||||||
return missing;
|
return missing;
|
||||||
}
|
}
|
||||||
|
double relevantVal = sortMode.startDouble();
|
||||||
double currentVal = iter.next();
|
for (int i = 0; i < numValues; i++) {
|
||||||
double relevantVal = currentVal;
|
relevantVal = sortMode.apply(relevantVal, delegate.nextValue());
|
||||||
int counter = 1;
|
|
||||||
while (iter.hasNext()) {
|
|
||||||
currentVal = iter.next();
|
|
||||||
int cmp = Double.compare(currentVal, relevantVal);
|
|
||||||
switch (sortMode) {
|
|
||||||
case SUM:
|
|
||||||
relevantVal += currentVal;
|
|
||||||
break;
|
|
||||||
case AVG:
|
|
||||||
relevantVal += currentVal;
|
|
||||||
counter++;
|
|
||||||
break;
|
|
||||||
case MIN:
|
|
||||||
if (cmp < 0) {
|
|
||||||
relevantVal = currentVal;
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case MAX:
|
|
||||||
if (cmp > 0) {
|
|
||||||
relevantVal = currentVal;
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (sortMode == SortMode.AVG) {
|
|
||||||
return relevantVal / counter;
|
|
||||||
} else {
|
|
||||||
return relevantVal;
|
|
||||||
}
|
}
|
||||||
|
return sortMode.reduce(relevantVal, numValues);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
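The getValueMissing() rewrite above folds the old switch over SUM/AVG/MIN/MAX into apply() plus reduce(). A worked example for SortMode.AVG (values assumed for illustration):

    double acc = SortMode.AVG.startDouble();             // 0.0
    for (double v : new double[]{1.0, 2.0, 4.0}) {
        acc = SortMode.AVG.apply(acc, v);                // running sum: 1.0, 3.0, 7.0
    }
    double avg = SortMode.AVG.reduce(acc, 3);            // 7.0 / 3 = 2.333...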
@ -41,6 +41,7 @@ public class GeoDistanceComparator extends NumberComparatorBase<Double> {
|
||||||
protected final GeoDistance geoDistance;
|
protected final GeoDistance geoDistance;
|
||||||
protected final GeoDistance.FixedSourceDistance fixedSourceDistance;
|
protected final GeoDistance.FixedSourceDistance fixedSourceDistance;
|
||||||
protected final SortMode sortMode;
|
protected final SortMode sortMode;
|
||||||
|
private static final Double MISSING_VALUE = Double.MAX_VALUE;
|
||||||
|
|
||||||
private final double[] values;
|
private final double[] values;
|
||||||
private double bottom;
|
private double bottom;
|
||||||
|
@ -71,39 +72,19 @@ public class GeoDistanceComparator extends NumberComparatorBase<Double> {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public int compare(int slot1, int slot2) {
|
public int compare(int slot1, int slot2) {
|
||||||
final double v1 = values[slot1];
|
return Double.compare(values[slot1], values[slot2]);
|
||||||
final double v2 = values[slot2];
|
|
||||||
if (v1 > v2) {
|
|
||||||
return 1;
|
|
||||||
} else if (v1 < v2) {
|
|
||||||
return -1;
|
|
||||||
} else {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public int compareBottom(int doc) {
|
public int compareBottom(int doc) {
|
||||||
final double v2 = geoDistanceValues.computeDistance(doc);
|
final double v2 = geoDistanceValues.computeDistance(doc);
|
||||||
if (bottom > v2) {
|
return Double.compare(bottom, v2);
|
||||||
return 1;
|
|
||||||
} else if (bottom < v2) {
|
|
||||||
return -1;
|
|
||||||
} else {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public int compareDocToValue(int doc, Double distance2) throws IOException {
|
public int compareDocToValue(int doc, Double distance2) throws IOException {
|
||||||
double distance1 = geoDistanceValues.computeDistance(doc);
|
double distance1 = geoDistanceValues.computeDistance(doc);
|
||||||
if (distance1 < distance2) {
|
return Double.compare(distance1, distance2);
|
||||||
return -1;
|
|
||||||
} else if (distance1 == distance2) {
|
|
||||||
return 0;
|
|
||||||
} else {
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -133,12 +114,12 @@ public class GeoDistanceComparator extends NumberComparatorBase<Double> {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void missing(int slot) {
|
public void missing(int slot) {
|
||||||
values[slot] = Double.MAX_VALUE;
|
values[slot] = MISSING_VALUE;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public int compareBottomMissing() {
|
public int compareBottomMissing() {
|
||||||
return Double.compare(bottom, Double.MAX_VALUE);
|
return Double.compare(bottom, MISSING_VALUE);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Computes the distance based on geo points.
|
// Computes the distance based on geo points.
|
||||||
|
@ -170,7 +151,7 @@ public class GeoDistanceComparator extends NumberComparatorBase<Double> {
|
||||||
GeoPoint geoPoint = readerValues.getValue(doc);
|
GeoPoint geoPoint = readerValues.getValue(doc);
|
||||||
if (geoPoint == null) {
|
if (geoPoint == null) {
|
||||||
// is this true? push this to the "end"
|
// is this true? push this to the "end"
|
||||||
return Double.MAX_VALUE;
|
return MISSING_VALUE;
|
||||||
} else {
|
} else {
|
||||||
return fixedSourceDistance.calculate(geoPoint.lat(), geoPoint.lon());
|
return fixedSourceDistance.calculate(geoPoint.lat(), geoPoint.lon());
|
||||||
}
|
}
|
||||||
|
@ -189,42 +170,15 @@ public class GeoDistanceComparator extends NumberComparatorBase<Double> {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public double computeDistance(int doc) {
|
public double computeDistance(int doc) {
|
||||||
GeoPointValues.Iter iter = readerValues.getIter(doc);
|
final int length = readerValues.setDocument(doc);
|
||||||
if (!iter.hasNext()) {
|
double distance = sortMode.startDouble();
|
||||||
return Double.MAX_VALUE;
|
double result = MISSING_VALUE;
|
||||||
}
|
for (int i = 0; i < length; i++) {
|
||||||
|
GeoPoint point = readerValues.nextValue();
|
||||||
GeoPoint point = iter.next();
|
result = distance = sortMode.apply(distance, fixedSourceDistance.calculate(point.lat(), point.lon()));
|
||||||
double distance = fixedSourceDistance.calculate(point.lat(), point.lon());
|
|
||||||
int counter = 1;
|
|
||||||
while (iter.hasNext()) {
|
|
||||||
point = iter.next();
|
|
||||||
double newDistance = fixedSourceDistance.calculate(point.lat(), point.lon());
|
|
||||||
switch (sortMode) {
|
|
||||||
case MIN:
|
|
||||||
if (distance > newDistance) {
|
|
||||||
distance = newDistance;
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case MAX:
|
|
||||||
if (distance < newDistance) {
|
|
||||||
distance = newDistance;
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case AVG:
|
|
||||||
distance += newDistance;
|
|
||||||
counter++;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (sortMode == SortMode.AVG && counter > 1) {
|
|
||||||
return distance / counter;
|
|
||||||
} else {
|
|
||||||
return distance;
|
|
||||||
}
|
}
|
||||||
|
return sortMode.reduce(result, length);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -20,6 +20,7 @@ package org.elasticsearch.index.fielddata.fieldcomparator;
|
||||||
|
|
||||||
import org.apache.lucene.index.AtomicReaderContext;
|
import org.apache.lucene.index.AtomicReaderContext;
|
||||||
import org.apache.lucene.search.FieldComparator;
|
import org.apache.lucene.search.FieldComparator;
|
||||||
|
import org.elasticsearch.index.fielddata.FilterLongValues;
|
||||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
||||||
import org.elasticsearch.index.fielddata.LongValues;
|
import org.elasticsearch.index.fielddata.LongValues;
|
||||||
|
|
||||||
|
@ -77,7 +78,7 @@ abstract class LongValuesComparatorBase<T extends Number> extends NumberComparat
|
||||||
return compare(bottom, missingValue);
|
return compare(bottom, missingValue);
|
||||||
}
|
}
|
||||||
|
|
||||||
private static final class MultiValueWrapper extends LongValues.Filtered {
|
private static final class MultiValueWrapper extends FilterLongValues {
|
||||||
|
|
||||||
private final SortMode sortMode;
|
private final SortMode sortMode;
|
||||||
|
|
||||||
|
@ -88,40 +89,14 @@ abstract class LongValuesComparatorBase<T extends Number> extends NumberComparat
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public long getValueMissing(int docId, long missing) {
|
public long getValueMissing(int docId, long missing) {
|
||||||
LongValues.Iter iter = delegate.getIter(docId);
|
final int numValues = delegate.setDocument(docId);
|
||||||
if (!iter.hasNext()) {
|
long relevantVal = sortMode.startLong();
|
||||||
return missing;
|
long result = missing;
|
||||||
|
for (int i = 0; i < numValues; i++) {
|
||||||
|
result = relevantVal = sortMode.apply(relevantVal, delegate.nextValue());
|
||||||
}
|
}
|
||||||
|
return sortMode.reduce(result, numValues);
|
||||||
|
|
||||||
long currentVal = iter.next();
|
|
||||||
long relevantVal = currentVal;
|
|
||||||
int counter = 1;
|
|
||||||
while (iter.hasNext()) {
|
|
||||||
currentVal = iter.next();
|
|
||||||
switch (sortMode) {
|
|
||||||
case SUM:
|
|
||||||
relevantVal += currentVal;
|
|
||||||
break;
|
|
||||||
case AVG:
|
|
||||||
relevantVal += currentVal;
|
|
||||||
counter++;
|
|
||||||
break;
|
|
||||||
case MAX:
|
|
||||||
if (currentVal > relevantVal) {
|
|
||||||
relevantVal = currentVal;
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case MIN:
|
|
||||||
if (currentVal < relevantVal) {
|
|
||||||
relevantVal = currentVal;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (sortMode == SortMode.AVG) {
|
|
||||||
return relevantVal / counter;
|
|
||||||
} else {
|
|
||||||
return relevantVal;
|
|
||||||
}
|
|
||||||
// If we have a method on readerValues that tells if the values emitted by Iter or ArrayRef are sorted per
|
// If we have a method on readerValues that tells if the values emitted by Iter or ArrayRef are sorted per
|
||||||
// document that we can do this or something similar:
|
// document that we can do this or something similar:
|
||||||
// (This is already possible, if values are loaded from index, but we just need a method that tells us this
|
// (This is already possible, if values are loaded from index, but we just need a method that tells us this
|
||||||
|
|
|
@ -22,6 +22,8 @@ package org.elasticsearch.index.fielddata.fieldcomparator;
|
||||||
|
|
||||||
import org.elasticsearch.ElasticSearchIllegalArgumentException;
|
import org.elasticsearch.ElasticSearchIllegalArgumentException;
|
||||||
|
|
||||||
|
import java.util.Locale;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Defines what values to pick in the case a document contains multiple values for a particular field.
|
* Defines what values to pick in the case a document contains multiple values for a particular field.
|
||||||
*/
|
*/
|
||||||
|
@ -30,33 +32,233 @@ public enum SortMode {
|
||||||
/**
|
/**
|
||||||
* Sum of all the values.
|
* Sum of all the values.
|
||||||
*/
|
*/
|
||||||
SUM,
|
SUM {
|
||||||
|
/**
|
||||||
|
* Returns the sum of the two values
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public double apply(double a, double b) {
|
||||||
|
return a + b;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the sum of the two values
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public long apply(long a, long b) {
|
||||||
|
return a + b;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Average of all the values.
|
* Average of all the values.
|
||||||
*/
|
*/
|
||||||
AVG,
|
AVG {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the sum of the two values
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public double apply(double a, double b) {
|
||||||
|
return a + b;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the sum of the two values
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public long apply(long a, long b) {
|
||||||
|
return a + b;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns <code>a / Math.max(1.0d, numValues)</code>
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public double reduce(double a, int numValues) {
|
||||||
|
return a / Math.max(1.0d, (double) numValues);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns <code>Math.round(a / Math.max(1.0, numValues))</code>
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public long reduce(long a, int numValues) {
|
||||||
|
return Math.round(a / Math.max(1.0, numValues));
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Pick the lowest value.
|
* Pick the lowest value.
|
||||||
*/
|
*/
|
||||||
MIN,
|
MIN {
|
||||||
|
/**
|
||||||
|
* Equivalent to {@link Math#min(double, double)}
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public double apply(double a, double b) {
|
||||||
|
return Math.min(a, b);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Equivalent to {@link Math#min(long, long)}
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public long apply(long a, long b) {
|
||||||
|
return Math.min(a, b);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns {@link Double#POSITIVE_INFINITY}
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public double startDouble() {
|
||||||
|
return Double.POSITIVE_INFINITY;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns {@link Long#MAX_VALUE}
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public long startLong() {
|
||||||
|
return Long.MAX_VALUE;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Pick the highest value.
|
* Pick the highest value.
|
||||||
*/
|
*/
|
||||||
MAX;
|
MAX {
|
||||||
|
/**
|
||||||
|
* Equivalent to {@link Math#max(double, double)}
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public double apply(double a, double b) {
|
||||||
|
return Math.max(a, b);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Equivalent to {@link Math#max(long, long)}
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public long apply(long a, long b) {
|
||||||
|
return Math.max(a, b);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns {@link Double#NEGATIVE_INFINITY}
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public double startDouble() {
|
||||||
|
return Double.NEGATIVE_INFINITY;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns {@link Long#MIN_VALUE}
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public long startLong() {
|
||||||
|
return Long.MIN_VALUE;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Applies the sort mode and returns the result. This method is meant to be
|
||||||
|
* a binary function that is commonly used in a loop to find the relevant
|
||||||
|
* value for the sort mode in a list of values. For instance if the sort mode
|
||||||
|
* is {@link SortMode#MAX} this method is equivalent to {@link Math#max(double, double)}.
|
||||||
|
*
|
||||||
|
* Note: all implementations are idempotent.
|
||||||
|
*
|
||||||
|
* @param a an argument
|
||||||
|
* @param b another argument
|
||||||
|
* @return the result of the function.
|
||||||
|
*/
|
||||||
|
public abstract double apply(double a, double b);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Applies the sort mode and returns the result. This method is meant to be
|
||||||
|
* a binary function that is commonly used in a loop to find the relevant
|
||||||
|
* value for the sort mode in a list of values. For instance if the sort mode
|
||||||
|
* is {@link SortMode#MAX} this method is equivalent to {@link Math#max(long, long)}.
|
||||||
|
*
|
||||||
|
* Note: all implementations are idempotent.
|
||||||
|
*
|
||||||
|
* @param a an argument
|
||||||
|
* @param b another argument
|
||||||
|
* @return the result of the function.
|
||||||
|
*/
|
||||||
|
public abstract long apply(long a, long b);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns an initial value for the sort mode that is guaranteed to have no impact if passed
|
||||||
|
* to {@link #apply(double, double)}. This value should be used as the initial value if the
|
||||||
|
* sort mode is applied to a non-empty list of values. For instance:
|
||||||
|
* <pre>
|
||||||
|
* double relevantValue = sortMode.startDouble();
|
||||||
|
* for (int i = 0; i < array.length; i++) {
|
||||||
|
* relevantValue = sortMode.apply(array[i], relevantValue);
|
||||||
|
* }
|
||||||
|
* </pre>
|
||||||
|
*
|
||||||
|
* Note: This method return <code>0</code> by default.
|
||||||
|
*
|
||||||
|
* @return an initial value for the sort mode.
|
||||||
|
*/
|
||||||
|
public double startDouble() {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns an initial value for the sort mode that is guaranteed to have no impact if passed
|
||||||
|
* to {@link #apply(long, long)}. This value should be used as the initial value if the
|
||||||
|
* sort mode is applied to a non-empty list of values. For instance:
|
||||||
|
* <pre>
|
||||||
|
* long relevantValue = sortMode.startLong();
|
||||||
|
* for (int i = 0; i < array.length; i++) {
|
||||||
|
* relevantValue = sortMode.apply(array[i], relevantValue);
|
||||||
|
* }
|
||||||
|
* </pre>
|
||||||
|
*
|
||||||
|
* Note: This method return <code>0</code> by default.
|
||||||
|
* @return an initial value for the sort mode.
|
||||||
|
*/
|
||||||
|
public long startLong() {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the aggregated value based on the sort mode. For instance if {@link SortMode#AVG} is used
|
||||||
|
* this method divides the given value by the number of values. The default implementation returns
|
||||||
|
* the first argument.
|
||||||
|
*
|
||||||
|
* Note: all implementations are idempotent.
|
||||||
|
*/
|
||||||
|
public double reduce(double a, int numValues) {
|
||||||
|
return a;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the aggregated value based on the sort mode. For instance if {@link SortMode#AVG} is used
|
||||||
|
* this method divides the given value by the number of values. The default implementation returns
|
||||||
|
* the first argument.
|
||||||
|
*
|
||||||
|
* Note: all implementations are idempotent.
|
||||||
|
*/
|
||||||
|
public long reduce(long a, int numValues) {
|
||||||
|
return a;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A case insensitive version of {@link #valueOf(String)}
|
||||||
|
*
|
||||||
|
* @throws ElasticSearchIllegalArgumentException if the given string doesn't match a sort mode or is <code>null</code>.
|
||||||
|
*/
|
||||||
     public static SortMode fromString(String sortMode) {
-        if ("min".equals(sortMode)) {
-            return MIN;
-        } else if ("max".equals(sortMode)) {
-            return MAX;
-        } else if ("sum".equals(sortMode)) {
-            return SUM;
-        } else if ("avg".equals(sortMode)) {
-            return AVG;
-        } else {
+        try {
+            return valueOf(sortMode.toUpperCase(Locale.ROOT));
+        } catch (Throwable t) {
             throw new ElasticSearchIllegalArgumentException("Illegal sort_mode " + sortMode);
         }
     }
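Usage sketch for the new parsing (inputs assumed); matching is now case-insensitive because it delegates to valueOf:

    SortMode avg = SortMode.fromString("avg");    // same as SortMode.AVG
    SortMode max = SortMode.fromString("MAX");    // upper case is accepted as well
    // SortMode.fromString("median") throws ElasticSearchIllegalArgumentException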
|
||||||
|
|
|
@ -38,16 +38,6 @@ public class DocIdOrdinals implements Ordinals {
|
||||||
this.numDocs = numDocs;
|
this.numDocs = numDocs;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean hasSingleArrayBackingStorage() {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Object getBackingStorage() {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public long getMemorySizeInBytes() {
|
public long getMemorySizeInBytes() {
|
||||||
return RamUsageEstimator.NUM_BYTES_OBJECT_REF;
|
return RamUsageEstimator.NUM_BYTES_OBJECT_REF;
|
||||||
|
@ -82,7 +72,8 @@ public class DocIdOrdinals implements Ordinals {
|
||||||
|
|
||||||
private final DocIdOrdinals parent;
|
private final DocIdOrdinals parent;
|
||||||
private final LongsRef longsScratch = new LongsRef(new long[1], 0, 1);
|
private final LongsRef longsScratch = new LongsRef(new long[1], 0, 1);
|
||||||
private final SingleValueIter iter = new SingleValueIter();
|
private int docId = -1;
|
||||||
|
private long currentOrdinal = -1;
|
||||||
|
|
||||||
public Docs(DocIdOrdinals parent) {
|
public Docs(DocIdOrdinals parent) {
|
||||||
this.parent = parent;
|
this.parent = parent;
|
||||||
|
@ -115,18 +106,32 @@ public class DocIdOrdinals implements Ordinals {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public long getOrd(int docId) {
|
public long getOrd(int docId) {
|
||||||
return docId + 1;
|
return currentOrdinal = docId + 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public LongsRef getOrds(int docId) {
|
public LongsRef getOrds(int docId) {
|
||||||
longsScratch.longs[0] = docId + 1;
|
longsScratch.longs[0] = currentOrdinal = docId + 1;
|
||||||
return longsScratch;
|
return longsScratch;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Iter getIter(int docId) {
|
public long nextOrd() {
|
||||||
return iter.reset(docId + 1);
|
assert docId >= 0;
|
||||||
|
currentOrdinal = docId + 1;
|
||||||
|
docId = -1;
|
||||||
|
return currentOrdinal;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int setDocument(int docId) {
|
||||||
|
this.docId = docId;
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public long currentOrd() {
|
||||||
|
return currentOrdinal;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
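For DocIdOrdinals every document has exactly one ordinal, its docId plus one, so setDocument() always reports a single value (illustrative values, not from the commit):

    // docs.setDocument(5) == 1
    // docs.nextOrd()      == 6
    // docs.currentOrd()   == 6 afterwards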
|
@ -20,6 +20,7 @@
|
||||||
package org.elasticsearch.index.fielddata.ordinals;
|
package org.elasticsearch.index.fielddata.ordinals;
|
||||||
|
|
||||||
import org.apache.lucene.util.LongsRef;
|
import org.apache.lucene.util.LongsRef;
|
||||||
|
import org.elasticsearch.ElasticSearchIllegalStateException;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
*/
|
*/
|
||||||
|
@ -36,16 +37,6 @@ public class EmptyOrdinals implements Ordinals {
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean hasSingleArrayBackingStorage() {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Object getBackingStorage() {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public boolean isMultiValued() {
|
public boolean isMultiValued() {
|
||||||
return false;
|
return false;
|
||||||
|
@ -72,7 +63,6 @@ public class EmptyOrdinals implements Ordinals {
|
||||||
}
|
}
|
||||||
|
|
||||||
public static class Docs implements Ordinals.Docs {
|
public static class Docs implements Ordinals.Docs {
|
||||||
|
|
||||||
private final EmptyOrdinals parent;
|
private final EmptyOrdinals parent;
|
||||||
public static final LongsRef EMPTY_LONGS_REF = new LongsRef();
|
public static final LongsRef EMPTY_LONGS_REF = new LongsRef();
|
||||||
|
|
||||||
|
@ -116,9 +106,18 @@ public class EmptyOrdinals implements Ordinals {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Iter getIter(int docId) {
|
public long nextOrd() {
|
||||||
return EmptyIter.INSTANCE;
|
throw new ElasticSearchIllegalStateException("Empty ordinals has no nextOrd");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int setDocument(int docId) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public long currentOrd() {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
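EmptyOrdinals may throw from nextOrd() because callers are expected to bound their loop by the count returned from setDocument(); a sketch of the expected caller (assumed code, not part of the diff):

    static void consumeOrds(org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs docs, int docId) {
        final int numOrds = docs.setDocument(docId); // 0 for EmptyOrdinals, so nextOrd() is never reached
        for (int i = 0; i < numOrds; i++) {
            final long ord = docs.nextOrd();
            // process ord
        }
    }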
@ -25,7 +25,6 @@ import org.apache.lucene.util.RamUsageEstimator;
|
||||||
import org.apache.lucene.util.packed.AppendingPackedLongBuffer;
|
import org.apache.lucene.util.packed.AppendingPackedLongBuffer;
|
||||||
import org.apache.lucene.util.packed.MonotonicAppendingLongBuffer;
|
import org.apache.lucene.util.packed.MonotonicAppendingLongBuffer;
|
||||||
import org.apache.lucene.util.packed.PackedInts;
|
import org.apache.lucene.util.packed.PackedInts;
|
||||||
import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs.Iter;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* {@link Ordinals} implementation which is efficient at storing field data ordinals for multi-valued or sparse fields.
|
* {@link Ordinals} implementation which is efficient at storing field data ordinals for multi-valued or sparse fields.
|
||||||
|
@ -77,16 +76,6 @@ public class MultiOrdinals implements Ordinals {
|
||||||
assert ords.size() == builder.getTotalNumOrds() : ords.size() + " != " + builder.getTotalNumOrds();
|
assert ords.size() == builder.getTotalNumOrds() : ords.size() + " != " + builder.getTotalNumOrds();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean hasSingleArrayBackingStorage() {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Object getBackingStorage() {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public long getMemorySizeInBytes() {
|
public long getMemorySizeInBytes() {
|
||||||
return endOffsets.ramBytesUsed() + ords.ramBytesUsed();
|
return endOffsets.ramBytesUsed() + ords.ramBytesUsed();
|
||||||
|
@ -123,14 +112,15 @@ public class MultiOrdinals implements Ordinals {
|
||||||
private final MonotonicAppendingLongBuffer endOffsets;
|
private final MonotonicAppendingLongBuffer endOffsets;
|
||||||
private final AppendingPackedLongBuffer ords;
|
private final AppendingPackedLongBuffer ords;
|
||||||
private final LongsRef longsScratch;
|
private final LongsRef longsScratch;
|
||||||
private final MultiIter iter;
|
private long offset;
|
||||||
|
private long limit;
|
||||||
|
private long currentOrd;
|
||||||
|
|
||||||
MultiDocs(MultiOrdinals ordinals) {
|
MultiDocs(MultiOrdinals ordinals) {
|
||||||
this.ordinals = ordinals;
|
this.ordinals = ordinals;
|
||||||
this.endOffsets = ordinals.endOffsets;
|
this.endOffsets = ordinals.endOffsets;
|
||||||
this.ords = ordinals.ords;
|
this.ords = ordinals.ords;
|
||||||
this.longsScratch = new LongsRef(16);
|
this.longsScratch = new LongsRef(16);
|
||||||
this.iter = new MultiIter(ords);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -163,9 +153,9 @@ public class MultiOrdinals implements Ordinals {
|
||||||
final long startOffset = docId > 0 ? endOffsets.get(docId - 1) : 0;
|
final long startOffset = docId > 0 ? endOffsets.get(docId - 1) : 0;
|
||||||
final long endOffset = endOffsets.get(docId);
|
final long endOffset = endOffsets.get(docId);
|
||||||
if (startOffset == endOffset) {
|
if (startOffset == endOffset) {
|
||||||
return 0L; // ord for missing values
|
return currentOrd = 0L; // ord for missing values
|
||||||
} else {
|
} else {
|
||||||
return 1L + ords.get(startOffset);
|
return currentOrd = 1L + ords.get(startOffset);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -186,34 +176,23 @@ public class MultiOrdinals implements Ordinals {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Iter getIter(int docId) {
|
public long nextOrd() {
|
||||||
final long startOffset = docId > 0 ? endOffsets.get(docId - 1) : 0;
|
assert offset < limit;
|
||||||
final long endOffset = endOffsets.get(docId);
|
return currentOrd = 1L + ords.get(offset++);
|
||||||
iter.offset = startOffset;
|
|
||||||
iter.endOffset = endOffset;
|
|
||||||
return iter;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
static class MultiIter implements Iter {
|
|
||||||
|
|
||||||
final AppendingPackedLongBuffer ordinals;
|
|
||||||
long offset, endOffset;
|
|
||||||
|
|
||||||
MultiIter(AppendingPackedLongBuffer ordinals) {
|
|
||||||
this.ordinals = ordinals;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public long next() {
|
public int setDocument(int docId) {
|
||||||
if (offset >= endOffset) {
|
final long startOffset = docId > 0 ? endOffsets.get(docId - 1) : 0;
|
||||||
return 0L;
|
final long endOffset = endOffsets.get(docId);
|
||||||
} else {
|
offset = startOffset;
|
||||||
return 1L + ordinals.get(offset++);
|
limit = endOffset;
|
||||||
}
|
return (int) (endOffset - startOffset);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public long currentOrd() {
|
||||||
|
return currentOrd;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
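A worked example of the offset arithmetic in MultiDocs above (offsets assumed for illustration):

    // endOffsets = {2, 2, 5}, ords holds the per-document ordinals back to back (stored as ord - 1)
    // setDocument(0) -> offset = 0, limit = 2, returns 2; nextOrd() yields ords.get(0) + 1, then ords.get(1) + 1
    // setDocument(1) -> offset = 2, limit = 2, returns 0; nextOrd() must not be called
    // setDocument(2) -> offset = 2, limit = 5, returns 3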
@ -26,15 +26,8 @@ import org.apache.lucene.util.LongsRef;
|
||||||
*/
|
*/
|
||||||
public interface Ordinals {
|
public interface Ordinals {
|
||||||
|
|
||||||
/**
|
static final long MISSING_ORDINAL = 0;
|
||||||
* Are the ordinals backed by a single ordinals array?
|
static final long MIN_ORDINAL = 1;
|
||||||
*/
|
|
||||||
boolean hasSingleArrayBackingStorage();
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the backing storage for this ordinals.
|
|
||||||
*/
|
|
||||||
Object getBackingStorage();
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The memory size this ordinals take.
|
* The memory size this ordinals take.
|
||||||
|
@ -52,13 +45,13 @@ public interface Ordinals {
|
||||||
int getNumDocs();
|
int getNumDocs();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The number of ordinals, excluding the "0" ordinal indicating a missing value.
|
* The number of ordinals, excluding the {@link #MISSING_ORDINAL} ordinal indicating a missing value.
|
||||||
*/
|
*/
|
||||||
long getNumOrds();
|
long getNumOrds();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns total unique ord count; this includes +1 for
|
* Returns total unique ord count; this includes +1 for
|
||||||
* the null ord (always 0).
|
* the {@link #MISSING_ORDINAL} ord (always {@value #MISSING_ORDINAL} ).
|
||||||
*/
|
*/
|
||||||
long getMaxOrd();
|
long getMaxOrd();
|
||||||
|
|
||||||
|
@ -72,6 +65,16 @@ public interface Ordinals {
|
||||||
* is that this gets created for each "iteration" over ordinals.
|
* is that this gets created for each "iteration" over ordinals.
|
||||||
* <p/>
|
* <p/>
|
||||||
* <p>A value of 0 ordinal when iterating indicated "no" value.</p>
|
* <p>A value of 0 ordinal when iterating indicated "no" value.</p>
|
||||||
|
+     * To iterate over the set of ordinals for a given document use {@link #setDocument(int)} and {@link #nextOrd()} as
+     * shown in the example below:
+     * <pre>
+     *   Ordinals.Docs docs = ...;
+     *   final int len = docs.setDocument(docId);
+     *   for (int i = 0; i < len; i++) {
+     *       final long ord = docs.nextOrd();
+     *       // process ord
+     *   }
+     * </pre>
      */
     interface Docs {

@@ -113,51 +116,35 @@ public interface Ordinals {
         */
        LongsRef getOrds(int docId);

        /**
-        * Returns an iterator of the ordinals that match the docId, with an
-        * empty iterator for a doc with no ordinals.
+        * Returns the next ordinal for the current docID set to {@link #setDocument(int)}.
+        * This method should only be called <tt>N</tt> times where <tt>N</tt> is the number
+        * returned from {@link #setDocument(int)}. If called more than <tt>N</tt> times the behavior
+        * is undefined.
+        *
+        * Note: This method will never return <tt>0</tt>.
+        *
+        * @return the next ordinal for the current docID set to {@link #setDocument(int)}.
         */
-       Iter getIter(int docId);
+       long nextOrd();

        /**
-        * An iterator over ordinals values.
+        * Sets iteration to the specified docID and returns the number of
+        * ordinals for this document ID.
+        *
+        * @param docId document ID
+        * @see #nextOrd()
         */
-       interface Iter {
-
-           /**
-            * Gets the next ordinal. Returning 0 if the iteration is exhausted.
-            */
-           long next();
-       }
-
-       static class EmptyIter implements Iter {
-
-           public static EmptyIter INSTANCE = new EmptyIter();
-
-           @Override
-           public long next() {
-               return 0;
-           }
-       }
-
-       static class SingleValueIter implements Iter {
-
-           private long value;
-
-           public SingleValueIter reset(long value) {
-               this.value = value;
-               return this;
-           }
-
-           @Override
-           public long next() {
-               long actual = value;
-               value = 0;
-               return actual;
-           }
-       }
+       int setDocument(int docId);
+
+       /**
+        * Returns the current ordinal in the iteration
+        * @return the current ordinal in the iteration
+        */
+       long currentOrd();
    }
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
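For illustration, a minimal consumer of the new bounded iteration could look like the sketch below. The `docs` variable and the `consume(long)` callback are assumptions for the example, not part of this change.

    // Sketch: bounded iteration over a document's ordinals with the stateful API.
    final int count = docs.setDocument(docId);   // number of ordinals for docId, 0 if the doc has no value
    for (int i = 0; i < count; i++) {
        final long ord = docs.nextOrd();          // never returns the missing ordinal (0)
        consume(ord);
    }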
@@ -44,19 +44,6 @@ public class SinglePackedOrdinals implements Ordinals {
         this.reader = reader;
     }
 
-    @Override
-    public boolean hasSingleArrayBackingStorage() {
-        return reader.hasArray();
-    }
-
-    @Override
-    public Object getBackingStorage() {
-        if (reader.hasArray()) {
-            return reader.getArray();
-        }
-        return reader;
-    }
-
     @Override
     public long getMemorySizeInBytes() {
         if (size == -1) {
@@ -96,7 +83,7 @@ public class SinglePackedOrdinals implements Ordinals {
         private final PackedInts.Reader reader;
 
         private final LongsRef longsScratch = new LongsRef(1);
-        private final SingleValueIter iter = new SingleValueIter();
+        private long currentOrdinal;
 
         public Docs(SinglePackedOrdinals parent, PackedInts.Reader reader) {
             this.parent = parent;
@@ -130,26 +117,34 @@ public class SinglePackedOrdinals implements Ordinals {
 
         @Override
         public long getOrd(int docId) {
-            return reader.get(docId);
+            return currentOrdinal = reader.get(docId);
         }
 
         @Override
         public LongsRef getOrds(int docId) {
             final long ordinal = reader.get(docId);
-            if (ordinal == 0) {
-                longsScratch.length = 0;
-            } else {
-                longsScratch.offset = 0;
-                longsScratch.length = 1;
-                longsScratch.longs[0] = ordinal;
-            }
+            longsScratch.offset = 0;
+            longsScratch.length = (int) Math.min(currentOrdinal, 1);
+            longsScratch.longs[0] = currentOrdinal = ordinal;
             return longsScratch;
         }
 
         @Override
-        public Iter getIter(int docId) {
-            return iter.reset((int) reader.get(docId));
+        public long nextOrd() {
+            assert currentOrdinal > 0;
+            return currentOrdinal;
         }
+
+        @Override
+        public int setDocument(int docId) {
+            currentOrdinal = reader.get(docId);
+            // either this is > 1 or 0 - in any case it prevents a branch!
+            return (int) Math.min(currentOrdinal, 1);
+        }
+
+        @Override
+        public long currentOrd() {
+            return currentOrdinal;
+        }
     }
 }
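The `Math.min(currentOrdinal, 1)` idiom above maps the missing ordinal (0) to a value count of 0 and any real ordinal to a count of 1 without an `if`. A standalone illustration of that step, with `reader` standing in for the packed-ints reader of the class above:

    // Sketch of the branch-free length computation used by setDocument():
    long currentOrdinal = reader.get(docId);          // 0 means "no value"
    int length = (int) Math.min(currentOrdinal, 1);   // 0 -> 0 values, >= 1 -> exactly 1 value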
@@ -90,27 +90,19 @@ public class BinaryDVAtomicFieldData implements AtomicFieldData<ScriptDocValues.
 
         return new BytesValues(false) {
 
-            final BytesValues.Iter.Single iter = new BytesValues.Iter.Single();
-            final BytesRef spare = new BytesRef();
-
             @Override
             public boolean hasValue(int docId) {
                 return docsWithField.get(docId);
             }
 
             @Override
-            public BytesRef getValueScratch(int docId, BytesRef ret) {
-                values.get(docId, ret);
-                return ret;
-            }
-
-            @Override
-            public Iter getIter(int docId) {
-                if (!docsWithField.get(docId)) {
-                    return BytesValues.Iter.Empty.INSTANCE;
+            public BytesRef getValue(int docId) {
+                if (docsWithField.get(docId)) {
+                    values.get(docId, scratch);
+                    return scratch;
                 }
-                values.get(docId, spare);
-                return iter.reset(spare, -1L);
+                scratch.length = 0;
+                return scratch;
             }
 
         };
@@ -0,0 +1,45 @@
+/*
+ * Licensed to ElasticSearch and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. ElasticSearch licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.fielddata.plain;
+
+import org.elasticsearch.index.fielddata.DoubleValues;
+
+/**
+ * Package private base class for dense long values.
+ */
+abstract class DenseDoubleValues extends DoubleValues {
+
+    protected DenseDoubleValues(boolean multiValued) {
+        super(multiValued);
+    }
+
+    @Override
+    public final boolean hasValue(int docId) {
+        return true;
+    }
+
+    public final double getValueMissing(int docId, double missingValue) {
+        assert hasValue(docId);
+        assert !isMultiValued();
+        return getValue(docId);
+    }
+
+}
@@ -0,0 +1,48 @@
+/*
+ * Licensed to ElasticSearch and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. ElasticSearch licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.index.fielddata.plain;
+
+import org.elasticsearch.index.fielddata.LongValues;
+
+/**
+ * Package private base class for dense long values.
+ */
+abstract class DenseLongValues extends LongValues {
+
+    protected DenseLongValues(boolean multiValued) {
+        super(multiValued);
+    }
+
+    @Override
+    public final boolean hasValue(int docId) {
+        return true;
+    }
+
+    public final long getValueMissing(int docId, long missingValue) {
+        assert hasValue(docId);
+        assert !isMultiValued();
+        return getValue(docId);
+    }
+
+    @Override
+    public int setDocument(int docId) {
+        this.docId = docId;
+        return 1;
+    }
+}
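A dense values source always has exactly one value per document, which is why `setDocument` can unconditionally return 1 above. A hypothetical subclass backed by a plain array might look like this sketch (the class name and `data` array are illustrative only):

    // Minimal sketch of a DenseLongValues subclass, assuming a simple long[] backing store.
    final class ArrayLongValues extends DenseLongValues {
        private final long[] data;

        ArrayLongValues(long[] data) {
            super(false);           // single-valued
            this.data = data;
        }

        @Override
        public long getValue(int docId) {
            return data[docId];
        }

        @Override
        public long nextValue() {
            return data[docId];     // docId was recorded by setDocument(int)
        }
    }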
@@ -155,6 +155,7 @@ public abstract class DoubleArrayAtomicFieldData extends AbstractAtomicNumericFi
 
             @Override
             public final long getValueByOrd(long ord) {
+                assert ord != Ordinals.MISSING_ORDINAL;
                 return (long) values.get(ord);
             }
         }
@@ -170,6 +171,7 @@ public abstract class DoubleArrayAtomicFieldData extends AbstractAtomicNumericFi
 
             @Override
             public double getValueByOrd(long ord) {
+                assert ord != Ordinals.MISSING_ORDINAL;
                 return values.get(ord);
             }
         }
@@ -322,7 +324,7 @@ public abstract class DoubleArrayAtomicFieldData extends AbstractAtomicNumericFi
             return new DoubleValues(values);
         }
 
-        static class LongValues extends org.elasticsearch.index.fielddata.LongValues.Dense {
+        static final class LongValues extends DenseLongValues {
 
             private final BigDoubleArrayList values;
 
@@ -336,9 +338,16 @@ public abstract class DoubleArrayAtomicFieldData extends AbstractAtomicNumericFi
                 return (long) values.get(docId);
             }
 
+            @Override
+            public long nextValue() {
+                return (long) values.get(docId);
+            }
+
+
         }
 
-        static class DoubleValues extends org.elasticsearch.index.fielddata.DoubleValues.Dense {
+        static final class DoubleValues extends DenseDoubleValues {
 
             private final BigDoubleArrayList values;
 
@@ -351,7 +360,12 @@ public abstract class DoubleArrayAtomicFieldData extends AbstractAtomicNumericFi
             public double getValue(int docId) {
                 return values.get(docId);
             }
 
+            @Override
+            public double nextValue() {
+                return values.get(docId);
+            }
+
         }
     }
 }
@@ -0,0 +1,56 @@
+/*
+ * Licensed to ElasticSearch and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. ElasticSearch licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.index.fielddata.plain;
+
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.ElasticSearchIllegalStateException;
+import org.elasticsearch.index.fielddata.BytesValues;
+import org.elasticsearch.index.fielddata.ordinals.Ordinals;
+
+/**
+ * An empty {@link org.elasticsearch.index.fielddata.BytesValues.WithOrdinals} implementation
+ */
+final class EmptyByteValuesWithOrdinals extends BytesValues.WithOrdinals {
+
+    EmptyByteValuesWithOrdinals(Ordinals.Docs ordinals) {
+        super(ordinals);
+    }
+
+    @Override
+    public BytesRef getValueByOrd(long ord) {
+        scratch.length = 0;
+        return scratch;
+    }
+
+    @Override
+    public int setDocument(int docId) {
+        return 0;
+    }
+
+    @Override
+    public BytesRef nextValue() {
+        throw new ElasticSearchIllegalStateException("Empty BytesValues has no next value");
+    }
+
+    @Override
+    public int currentValueHash() {
+        throw new ElasticSearchIllegalStateException("Empty BytesValues has no hash for the current value");
+    }
+
+}
@@ -21,11 +21,9 @@ package org.elasticsearch.index.fielddata.plain;
 
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IntsRef;
-import org.apache.lucene.util.fst.BytesRefFSTEnum;
-import org.apache.lucene.util.fst.FST;
+import org.apache.lucene.util.fst.*;
 import org.apache.lucene.util.fst.FST.Arc;
 import org.apache.lucene.util.fst.FST.BytesReader;
-import org.apache.lucene.util.fst.Util;
 import org.elasticsearch.common.util.BigIntArray;
 import org.elasticsearch.index.fielddata.AtomicFieldData;
 import org.elasticsearch.index.fielddata.ScriptDocValues;
@@ -94,7 +92,7 @@ public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<Scr
     @Override
     public BytesValues.WithOrdinals getBytesValues() {
         assert fst != null;
-        return ordinals.isMultiValued() ? new BytesValues.Multi(fst, ordinals.ordinals()) : new BytesValues.Single(fst, ordinals.ordinals());
+        return new BytesValues(fst, ordinals.ordinals());
     }
 
 
@@ -126,10 +124,10 @@ public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<Scr
             }
             this.hashes = hashes;
         }
-        return ordinals.isMultiValued() ? new BytesValues.MultiHashed(fst, ordinals.ordinals(), hashes) : new BytesValues.SingleHashed(fst, ordinals.ordinals(), hashes);
+        return new HashedBytesValues(fst, ordinals.ordinals(), hashes);
     }
 
-    static abstract class BytesValues extends org.elasticsearch.index.fielddata.BytesValues.WithOrdinals {
+    static class BytesValues extends org.elasticsearch.index.fielddata.BytesValues.WithOrdinals {
 
         protected final FST<Long> fst;
         protected final Ordinals.Docs ordinals;
@@ -148,112 +146,40 @@ public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<Scr
         }
 
         @Override
-        public BytesRef getValueScratchByOrd(long ord, BytesRef ret) {
-            if (ord == 0) {
-                ret.length = 0;
-                return ret;
-            }
+        public BytesRef getValueByOrd(long ord) {
+            assert ord != Ordinals.MISSING_ORDINAL;
             in.setPosition(0);
             fst.getFirstArc(firstArc);
             try {
                 IntsRef output = Util.getByOutput(fst, ord, in, firstArc, scratchArc, scratchInts);
-                ret.grow(output.length);
-                ret.length = ret.offset = 0;
-                Util.toBytesRef(output, ret);
+                scratch.length = scratch.offset = 0;
+                scratch.grow(output.length);
+                Util.toBytesRef(output, scratch);
             } catch (IOException ex) {
                 //bogus
             }
-            return ret;
+            return scratch;
         }
+    }
 
-        static class Single extends BytesValues {
-            private final Iter.Single iter;
-
-            Single(FST<Long> fst, Ordinals.Docs ordinals) {
-                super(fst, ordinals);
-                assert !ordinals.isMultiValued();
-                this.iter = newSingleIter();
-            }
-
-            @Override
-            public Iter getIter(int docId) {
-                long ord = ordinals.getOrd(docId);
-                if (ord == 0) return Iter.Empty.INSTANCE;
-                return iter.reset(getValueByOrd(ord), ord);
-            }
-        }
-
-        static final class SingleHashed extends Single {
-            private final BigIntArray hashes;
-
-            SingleHashed(FST<Long> fst, Docs ordinals, BigIntArray hashes) {
-                super(fst, ordinals);
-                this.hashes = hashes;
-            }
-
-            @Override
-            protected Iter.Single newSingleIter() {
-                return new Iter.Single() {
-                    public int hash() {
-                        return hashes.get(ord);
-                    }
-                };
-            }
-
-            @Override
-            public int getValueHashed(int docId, BytesRef ret) {
-                final long ord = ordinals.getOrd(docId);
-                getValueScratchByOrd(ord, ret);
-                return hashes.get(ord);
-            }
-        }
-
-        static class Multi extends BytesValues {
-
-            private final Iter.Multi iter;
-
-            Multi(FST<Long> fst, Ordinals.Docs ordinals) {
-                super(fst, ordinals);
-                assert ordinals.isMultiValued();
-                this.iter = newMultiIter();
-            }
-
-            @Override
-            public Iter getIter(int docId) {
-                return iter.reset(ordinals.getIter(docId));
-            }
-        }
-
-        static final class MultiHashed extends Multi {
-            private final BigIntArray hashes;
-
-            MultiHashed(FST<Long> fst, Docs ordinals, BigIntArray hashes) {
-                super(fst, ordinals);
-                this.hashes = hashes;
-            }
-
-            @Override
-            protected Iter.Multi newMultiIter() {
-                return new Iter.Multi(this) {
-                    public int hash() {
-                        return hashes.get(ord);
-                    }
-                };
-            }
-
-            @Override
-            public int getValueHashed(int docId, BytesRef ret) {
-                final long ord = ordinals.getOrd(docId);
-                getValueScratchByOrd(ord, ret);
-                return hashes.get(ord);
-            }
-        }
+    static final class HashedBytesValues extends BytesValues {
+        private final BigIntArray hashes;
+
+        HashedBytesValues(FST<Long> fst, Docs ordinals, BigIntArray hashes) {
+            super(fst, ordinals);
+            this.hashes = hashes;
+        }
+
+        @Override
+        public int currentValueHash() {
+            assert ordinals.currentOrd() >= 0;
+            return hashes.get(ordinals.currentOrd());
+        }
     }
 
 
-    static class Empty extends FSTBytesAtomicFieldData {
+    final static class Empty extends FSTBytesAtomicFieldData {
 
         Empty(int numDocs) {
             super(null, new EmptyOrdinals(numDocs));
@@ -276,7 +202,7 @@ public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<Scr
 
         @Override
         public BytesValues.WithOrdinals getBytesValues() {
-            return new BytesValues.WithOrdinals.Empty(ordinals.ordinals());
+            return new EmptyByteValuesWithOrdinals(ordinals.ordinals());
         }
 
         @Override
@@ -153,6 +153,7 @@ public abstract class FloatArrayAtomicFieldData extends AbstractAtomicNumericFie
 
             @Override
             public long getValueByOrd(long ord) {
+                assert ord != Ordinals.MISSING_ORDINAL;
                 return (long) values.get(ord);
             }
         }
@@ -323,7 +324,7 @@ public abstract class FloatArrayAtomicFieldData extends AbstractAtomicNumericFie
         }
 
 
-        static class LongValues extends org.elasticsearch.index.fielddata.LongValues.Dense {
+        static class LongValues extends DenseLongValues {
 
             private final BigFloatArrayList values;
 
@@ -336,10 +337,16 @@ public abstract class FloatArrayAtomicFieldData extends AbstractAtomicNumericFie
             public long getValue(int docId) {
                 return (long) values.get(docId);
             }
 
+            @Override
+            public long nextValue() {
+                return (long) values.get(docId);
+            }
+
+
         }
 
-        static class DoubleValues extends org.elasticsearch.index.fielddata.DoubleValues.Dense {
+        static class DoubleValues extends DenseDoubleValues {
 
             private final BigFloatArrayList values;
 
@@ -352,6 +359,12 @@ public abstract class FloatArrayAtomicFieldData extends AbstractAtomicNumericFie
             public double getValue(int docId) {
                 return (double) values.get(docId);
             }
 
+            @Override
+            public double nextValue() {
+                return values.get(docId);
+            }
+
         }
     }
 }
@@ -147,111 +147,39 @@ public abstract class GeoPointDoubleArrayAtomicFieldData extends AtomicGeoPointF
         private final Ordinals.Docs ordinals;
 
         private final GeoPoint scratch = new GeoPoint();
-        private final ValuesIter valuesIter;
-        private final SafeValuesIter safeValuesIter;
 
         GeoPointValuesWithOrdinals(BigDoubleArrayList lon, BigDoubleArrayList lat, Ordinals.Docs ordinals) {
             super(ordinals.isMultiValued());
             this.lon = lon;
             this.lat = lat;
             this.ordinals = ordinals;
-            this.valuesIter = new ValuesIter(lon, lat);
-            this.safeValuesIter = new SafeValuesIter(lon, lat);
         }
 
         @Override
         public boolean hasValue(int docId) {
-            return ordinals.getOrd(docId) != 0;
+            return ordinals.getOrd(docId) != Ordinals.MISSING_ORDINAL;
         }
 
         @Override
         public GeoPoint getValue(int docId) {
             long ord = ordinals.getOrd(docId);
-            if (ord == 0L) {
+            if (ord == Ordinals.MISSING_ORDINAL) {
                 return null;
             }
             return scratch.reset(lat.get(ord), lon.get(ord));
         }
 
         @Override
-        public GeoPoint getValueSafe(int docId) {
-            long ord = ordinals.getOrd(docId);
-            if (ord == 0L) {
-                return null;
-            }
-            return new GeoPoint(lat.get(ord), lon.get(ord));
+        public GeoPoint nextValue() {
+            final long ord = ordinals.nextOrd();
+            assert ord > 0;
+            return scratch.reset(lat.get(ord), lon.get(ord));
         }
 
         @Override
-        public Iter getIter(int docId) {
-            return valuesIter.reset(ordinals.getIter(docId));
-        }
-
-        @Override
-        public Iter getIterSafe(int docId) {
-            return safeValuesIter.reset(ordinals.getIter(docId));
-        }
-
-        static class ValuesIter implements Iter {
-
-            private final BigDoubleArrayList lon, lat;
-            private final GeoPoint scratch = new GeoPoint();
-
-            private Ordinals.Docs.Iter ordsIter;
-            private long ord;
-
-            ValuesIter(BigDoubleArrayList lon, BigDoubleArrayList lat) {
-                this.lon = lon;
-                this.lat = lat;
-            }
-
-            public ValuesIter reset(Ordinals.Docs.Iter ordsIter) {
-                this.ordsIter = ordsIter;
-                this.ord = ordsIter.next();
-                return this;
-            }
-
-            public boolean hasNext() {
-                return ord != 0;
-            }
-
-            public GeoPoint next() {
-                scratch.reset(lat.get(ord), lon.get(ord));
-                ord = ordsIter.next();
-                return scratch;
-            }
-        }
-
-        static class SafeValuesIter implements Iter {
-
-            private final BigDoubleArrayList lon, lat;
-
-            private Ordinals.Docs.Iter ordsIter;
-            private long ord;
-
-            SafeValuesIter(BigDoubleArrayList lon, BigDoubleArrayList lat) {
-                this.lon = lon;
-                this.lat = lat;
-            }
-
-            public SafeValuesIter reset(Ordinals.Docs.Iter ordsIter) {
-                this.ordsIter = ordsIter;
-                this.ord = ordsIter.next();
-                return this;
-            }
-
-            @Override
-            public boolean hasNext() {
-                return ord != 0;
-            }
-
-            @Override
-            public GeoPoint next() {
-                GeoPoint value = new GeoPoint(lat.get(ord), lon.get(ord));
-                ord = ordsIter.next();
-                return value;
-            }
+        public int setDocument(int docId) {
+            this.docId = docId;
+            return ordinals.setDocument(docId);
         }
     }
 }
@@ -307,9 +235,7 @@ public abstract class GeoPointDoubleArrayAtomicFieldData extends AtomicGeoPointF
         private final BigDoubleArrayList lon;
         private final BigDoubleArrayList lat;
         private final FixedBitSet set;
 
         private final GeoPoint scratch = new GeoPoint();
-        private final Iter.Single iter = new Iter.Single();
-
 
         GeoPointValuesSingleFixedSet(BigDoubleArrayList lon, BigDoubleArrayList lat, FixedBitSet set) {
@@ -332,33 +258,6 @@ public abstract class GeoPointDoubleArrayAtomicFieldData extends AtomicGeoPointF
                 return null;
             }
         }
-
-        @Override
-        public GeoPoint getValueSafe(int docId) {
-            if (set.get(docId)) {
-                return new GeoPoint(lat.get(docId), lon.get(docId));
-            } else {
-                return null;
-            }
-        }
-
-        @Override
-        public Iter getIter(int docId) {
-            if (set.get(docId)) {
-                return iter.reset(scratch.reset(lat.get(docId), lon.get(docId)));
-            } else {
-                return Iter.Empty.INSTANCE;
-            }
-        }
-
-        @Override
-        public Iter getIterSafe(int docId) {
-            if (set.get(docId)) {
-                return iter.reset(new GeoPoint(lat.get(docId), lon.get(docId)));
-            } else {
-                return Iter.Empty.INSTANCE;
-            }
-        }
     }
 }
 
@@ -412,7 +311,6 @@ public abstract class GeoPointDoubleArrayAtomicFieldData extends AtomicGeoPointF
         private final BigDoubleArrayList lat;
 
         private final GeoPoint scratch = new GeoPoint();
-        private final Iter.Single iter = new Iter.Single();
 
 
         GeoPointValuesSingle(BigDoubleArrayList lon, BigDoubleArrayList lat) {
@@ -430,21 +328,6 @@ public abstract class GeoPointDoubleArrayAtomicFieldData extends AtomicGeoPointF
         public GeoPoint getValue(int docId) {
             return scratch.reset(lat.get(docId), lon.get(docId));
         }
-
-        @Override
-        public GeoPoint getValueSafe(int docId) {
-            return new GeoPoint(lat.get(docId), lon.get(docId));
-        }
-
-        @Override
-        public Iter getIter(int docId) {
-            return iter.reset(scratch.reset(lat.get(docId), lon.get(docId)));
-        }
-
-        @Override
-        public Iter getIterSafe(int docId) {
-            return iter.reset(new GeoPoint(lat.get(docId), lon.get(docId)));
-        }
     }
 }
}
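With the Iter classes gone, callers iterate geo points the same way as every other value type. A usage sketch follows; the `values` variable and the `process` callback are assumptions for the example:

    // Sketch: iterating all geo points of a document with the stateful API.
    final int numValues = values.setDocument(docId);
    for (int i = 0; i < numValues; i++) {
        GeoPoint point = values.nextValue();   // shared scratch instance, copy it if retained
        process(point.lat(), point.lon());
    }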
@@ -154,7 +154,8 @@ public abstract class PackedArrayAtomicFieldData extends AbstractAtomicNumericFi
 
             @Override
             public long getValueByOrd(long ord) {
-                return ord == 0 ? 0L : values.get(ord - 1);
+                assert ord != Ordinals.MISSING_ORDINAL;
+                return values.get(ord - 1);
             }
         }
 
@@ -169,7 +170,8 @@ public abstract class PackedArrayAtomicFieldData extends AbstractAtomicNumericFi
 
             @Override
             public double getValueByOrd(long ord) {
-                return ord == 0 ? 0L : values.get(ord - 1);
+                assert ord != Ordinals.MISSING_ORDINAL;
+                return values.get(ord - 1);
             }
 
 
@@ -333,7 +335,7 @@ public abstract class PackedArrayAtomicFieldData extends AbstractAtomicNumericFi
             return new DoubleValues(values, minValue);
         }
 
-        static class LongValues extends org.elasticsearch.index.fielddata.LongValues.Dense {
+        static class LongValues extends DenseLongValues {
 
             private final PackedInts.Mutable values;
             private final long minValue;
@@ -349,9 +351,15 @@ public abstract class PackedArrayAtomicFieldData extends AbstractAtomicNumericFi
                 return minValue + values.get(docId);
             }
 
+            @Override
+            public long nextValue() {
+                return minValue + values.get(docId);
+            }
+
+
         }
 
-        static class DoubleValues extends org.elasticsearch.index.fielddata.DoubleValues.Dense {
+        static class DoubleValues extends DenseDoubleValues {
 
             private final PackedInts.Mutable values;
             private final long minValue;
@@ -366,6 +374,11 @@ public abstract class PackedArrayAtomicFieldData extends AbstractAtomicNumericFi
             public double getValue(int docId) {
                 return minValue + values.get(docId);
             }
 
+            @Override
+            public double nextValue() {
+                return minValue + values.get(docId);
+            }
+
         }
     }
 }
@@ -165,7 +165,7 @@ public class PackedArrayIndexFieldData extends AbstractIndexFieldData<AtomicNume
                 }
                 for (int i = 0; i < reader.maxDoc(); i++) {
                     final long ord = ordinals.getOrd(i);
-                    if (ord > 0) {
+                    if (ord != Ordinals.MISSING_ORDINAL) {
                         sValues.set(i, values.get(ord - 1) - minValue);
                     }
                 }
@@ -107,15 +107,13 @@ public class PagedBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<S
 
     @Override
     public BytesValues.WithOrdinals getBytesValues() {
-        return ordinals.isMultiValued() ? new BytesValues.Multi(bytes, termOrdToBytesOffset, ordinals.ordinals()) : new BytesValues.Single(
-                bytes, termOrdToBytesOffset, ordinals.ordinals());
+        return new BytesValues(bytes, termOrdToBytesOffset, ordinals.ordinals());
     }
 
     @Override
     public org.elasticsearch.index.fielddata.BytesValues.WithOrdinals getHashedBytesValues() {
         final BigIntArray hashes = getHashes();
-        return ordinals.isMultiValued() ? new BytesValues.MultiHashed(hashes, bytes, termOrdToBytesOffset, ordinals.ordinals())
-                : new BytesValues.SingleHashed(hashes, bytes, termOrdToBytesOffset, ordinals.ordinals());
+        return new BytesValues.HashedBytesValues(hashes, bytes, termOrdToBytesOffset, ordinals.ordinals());
     }
 
     @Override
@@ -123,14 +121,12 @@ public class PagedBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<S
         return new ScriptDocValues.Strings(getBytesValues());
     }
 
-    static abstract class BytesValues extends org.elasticsearch.index.fielddata.BytesValues.WithOrdinals {
+    static class BytesValues extends org.elasticsearch.index.fielddata.BytesValues.WithOrdinals {
 
         protected final PagedBytes.Reader bytes;
         protected final MonotonicAppendingLongBuffer termOrdToBytesOffset;
         protected final Ordinals.Docs ordinals;
 
-        protected final BytesRef scratch = new BytesRef();
-
         BytesValues(PagedBytes.Reader bytes, MonotonicAppendingLongBuffer termOrdToBytesOffset, Ordinals.Docs ordinals) {
             super(ordinals);
             this.bytes = bytes;
@@ -139,116 +135,60 @@ public class PagedBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<S
         }
 
         @Override
-        public BytesRef makeSafe(BytesRef bytes) {
+        public BytesRef copyShared() {
             // when we fill from the pages bytes, we just reference an existing buffer slice, its enough
             // to create a shallow copy of the bytes to be safe for "reads".
-            return new BytesRef(bytes.bytes, bytes.offset, bytes.length);
+            return new BytesRef(scratch.bytes, scratch.offset, scratch.length);
         }
 
         @Override
-        public Ordinals.Docs ordinals() {
+        public final Ordinals.Docs ordinals() {
             return this.ordinals;
         }
 
         @Override
-        public BytesRef getValueScratchByOrd(long ord, BytesRef ret) {
-            bytes.fill(ret, termOrdToBytesOffset.get(ord));
-            return ret;
+        public final BytesRef getValueByOrd(long ord) {
+            assert ord != Ordinals.MISSING_ORDINAL;
+            bytes.fill(scratch, termOrdToBytesOffset.get(ord));
+            return scratch;
         }
 
-        static class Single extends BytesValues {
-
-            private final Iter.Single iter;
-
-            Single(PagedBytes.Reader bytes, MonotonicAppendingLongBuffer termOrdToBytesOffset, Ordinals.Docs ordinals) {
-                super(bytes, termOrdToBytesOffset, ordinals);
-                assert !ordinals.isMultiValued();
-                iter = newSingleIter();
-            }
-
-            @Override
-            public Iter getIter(int docId) {
-                long ord = ordinals.getOrd(docId);
-                if (ord == 0) return Iter.Empty.INSTANCE;
-                bytes.fill(scratch, termOrdToBytesOffset.get(ord));
-                return iter.reset(scratch, ord);
-            }
-        }
-
-        static final class SingleHashed extends Single {
-
-            private final BigIntArray hashes;
-
-            SingleHashed(BigIntArray hashes, Reader bytes, MonotonicAppendingLongBuffer termOrdToBytesOffset, Docs ordinals) {
-                super(bytes, termOrdToBytesOffset, ordinals);
-                this.hashes = hashes;
-            }
-
-            @Override
-            protected Iter.Single newSingleIter() {
-                return new Iter.Single() {
-                    public int hash() {
-                        return hashes.get(ord);
-                    }
-                };
-            }
-
-            @Override
-            public int getValueHashed(int docId, BytesRef ret) {
-                final long ord = ordinals.getOrd(docId);
-                getValueScratchByOrd(ord, ret);
-                return hashes.get(ord);
-            }
-        }
-
-        static class Multi extends BytesValues {
-
-            private final Iter.Multi iter;
-
-            Multi(PagedBytes.Reader bytes, MonotonicAppendingLongBuffer termOrdToBytesOffset, Ordinals.Docs ordinals) {
-                super(bytes, termOrdToBytesOffset, ordinals);
-                assert ordinals.isMultiValued();
-                this.iter = newMultiIter();
-            }
-
-            @Override
-            public Iter getIter(int docId) {
-                return iter.reset(ordinals.getIter(docId));
-            }
-        }
-
-        static final class MultiHashed extends Multi {
-
-            private final BigIntArray hashes;
-
-            MultiHashed(BigIntArray hashes, Reader bytes, MonotonicAppendingLongBuffer termOrdToBytesOffset, Docs ordinals) {
-                super(bytes, termOrdToBytesOffset, ordinals);
-                this.hashes = hashes;
-            }
-
-            @Override
-            protected Iter.Multi newMultiIter() {
-                return new Iter.Multi(this) {
-                    public int hash() {
-                        return hashes.get(ord);
-                    }
-                };
-            }
-
-            @Override
-            public int getValueHashed(int docId, BytesRef ret) {
-                long ord = ordinals.getOrd(docId);
-                getValueScratchByOrd(ord, ret);
-                return hashes.get(ord);
-            }
-
-        }
+        @Override
+        public final BytesRef getValue(int docId) {
+            final long ord = ordinals.getOrd(docId);
+            if (ord == Ordinals.MISSING_ORDINAL) {
+                scratch.length = 0;
+                return scratch;
+            }
+            bytes.fill(scratch, termOrdToBytesOffset.get(ord));
+            return scratch;
+        }
+
+        @Override
+        public final BytesRef nextValue() {
+            bytes.fill(scratch, termOrdToBytesOffset.get(ordinals.nextOrd()));
+            return scratch;
+        }
+
+        static final class HashedBytesValues extends BytesValues {
+
+            private final BigIntArray hashes;
+
+            HashedBytesValues(BigIntArray hashes, Reader bytes, MonotonicAppendingLongBuffer termOrdToBytesOffset, Docs ordinals) {
+                super(bytes, termOrdToBytesOffset, ordinals);
+                this.hashes = hashes;
+            }
+
+            @Override
+            public int currentValueHash() {
+                assert ordinals.currentOrd() >= 0;
+                return hashes.get(ordinals.currentOrd());
+            }
+        }
     }
 
-    static class Empty extends PagedBytesAtomicFieldData {
+    private final static class Empty extends PagedBytesAtomicFieldData {
 
         Empty(int numDocs) {
             super(emptyBytes(), 0, new MonotonicAppendingLongBuffer(), new EmptyOrdinals(numDocs));
@@ -282,7 +222,7 @@ public class PagedBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<S
 
         @Override
         public BytesValues.WithOrdinals getBytesValues() {
-            return new BytesValues.WithOrdinals.Empty(ordinals.ordinals());
+            return new EmptyByteValuesWithOrdinals(ordinals.ordinals());
         }
 
         @Override
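The hashed variants now expose the hash of the value most recently returned by the iteration via `currentValueHash()` instead of a per-Iter `hash()` method. A hedged consumer sketch, with `bytesValues` standing in for a hashed BytesValues instance:

    // Sketch: consuming values and their precomputed hashes together.
    final int count = bytesValues.setDocument(docId);
    for (int i = 0; i < count; i++) {
        BytesRef term = bytesValues.nextValue();
        int hash = bytesValues.currentValueHash();  // hash of the value just returned
        // ... use term and hash together
    }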
@@ -113,67 +113,46 @@ abstract class SortedSetDVAtomicFieldData {
         }
     }
 
-    static abstract class AbstractSortedSetValues extends BytesValues.WithOrdinals {
+    static class SortedSetValues extends BytesValues.WithOrdinals {
 
         protected final SortedSetDocValues values;
-        protected BytesValues.Iter.Multi iter;
 
-        AbstractSortedSetValues(AtomicReader reader, String field, SortedSetDocValues values) {
+        SortedSetValues(AtomicReader reader, String field, SortedSetDocValues values) {
             super(new SortedSetDocs(new SortedSetOrdinals(reader, field, values.getValueCount()), values));
             this.values = values;
         }
 
         @Override
-        public BytesRef getValueScratchByOrd(long ord, BytesRef ret) {
-            if (ord == 0) {
-                ret.length = 0;
-                return ret;
-            }
-            values.lookupOrd(ord - 1, ret);
-            return ret;
+        public BytesRef getValueByOrd(long ord) {
+            assert ord != Ordinals.MISSING_ORDINAL;
+            values.lookupOrd(ord - 1, scratch);
+            return scratch;
         }
 
         @Override
-        public Iter getIter(int docId) {
-            return iter.reset(ordinals.getIter(docId));
+        public BytesRef nextValue() {
+            values.lookupOrd(ordinals.nextOrd()-1, scratch);
+            return scratch;
         }
 
     }
 
-    static class SortedSetValues extends AbstractSortedSetValues {
-
-        SortedSetValues(AtomicReader reader, String field, SortedSetDocValues values) {
-            super(reader, field, values);
-            this.iter = new Iter.Multi(this);
-        }
-
-    }
-
-    static class SortedSetHashedValues extends AbstractSortedSetValues {
+    static final class SortedSetHashedValues extends SortedSetValues {
 
         private final IntArray hashes;
 
         SortedSetHashedValues(AtomicReader reader, String field, SortedSetDocValues values, IntArray hashes) {
             super(reader, field, values);
             this.hashes = hashes;
-            this.iter = new Iter.Multi(this) {
-                @Override
-                public int hash() {
-                    return SortedSetHashedValues.this.hashes.get(ord);
-                }
-            };
         }
 
         @Override
-        public int getValueHashed(int docId, BytesRef spare) {
-            long ord = ordinals.getOrd(docId);
-            getValueScratchByOrd(ord, spare);
-            return hashes.get(ord);
+        public int currentValueHash() {
+            assert ordinals.currentOrd() >= 0;
+            return hashes.get(ordinals.currentOrd());
         }
 
     }
 
-    static class SortedSetOrdinals implements Ordinals {
+    static final class SortedSetOrdinals implements Ordinals {
 
         // We don't store SortedSetDocValues as a member because Ordinals must be thread-safe
         private final AtomicReader reader;
@@ -187,16 +166,6 @@ abstract class SortedSetDVAtomicFieldData {
             this.numOrds = numOrds;
         }
 
-        @Override
-        public boolean hasSingleArrayBackingStorage() {
-            return false;
-        }
-
-        @Override
-        public Object getBackingStorage() {
-            return null;
-        }
-
         @Override
         public long getMemorySizeInBytes() {
             // Ordinals can't be distinguished from the atomic field data instance
@@ -237,7 +206,8 @@ abstract class SortedSetDVAtomicFieldData {
         private final SortedSetOrdinals ordinals;
         private final SortedSetDocValues values;
         private final LongsRef longScratch;
-        private final LongsIter iter = new LongsIter();
+        private int ordIndex = Integer.MAX_VALUE;
+        private long currentOrdinal = -1;
 
         SortedSetDocs(SortedSetOrdinals ordinals, SortedSetDocValues values) {
             this.ordinals = ordinals;
@@ -273,7 +243,7 @@ abstract class SortedSetDVAtomicFieldData {
         @Override
         public long getOrd(int docId) {
             values.setDocument(docId);
-            return 1 + values.nextOrd();
+            return currentOrdinal = 1 + values.nextOrd();
         }
 
         @Override
@@ -289,34 +259,23 @@ abstract class SortedSetDVAtomicFieldData {
         }
 
         @Override
-        public Iter getIter(int docId) {
-            // For now, we consume all ords and pass them to the iter instead of doing it in a streaming way because Lucene's
-            // SORTED_SET doc values are cached per thread, you can't have a fully independent instance
-            iter.reset(getOrds(docId));
-            return iter;
+        public long nextOrd() {
+            assert ordIndex < longScratch.length;
+            return currentOrdinal = longScratch.longs[ordIndex++];
         }
 
-    }
-
-    static class LongsIter implements Ordinals.Docs.Iter {
-
-        private LongsRef ords;
-        private int i;
-
         @Override
-        public long next() {
-            if (i == ords.length) {
-                return 0L;
-            }
-            return ords.longs[i++];
+        public int setDocument(int docId) {
+            // For now, we consume all ords and pass them to the iter instead of doing it in a streaming way because Lucene's
+            // SORTED_SET doc values are cached per thread, you can't have a fully independent instance
+            final LongsRef ords = getOrds(docId);
+            ordIndex = 0;
+            return ords.length;
         }
 
-        public void reset(LongsRef ords) {
-            this.ords = ords;
-            assert ords.offset == 0;
-            i = 0;
+        @Override
+        public long currentOrd() {
+            return currentOrdinal;
         }
 
     }
 
 }
|
@ -24,7 +24,14 @@ import org.elasticsearch.index.fielddata.AtomicFieldData;
|
||||||
import org.elasticsearch.index.fielddata.ScriptDocValues;
|
import org.elasticsearch.index.fielddata.ScriptDocValues;
|
||||||
import org.elasticsearch.index.fielddata.ScriptDocValues.Strings;
|
import org.elasticsearch.index.fielddata.ScriptDocValues.Strings;
|
||||||
|
|
||||||
public class SortedSetDVBytesAtomicFieldData extends SortedSetDVAtomicFieldData implements AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> {
|
/**
|
||||||
|
* An {@link AtomicFieldData} implementation that uses Lucene {@link org.apache.lucene.index.SortedSetDocValues}.
|
||||||
|
*/
|
||||||
|
public final class SortedSetDVBytesAtomicFieldData extends SortedSetDVAtomicFieldData implements AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> {
|
||||||
|
|
||||||
|
/* NOTE: This class inherits the methods getBytesValues() and getHashedBytesValues()
|
||||||
|
* from SortedSetDVAtomicFieldData. This can cause confusion since the are
|
||||||
|
* part of the interface this class implements.*/
|
||||||
|
|
||||||
SortedSetDVBytesAtomicFieldData(AtomicReader reader, String field) {
|
SortedSetDVBytesAtomicFieldData(AtomicReader reader, String field) {
|
||||||
super(reader, field);
|
super(reader, field);
|
||||||
|
@ -39,5 +46,4 @@ public class SortedSetDVBytesAtomicFieldData extends SortedSetDVAtomicFieldData
|
||||||
public Strings getScriptValues() {
|
public Strings getScriptValues() {
|
||||||
return new ScriptDocValues.Strings(getBytesValues());
|
return new ScriptDocValues.Strings(getBytesValues());
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@@ -23,6 +23,7 @@ import org.apache.lucene.index.AtomicReader;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.index.fielddata.*;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
+import org.elasticsearch.index.fielddata.ordinals.Ordinals;
 
 public class SortedSetDVNumericAtomicFieldData extends SortedSetDVAtomicFieldData implements AtomicNumericFieldData {
 
@@ -53,9 +54,7 @@ public class SortedSetDVNumericAtomicFieldData extends SortedSetDVAtomicFieldDat
         return new LongValues.WithOrdinals(values.ordinals()) {
             @Override
             public long getValueByOrd(long ord) {
-                if (ord == 0L) {
-                    return 0L;
-                }
+                assert ord != Ordinals.MISSING_ORDINAL;
                 return numericType.toLong(values.getValueByOrd(ord));
             }
         };
@@ -67,9 +66,7 @@ public class SortedSetDVNumericAtomicFieldData extends SortedSetDVAtomicFieldDat
         return new DoubleValues.WithOrdinals(values.ordinals()) {
             @Override
             public double getValueByOrd(long ord) {
-                if (ord == 0L) {
-                    return 0d;
-                }
+                assert ord != Ordinals.MISSING_ORDINAL;
                 return numericType.toDouble(values.getValueByOrd(ord));
             }
         };
@@ -79,52 +76,23 @@ public class SortedSetDVNumericAtomicFieldData extends SortedSetDVAtomicFieldDat
     public BytesValues.WithOrdinals getBytesValues() {
         final BytesValues.WithOrdinals values = super.getBytesValues();
         return new BytesValues.WithOrdinals(values.ordinals()) {
+            final BytesRef spare = new BytesRef(16);
 
-            BytesRef spare = new BytesRef(16);
-            Iter inIter;
-            Iter iter = new Iter() {
-
-                BytesRef current = null;
-
-                @Override
-                public boolean hasNext() {
-                    return inIter.hasNext();
-                }
-
-                @Override
-                public BytesRef next() {
-                    return current = convert(inIter.next());
-                }
-
-                @Override
-                public int hash() {
-                    return current.hashCode();
-                }
-
-            };
-
-            private BytesRef convert(BytesRef spare) {
-                if (spare.length == 0) {
-                    return spare;
+            private BytesRef convert(BytesRef input, BytesRef output) {
+                if (input.length == 0) {
+                    return input;
                 }
                 if (numericType.isFloatingPoint()) {
-                    return new BytesRef(Double.toString(numericType.toDouble(spare)));
+                    output.copyChars(Double.toString(numericType.toDouble(input)));
                 } else {
-                    return new BytesRef(Long.toString(numericType.toLong(spare)));
+                    output.copyChars(Long.toString(numericType.toLong(input)));
                 }
+                return output;
             }
 
             @Override
-            public BytesRef getValueScratchByOrd(long ord, BytesRef ret) {
-                return convert(values.getValueScratchByOrd(ord, spare));
+            public BytesRef getValueByOrd(long ord) {
+                return convert(values.getValueByOrd(ord), scratch);
             }
-
-            @Override
-            public Iter getIter(int docId) {
-                inIter = values.getIter(docId);
-                return iter;
-            }
 
         };
     }
 
@@ -51,24 +51,24 @@ final class QueriesLoaderCollector extends Collector {
     @Override
     public void collect(int doc) throws IOException {
         // the _source is the query
-        BytesRef id = idValues.getValue(doc);
-        if (id == null) {
-            return;
-        }
-        fieldsVisitor.reset();
-        reader.document(doc, fieldsVisitor);
-
-        try {
-            // id is only used for logging, if we fail we log the id in the catch statement
-            final Query parseQuery = percolator.parsePercolatorDocument(null, fieldsVisitor.source());
-            if (parseQuery != null) {
-                queries.put(new HashedBytesRef(idValues.makeSafe(id)), parseQuery);
-            } else {
-                logger.warn("failed to add query [{}] - parser returned null", id);
+        if (idValues.setDocument(doc) > 0) {
+            BytesRef id = idValues.nextValue();
+            fieldsVisitor.reset();
+            reader.document(doc, fieldsVisitor);
+
+            try {
+                // id is only used for logging, if we fail we log the id in the catch statement
+                final Query parseQuery = percolator.parsePercolatorDocument(null, fieldsVisitor.source());
+                if (parseQuery != null) {
+                    queries.put(new HashedBytesRef(idValues.copyShared()), parseQuery);
+                } else {
+                    logger.warn("failed to add query [{}] - parser returned null", id);
+                }
+
+            } catch (Exception e) {
+                logger.warn("failed to add query [{}]", e, id.utf8ToString());
             }
-
-        } catch (Exception e) {
-            logger.warn("failed to add query [{}]", e, id.utf8ToString());
         }
     }
 
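This change is an instance of the recommended pattern: prefer `setDocument(int)` over a `hasValue`/`getValue` pair when the value is going to be consumed anyway, since `getValue()` no longer returns `null` for missing documents. A condensed sketch of the pattern (variable names as in the class above):

    // Sketch: consume a value only when the document actually has one.
    if (idValues.setDocument(doc) > 0) {
        BytesRef id = idValues.nextValue();          // shared scratch bytes
        BytesRef safeCopy = idValues.copyShared();   // deep-enough copy before storing elsewhere
        // ... use id / safeCopy
    }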
@@ -25,7 +25,9 @@ import org.apache.lucene.search.Filter;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.NumericUtils;
 import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
-import org.elasticsearch.index.fielddata.*;
+import org.elasticsearch.index.fielddata.DoubleValues;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
+import org.elasticsearch.index.fielddata.LongValues;

 import java.io.IOException;
@@ -132,25 +134,7 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
                     return null;

                 final LongValues values = indexFieldData.load(ctx).getLongValues();
-                return new MatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs) {
-
-                    @Override
-                    public boolean isCacheable() {
-                        return true;
-                    }
-
-                    @Override
-                    protected boolean matchDoc(int doc) {
-                        LongValues.Iter iter = values.getIter(doc);
-                        while (iter.hasNext()) {
-                            long value = iter.next();
-                            if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
-                                return true;
-                            }
-                        }
-                        return false;
-                    }
-                };
+                return new LongRangeMatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs, values, inclusiveLowerPoint, inclusiveUpperPoint);
             }
         };
     }
@@ -182,25 +166,7 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
                     return null;

                 final LongValues values = indexFieldData.load(ctx).getLongValues();
-                return new MatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs) {
-
-                    @Override
-                    public boolean isCacheable() {
-                        return true;
-                    }
-
-                    @Override
-                    protected boolean matchDoc(int doc) {
-                        LongValues.Iter iter = values.getIter(doc);
-                        while (iter.hasNext()) {
-                            long value = iter.next();
-                            if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
-                                return true;
-                            }
-                        }
-                        return false;
-                    }
-                };
+                return new LongRangeMatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs, values, inclusiveLowerPoint, inclusiveUpperPoint);
             }
         };
     }
@@ -231,25 +197,7 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
                     return null;

                 final LongValues values = indexFieldData.load(ctx).getLongValues();
-                return new MatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs) {
-
-                    @Override
-                    public boolean isCacheable() {
-                        return true;
-                    }
-
-                    @Override
-                    protected boolean matchDoc(int doc) {
-                        LongValues.Iter iter = values.getIter(doc);
-                        while (iter.hasNext()) {
-                            long value = iter.next();
-                            if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
-                                return true;
-                            }
-                        }
-                        return false;
-                    }
-                };
+                return new LongRangeMatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs, values, inclusiveLowerPoint, inclusiveUpperPoint);
             }
         };
     }
@@ -280,25 +228,8 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
                     return null;

                 final LongValues values = indexFieldData.load(ctx).getLongValues();
-                return new MatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs) {
-
-                    @Override
-                    public boolean isCacheable() {
-                        return true;
-                    }
-
-                    @Override
-                    protected boolean matchDoc(int doc) {
-                        LongValues.Iter iter = values.getIter(doc);
-                        while (iter.hasNext()) {
-                            long value = iter.next();
-                            if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
-                                return true;
-                            }
-                        }
-                        return false;
-                    }
-                };
+                return new LongRangeMatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs, values, inclusiveLowerPoint, inclusiveUpperPoint);
             }
         };
     }
@@ -333,25 +264,7 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
                     return null;

                 final DoubleValues values = indexFieldData.load(ctx).getDoubleValues();
-                return new MatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs) {
-
-                    @Override
-                    public boolean isCacheable() {
-                        return true;
-                    }
-
-                    @Override
-                    protected boolean matchDoc(int doc) {
-                        DoubleValues.Iter iter = values.getIter(doc);
-                        while (iter.hasNext()) {
-                            double value = iter.next();
-                            if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
-                                return true;
-                            }
-                        }
-                        return false;
-                    }
-                };
+                return new DoubleRangeMatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs, values, inclusiveLowerPoint, inclusiveUpperPoint);
             }
         };
     }
@@ -386,26 +299,71 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
                     return null;

                 final DoubleValues values = indexFieldData.load(ctx).getDoubleValues();
-                return new MatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs) {
-
-                    @Override
-                    public boolean isCacheable() {
-                        return true;
-                    }
-
-                    @Override
-                    protected boolean matchDoc(int doc) {
-                        DoubleValues.Iter iter = values.getIter(doc);
-                        while (iter.hasNext()) {
-                            double value = iter.next();
-                            if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
-                                return true;
-                            }
-                        }
-                        return false;
-                    }
-                };
+                return new DoubleRangeMatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs, values, inclusiveLowerPoint, inclusiveUpperPoint);
             }
         };
     }
+
+    private static final class DoubleRangeMatchDocIdSet extends MatchDocIdSet {
+        private final DoubleValues values;
+        private final double inclusiveLowerPoint;
+        private final double inclusiveUpperPoint;
+
+        protected DoubleRangeMatchDocIdSet(int maxDoc, Bits acceptDocs, final DoubleValues values, final double inclusiveLowerPoint, final double inclusiveUpperPoint) {
+            super(maxDoc, acceptDocs);
+            this.inclusiveLowerPoint = inclusiveLowerPoint;
+            this.inclusiveUpperPoint = inclusiveUpperPoint;
+            this.values = values;
+        }
+
+        @Override
+        public boolean isCacheable() {
+            return true;
+        }
+
+        @Override
+        protected boolean matchDoc(int doc) {
+            int numValues = values.setDocument(doc);
+            for (int i = 0; i < numValues; i++) {
+                double value = values.nextValue();
+                if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
+                    return true;
+                }
+            }
+            return false;
+        }
+    }
+
+    private static final class LongRangeMatchDocIdSet extends MatchDocIdSet {
+        private final LongValues values;
+        private final long inclusiveLowerPoint;
+        private final long inclusiveUpperPoint;
+
+        protected LongRangeMatchDocIdSet(int maxDoc, Bits acceptDocs, final LongValues values, final long inclusiveLowerPoint, final long inclusiveUpperPoint) {
+            super(maxDoc, acceptDocs);
+            this.inclusiveLowerPoint = inclusiveLowerPoint;
+            this.inclusiveUpperPoint = inclusiveUpperPoint;
+            this.values = values;
+        }
+
+        @Override
+        public boolean isCacheable() {
+            return true;
+        }
+
+        @Override
+        protected boolean matchDoc(int doc) {
+            int numValues = values.setDocument(doc);
+            for (int i = 0; i < numValues; i++) {
+                long value = values.nextValue();
+                if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
+                    return true;
+                }
+            }
+            return false;
+        }
+    }
 }
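The two `MatchDocIdSet` implementations added above share the same bounded matching loop. The following standalone sketch models that loop under the assumption of a hypothetical `LongMultiValues` source; it is illustrative only, not the committed class.

```java
public class RangeMatchExample {

    // Hypothetical per-document source of long values.
    interface LongMultiValues {
        int setDocument(int docId);
        long nextValue();
    }

    static boolean matchDoc(LongMultiValues values, int doc, long lower, long upper) {
        // Bounded loop: setDocument() says exactly how many values to pull,
        // so no hasNext() call is needed inside the loop.
        int numValues = values.setDocument(doc);
        for (int i = 0; i < numValues; i++) {
            long value = values.nextValue();
            if (value >= lower && value <= upper) {
                return true;
            }
        }
        return false; // also covers documents with no value (numValues == 0)
    }

    public static void main(String[] args) {
        final long[][] docs = { {3, 42}, {}, {100} };
        LongMultiValues values = new LongMultiValues() {
            private long[] current;
            private int index;
            public int setDocument(int docId) { current = docs[docId]; index = 0; return current.length; }
            public long nextValue() { return current[index++]; }
        };
        for (int doc = 0; doc < docs.length; doc++) {
            System.out.println("doc " + doc + " matches [10, 50]: " + matchDoc(values, doc, 10, 50));
        }
    }
}
```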
@@ -177,27 +177,15 @@ public class GeoDistanceFilter extends Filter {

         @Override
         protected boolean matchDoc(int doc) {
-            if (!values.hasValue(doc)) {
-                return false;
-            }
-
-            if (values.isMultiValued()) {
-                GeoPointValues.Iter iter = values.getIter(doc);
-                while (iter.hasNext()) {
-                    GeoPoint point = iter.next();
-                    if (distanceBoundingCheck.isWithin(point.lat(), point.lon())) {
-                        double d = fixedSourceDistance.calculate(point.lat(), point.lon());
-                        if (d < distance) {
-                            return true;
-                        }
-                    }
-                }
-                return false;
-            } else {
-                GeoPoint point = values.getValue(doc);
+            final int length = values.setDocument(doc);
+            for (int i = 0; i < length; i++) {
+                GeoPoint point = values.nextValue();
                 if (distanceBoundingCheck.isWithin(point.lat(), point.lon())) {
                     double d = fixedSourceDistance.calculate(point.lat(), point.lon());
-                    return d < distance;
+                    if (d < distance) {
+                        return true;
+                    }
                 }
             }
             return false;
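A side effect of the bounded iteration is that the old `isMultiValued()` / `getValue(doc)` special case disappears: a single-valued document is simply the `length == 1` case of the same loop. The sketch below illustrates this with a hypothetical `PointValues` stand-in, not the Elasticsearch `GeoPointValues` class.

```java
public class SingleVsMultiExample {

    // Hypothetical per-document point source; nextValue() returns {lat, lon}.
    interface PointValues {
        int setDocument(int docId);
        double[] nextValue();
    }

    static boolean anyWithin(PointValues values, int doc, double maxAbsLat) {
        int length = values.setDocument(doc); // 0, 1 or more - no branching needed
        for (int i = 0; i < length; i++) {
            double[] point = values.nextValue();
            if (Math.abs(point[0]) <= maxAbsLat) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        final double[][][] docs = {
                { {48.85, 2.35} },               // single-valued document
                { {80.0, 0.0}, {10.0, 20.0} },   // multi-valued document
                { }                              // document without a value
        };
        PointValues values = new PointValues() {
            private double[][] current;
            private int index;
            public int setDocument(int docId) { current = docs[docId]; index = 0; return current.length; }
            public double[] nextValue() { return current[index++]; }
        };
        for (int doc = 0; doc < docs.length; doc++) {
            System.out.println("doc " + doc + " within +/-60 deg lat: " + anyWithin(values, doc, 60.0));
        }
    }
}
```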
@@ -195,32 +195,17 @@ public class GeoDistanceRangeFilter extends Filter {

         @Override
         protected boolean matchDoc(int doc) {
-            if (!values.hasValue(doc)) {
-                return false;
-            }
-
-            if (values.isMultiValued()) {
-                GeoPointValues.Iter iter = values.getIter(doc);
-                while (iter.hasNext()) {
-                    GeoPoint point = iter.next();
-                    if (distanceBoundingCheck.isWithin(point.lat(), point.lon())) {
-                        double d = fixedSourceDistance.calculate(point.lat(), point.lon());
-                        if (d >= inclusiveLowerPoint && d <= inclusiveUpperPoint) {
-                            return true;
-                        }
-                    }
-                }
-                return false;
-            } else {
-                GeoPoint point = values.getValue(doc);
+            final int length = values.setDocument(doc);
+            for (int i = 0; i < length; i++) {
+                GeoPoint point = values.nextValue();
                 if (distanceBoundingCheck.isWithin(point.lat(), point.lon())) {
                     double d = fixedSourceDistance.calculate(point.lat(), point.lon());
                     if (d >= inclusiveLowerPoint && d <= inclusiveUpperPoint) {
                         return true;
                     }
                 }
-                return false;
             }
+            return false;
         }
     }
 }
@@ -82,21 +82,12 @@ public class GeoPolygonFilter extends Filter {

         @Override
         protected boolean matchDoc(int doc) {
-            if (!values.hasValue(doc)) {
-                return false;
-            }
-
-            if (values.isMultiValued()) {
-                GeoPointValues.Iter iter = values.getIter(doc);
-                while (iter.hasNext()) {
-                    GeoPoint point = iter.next();
-                    if (pointInPolygon(points, point.lat(), point.lon())) {
-                        return true;
-                    }
-                }
-            } else {
-                GeoPoint point = values.getValue(doc);
-                return pointInPolygon(points, point.lat(), point.lon());
+            final int length = values.setDocument(doc);
+            for (int i = 0; i < length; i++) {
+                GeoPoint point = values.nextValue();
+                if (pointInPolygon(points, point.lat(), point.lon())) {
+                    return true;
+                }
             }
             return false;
         }
@@ -95,22 +95,9 @@ public class InMemoryGeoBoundingBoxFilter extends Filter {

         @Override
         protected boolean matchDoc(int doc) {
-            if (!values.hasValue(doc)) {
-                return false;
-            }
-
-            if (values.isMultiValued()) {
-                GeoPointValues.Iter iter = values.getIter(doc);
-                while (iter.hasNext()) {
-                    GeoPoint point = iter.next();
-                    if (((topLeft.lon() <= point.lon() || bottomRight.lon() >= point.lon())) &&
-                            (topLeft.lat() >= point.lat() && bottomRight.lat() <= point.lat())) {
-                        return true;
-                    }
-                }
-            } else {
-                GeoPoint point = values.getValue(doc);
-
+            final int length = values.setDocument(doc);
+            for (int i = 0; i < length; i++) {
+                GeoPoint point = values.nextValue();
                 if (((topLeft.lon() <= point.lon() || bottomRight.lon() >= point.lon())) &&
                         (topLeft.lat() >= point.lat() && bottomRight.lat() <= point.lat())) {
                     return true;
@@ -139,21 +126,9 @@ public class InMemoryGeoBoundingBoxFilter extends Filter {

         @Override
         protected boolean matchDoc(int doc) {
-            if (!values.hasValue(doc)) {
-                return false;
-            }
-
-            if (values.isMultiValued()) {
-                GeoPointValues.Iter iter = values.getIter(doc);
-                while (iter.hasNext()) {
-                    GeoPoint point = iter.next();
-                    if (topLeft.lon() <= point.lon() && bottomRight.lon() >= point.lon()
-                            && topLeft.lat() >= point.lat() && bottomRight.lat() <= point.lat()) {
-                        return true;
-                    }
-                }
-            } else {
-                GeoPoint point = values.getValue(doc);
+            final int length = values.setDocument(doc);
+            for (int i = 0; i < length; i++) {
+                GeoPoint point = values.nextValue();
                 if (topLeft.lon() <= point.lon() && bottomRight.lon() >= point.lon()
                         && topLeft.lat() >= point.lat() && bottomRight.lat() <= point.lat()) {
                     return true;
@@ -715,8 +715,12 @@ public class PercolatorService extends AbstractComponent {
                 int segmentIdx = ReaderUtil.subIndex(scoreDoc.doc, percolatorSearcher.reader().leaves());
                 AtomicReaderContext atomicReaderContext = percolatorSearcher.reader().leaves().get(segmentIdx);
                 BytesValues values = idFieldData.load(atomicReaderContext).getBytesValues();
-                spare.hash = values.getValueHashed(scoreDoc.doc - atomicReaderContext.docBase, spare.bytes);
-                matches.add(values.makeSafe(spare.bytes));
+                final int localDocId = scoreDoc.doc - atomicReaderContext.docBase;
+                assert values.hasValue(localDocId);
+                spare.bytes = values.getValue(localDocId);
+                spare.hash = values.currentValueHash();
+                matches.add(values.copyShared());
                 if (hls != null) {
                     Query query = context.percolateQueries().get(spare);
                     context.parsedQuery(new ParsedQuery(query, ImmutableMap.<String, Filter>of()));
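The copy in `matches.add(values.copyShared())` matters because the `BytesRef` handed out by the values source is a shared, reused buffer. The small sketch below reproduces the hazard with a plain reused `StringBuilder` instead of a `BytesRef`; it is a model of the problem, not of the Elasticsearch API.

```java
import java.util.ArrayList;
import java.util.List;

public class CopySharedExample {

    public static void main(String[] args) {
        String[] ids = {"query-a", "query-b"};
        StringBuilder shared = new StringBuilder(); // stands in for the reused scratch buffer

        List<CharSequence> keptWithoutCopy = new ArrayList<>();
        List<String> keptWithCopy = new ArrayList<>();

        for (String id : ids) {
            shared.setLength(0);
            shared.append(id);                   // each "value" overwrites the shared buffer
            keptWithoutCopy.add(shared);         // WRONG: stores the mutable shared buffer
            keptWithCopy.add(shared.toString()); // RIGHT: stores an immutable copy
        }

        System.out.println("without copy: " + keptWithoutCopy); // both entries read "query-b"
        System.out.println("with copy:    " + keptWithCopy);    // [query-a, query-b]
    }
}
```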
@@ -152,7 +152,7 @@ abstract class QueryCollector extends Collector {

     @Override
     public void collect(int doc) throws IOException {
-        spare.hash = values.getValueHashed(doc, spare.bytes);
+        spare.reset(values.getValue(doc), values.currentValueHash());
         Query query = queries.get(spare);
         if (query == null) {
             // log???
@@ -169,7 +169,7 @@ abstract class QueryCollector extends Collector {
             searcher.search(query, collector);
             if (collector.exists()) {
                 if (!limit || counter < size) {
-                    matches.add(values.makeSafe(spare.bytes));
+                    matches.add(values.copyShared());
                     if (context.highlight() != null) {
                         highlightPhase.hitExecute(context, context.hitContext());
                         hls.add(context.hitContext().hit().getHighlightFields());
@@ -210,7 +210,7 @@ abstract class QueryCollector extends Collector {

     @Override
     public void collect(int doc) throws IOException {
-        spare.hash = values.getValueHashed(doc, spare.bytes);
+        spare.reset(values.getValue(doc), values.currentValueHash());
         Query query = queries.get(spare);
         if (query == null) {
             // log???
@@ -273,7 +273,7 @@ abstract class QueryCollector extends Collector {

     @Override
     public void collect(int doc) throws IOException {
-        spare.hash = values.getValueHashed(doc, spare.bytes);
+        spare.reset(values.getValue(doc), values.currentValueHash());
         Query query = queries.get(spare);
         if (query == null) {
             // log???
@@ -289,7 +289,7 @@ abstract class QueryCollector extends Collector {
             searcher.search(query, collector);
             if (collector.exists()) {
                 if (!limit || counter < size) {
-                    matches.add(values.makeSafe(spare.bytes));
+                    matches.add(values.copyShared());
                     scores.add(scorer.score());
                     if (context.highlight() != null) {
                         highlightPhase.hitExecute(context, context.hitContext());
@@ -338,7 +338,7 @@ abstract class QueryCollector extends Collector {

     @Override
     public void collect(int doc) throws IOException {
-        spare.hash = values.getValueHashed(doc, spare.bytes);
+        spare.reset(values.getValue(doc), values.currentValueHash());
         Query query = queries.get(spare);
         if (query == null) {
             // log???
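`spare.reset(values.getValue(doc), values.currentValueHash())` is the usual reusable-key lookup: a single mutable key probes the map, and a fresh key is only allocated when something is actually inserted. A self-contained model of that pattern follows, with a hypothetical `MutableKey` in place of `HashedBytesRef`.

```java
import java.util.HashMap;
import java.util.Map;

public class SpareKeyExample {

    static final class MutableKey {
        String value;
        int hash;

        MutableKey reset(String value, int hash) {
            this.value = value;
            this.hash = hash;
            return this;
        }

        @Override
        public boolean equals(Object o) {
            return o instanceof MutableKey && ((MutableKey) o).value.equals(value);
        }

        @Override
        public int hashCode() {
            return hash;
        }
    }

    public static void main(String[] args) {
        Map<MutableKey, Integer> counts = new HashMap<>();
        MutableKey spare = new MutableKey();
        String[] stream = {"a", "b", "a", "a"};

        for (String term : stream) {
            spare.reset(term, term.hashCode());  // no allocation for the lookup itself
            Integer current = counts.get(spare);
            if (current == null) {
                // copy into a fresh key only when inserting, as the collectors above do
                counts.put(new MutableKey().reset(term, term.hashCode()), 1);
            } else {
                counts.put(spare, current + 1);  // existing key object stays in the map
            }
        }
        System.out.println(counts.size() + " distinct terms"); // 2
    }
}
```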
@@ -19,7 +19,6 @@
 package org.elasticsearch.search.facet;

 import org.elasticsearch.index.fielddata.DoubleValues;
-import org.elasticsearch.index.fielddata.DoubleValues.Iter;

 /**
  * Simple Facet aggregator base class for {@link DoubleValues}
@@ -29,15 +28,14 @@ public abstract class DoubleFacetAggregatorBase {
     private int missing;

     public void onDoc(int docId, DoubleValues values) {
-        if (values.hasValue(docId)) {
-            final Iter iter = values.getIter(docId);
-            while(iter.hasNext()) {
-                onValue(docId, iter.next());
-                total++;
-            }
-        } else {
-            missing++;
+        int numValues = values.setDocument(docId);
+        int tempMissing = 1;
+        for (int i = 0; i < numValues; i++) {
+            tempMissing = 0;
+            onValue(docId, values.nextValue());
+            total++;
         }
+        missing += tempMissing;
     }

     protected abstract void onValue(int docId, double next);
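The `tempMissing` flag above replaces the old `hasValue()` branch: it starts at 1 and is cleared by the first iteration, so a document with zero values contributes exactly one to `missing` without any `if`/`else`. A tiny standalone illustration of the arithmetic:

```java
public class MissingCountExample {

    public static void main(String[] args) {
        double[][] docs = { {1.5, 2.5}, {}, {3.0}, {} };
        int total = 0;
        int missing = 0;

        for (double[] valuesForDoc : docs) {
            int numValues = valuesForDoc.length; // corresponds to setDocument(docId)
            int tempMissing = 1;
            for (int i = 0; i < numValues; i++) {
                tempMissing = 0;                 // at least one value -> not missing
                total++;                         // corresponds to onValue(...) plus total++
            }
            missing += tempMissing;              // adds 0 or 1, no branch required
        }

        System.out.println("total values = " + total + ", missing docs = " + missing); // 3 and 2
    }
}
```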
@@ -19,7 +19,6 @@
 package org.elasticsearch.search.facet;

 import org.elasticsearch.index.fielddata.LongValues;
-import org.elasticsearch.index.fielddata.LongValues.Iter;

 /**
  * Simple Facet aggregator base class for {@link LongValues}
@@ -29,15 +28,14 @@ public abstract class LongFacetAggregatorBase {
     private int missing;

     public void onDoc(int docId, LongValues values) {
-        if (values.hasValue(docId)) {
-            final Iter iter = values.getIter(docId);
-            while(iter.hasNext()) {
-                onValue(docId, iter.next());
-                total++;
-            }
-        } else {
-            missing++;
+        final int numValues = values.setDocument(docId);
+        int tempMissing = 1;
+        for (int i = 0; i < numValues; i++) {
+            tempMissing = 0;
+            onValue(docId, values.nextValue());
+            total++;
         }
+        missing += tempMissing;
     }

     protected abstract void onValue(int docId, long next);
@@ -26,7 +26,6 @@ import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.index.fielddata.GeoPointValues;
-import org.elasticsearch.index.fielddata.GeoPointValues.Iter;
 import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
 import org.elasticsearch.search.facet.FacetExecutor;
 import org.elasticsearch.search.facet.InternalFacet;
@@ -106,9 +105,9 @@ public class GeoDistanceFacetExecutor extends FacetExecutor {
         }

         public void onDoc(int docId, GeoPointValues values) {
-            final Iter iter = values.getIter(docId);
-            while(iter.hasNext()) {
-                final GeoPoint next = iter.next();
+            final int length = values.setDocument(docId);
+            for (int i = 0; i < length; i++) {
+                final GeoPoint next = values.nextValue();
                 double distance = fixedSourceDistance.calculate(next.getLat(), next.getLon());
                 for (GeoDistanceFacet.Entry entry : entries) {
                     if (entry.foundInDoc) {
@@ -19,8 +19,6 @@

 package org.elasticsearch.search.facet.geodistance;

-import java.io.IOException;
-
 import org.apache.lucene.index.AtomicReaderContext;
 import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.unit.DistanceUnit;
@@ -29,6 +27,8 @@ import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.search.internal.SearchContext;

+import java.io.IOException;
+
 /**
  *
  */
@@ -73,9 +73,9 @@ public class ValueGeoDistanceFacetExecutor extends GeoDistanceFacetExecutor {
         protected void collectGeoPoint(GeoDistanceFacet.Entry entry, int docId, double distance) {
             entry.foundInDoc = true;
             entry.count++;
-            DoubleValues.Iter iter = valueValues.getIter(docId);
-            while(iter.hasNext()) {
-                double value = iter.next();
+            int seek = valueValues.setDocument(docId);
+            for (int i = 0; i < seek; i++) {
+                double value = valueValues.nextValue();
                 entry.totalCount++;
                 entry.total += value;
                 if (value < entry.min) {
@@ -102,29 +102,14 @@ public class KeyValueRangeFacetExecutor extends FacetExecutor {
                 if (value >= entry.getFrom() && value < entry.getTo()) {
                     entry.foundInDoc = true;
                     entry.count++;
-                    if (valueValues.isMultiValued()) {
-                        for (DoubleValues.Iter iter = valueValues.getIter(docId); iter.hasNext(); ) {
-                            double valueValue = iter.next();
-                            entry.total += valueValue;
-                            if (valueValue < entry.min) {
-                                entry.min = valueValue;
-                            }
-                            if (valueValue > entry.max) {
-                                entry.max = valueValue;
-                            }
-                            entry.totalCount++;
-                        }
-                    } else if (valueValues.hasValue(docId)) {
-                        double valueValue = valueValues.getValue(docId);
-                        entry.totalCount++;
+                    int seek = valueValues.setDocument(docId);
+                    for (int i = 0; i < seek; i++) {
+                        double valueValue = valueValues.nextValue();
                         entry.total += valueValue;
-                        if (valueValue < entry.min) {
-                            entry.min = valueValue;
-                        }
-                        if (valueValue > entry.max) {
-                            entry.max = valueValue;
-                        }
+                        entry.min = Math.min(entry.min, valueValue);
+                        entry.max = Math.max(entry.max, valueValue);
                     }
+                    entry.totalCount += seek;
                 }
             }
         }
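The rewritten block folds the min/max `if` statements into `Math.min`/`Math.max` and counts all of a document's values at once with `totalCount += seek`. The same arithmetic, in isolation:

```java
public class MinMaxAccumulateExample {

    public static void main(String[] args) {
        double[] values = {4.0, -1.5, 7.25, 3.0};

        double min = Double.POSITIVE_INFINITY;
        double max = Double.NEGATIVE_INFINITY;
        double total = 0;
        int totalCount = 0;

        int seek = values.length;  // corresponds to valueValues.setDocument(docId)
        for (int i = 0; i < seek; i++) {
            double v = values[i];  // corresponds to valueValues.nextValue()
            total += v;
            min = Math.min(min, v);
            max = Math.max(max, v);
        }
        totalCount += seek;        // one bump for the whole document

        System.out.println("min=" + min + " max=" + max + " total=" + total + " count=" + totalCount);
    }
}
```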
@@ -78,7 +78,7 @@ public class TermsDoubleFacetExecutor extends FacetExecutor {
         if (values instanceof DoubleValues.WithOrdinals) {
             DoubleValues.WithOrdinals valuesWithOrds = (DoubleValues.WithOrdinals) values;
             Ordinals.Docs ordinals = valuesWithOrds.ordinals();
-            for (int ord = 1; ord < ordinals.getMaxOrd(); ord++) {
+            for (long ord = Ordinals.MIN_ORDINAL; ord < ordinals.getMaxOrd(); ord++) {
                 facets.v().putIfAbsent(valuesWithOrds.getValueByOrd(ord), 0);
             }
         } else {
@@ -88,10 +88,10 @@ public class TermsDoubleFacetExecutor extends FacetExecutor {
                     if (!values.hasValue(docId)) {
                         continue;
                     }
-
-                    DoubleValues.Iter iter = values.getIter(docId);
-                    while (iter.hasNext()) {
-                        facets.v().putIfAbsent(iter.next(), 0);
+                    int numValues = values.setDocument(docId);
+                    DoubleIntOpenHashMap map = facets.v();
+                    for (int i = 0; i < numValues; i++) {
+                        map.putIfAbsent(values.nextValue(), 0);
                     }
                 }
             } else {
@@ -77,20 +77,17 @@ public class TermsLongFacetExecutor extends FacetExecutor {
         if (values instanceof LongValues.WithOrdinals) {
             LongValues.WithOrdinals valuesWithOrds = (LongValues.WithOrdinals) values;
             Ordinals.Docs ordinals = valuesWithOrds.ordinals();
-            for (int ord = 1; ord < ordinals.getMaxOrd(); ord++) {
+            for (long ord = Ordinals.MIN_ORDINAL; ord < ordinals.getMaxOrd(); ord++) {
                 facets.v().putIfAbsent(valuesWithOrds.getValueByOrd(ord), 0);
             }
         } else {
             // Shouldn't be true, otherwise it is WithOrdinals... just to be sure...
             if (values.isMultiValued()) {
                 for (int docId = 0; docId < maxDoc; docId++) {
-                    if (!values.hasValue(docId)) {
-                        continue;
-                    }
-
-                    LongValues.Iter iter = values.getIter(docId);
-                    while (iter.hasNext()) {
-                        facets.v().putIfAbsent(iter.next(), 0);
+                    final int numValues = values.setDocument(docId);
+                    final LongIntOpenHashMap v = facets.v();
+                    for (int i = 0; i < numValues; i++) {
+                        v.putIfAbsent(values.nextValue(), 0);
                     }
                 }
             } else {
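The loop bound change above reflects that ordinal 0 is reserved for documents without a value, so enumeration of distinct values starts at the first real ordinal (`Ordinals.MIN_ORDINAL` in this commit) and the loop variable becomes a `long`. A plain-array illustration of that sweep, with the sentinel assumption spelled out:

```java
public class OrdinalSweepExample {

    static final long MIN_ORDINAL = 1; // assumption for this sketch: ordinal 0 means "missing"

    public static void main(String[] args) {
        // value lookup table indexed by ordinal; slot 0 is the "missing" sentinel
        long[] valueByOrd = {Long.MIN_VALUE, 10, 25, 42};
        long maxOrd = valueByOrd.length;

        // visit every distinct value exactly once, as facets.v().putIfAbsent(...) does above
        for (long ord = MIN_ORDINAL; ord < maxOrd; ord++) {
            System.out.println("ord " + ord + " -> value " + valueByOrd[(int) ord]);
        }
    }
}
```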
@@ -26,7 +26,6 @@ import org.apache.lucene.util.BytesRefHash;
 import org.elasticsearch.common.collect.BoundedTreeSet;
 import org.elasticsearch.common.lucene.HashedBytesRef;
 import org.elasticsearch.index.fielddata.BytesValues;
-import org.elasticsearch.index.fielddata.BytesValues.Iter;
 import org.elasticsearch.search.facet.InternalFacet;
 import org.elasticsearch.search.facet.terms.TermsFacet;
 import org.elasticsearch.search.facet.terms.support.EntryPriorityQueue;
@@ -44,15 +43,15 @@ public class HashedAggregator {
     }

     public void onDoc(int docId, BytesValues values) {
-        if (values.hasValue(docId)) {
-            final Iter iter = values.getIter(docId);
-            while (iter.hasNext()) {
-                onValue(docId, iter.next(), iter.hash(), values);
-                total++;
-            }
-        } else {
-            missing++;
+        final int length = values.setDocument(docId);
+        int pendingMissing = 1;
+        total += length;
+        for (int i = 0; i < length; i++) {
+            final BytesRef value = values.nextValue();
+            onValue(docId, value, values.currentValueHash(), values);
+            pendingMissing = 0;
         }
+        missing += pendingMissing;
     }

     public void addValue(BytesRef value, int hashCode, BytesValues values) {
@@ -232,9 +231,7 @@ public class HashedAggregator {
     }

     private static final class AssertingHashCount implements HashCount { // simple
-        // implemenation
-        // for
-        // assertions
+        // implementation for assertions
         private final ObjectIntOpenHashMap<HashedBytesRef> valuesAndCount = new ObjectIntOpenHashMap<HashedBytesRef>();
         private HashedBytesRef spare = new HashedBytesRef();

@@ -244,7 +241,7 @@ public class HashedAggregator {
             assert adjustedValue >= 1;
             if (adjustedValue == 1) { // only if we added the spare we create a
                 // new instance
-                spare.bytes = values.makeSafe(spare.bytes);
+                spare.bytes = BytesRef.deepCopyOf(value);
                 spare = new HashedBytesRef();
                 return true;
             }
@@ -268,9 +265,8 @@ public class HashedAggregator {
         @Override
         public boolean addNoCount(BytesRef value, int hashCode, BytesValues values) {
             if (!valuesAndCount.containsKey(spare.reset(value, hashCode))) {
-                valuesAndCount.addTo(spare.reset(value, hashCode), 0);
-                spare.bytes = values.makeSafe(spare.bytes);
-                spare = new HashedBytesRef();
+                valuesAndCount.addTo(spare.reset(BytesRef.deepCopyOf(value), hashCode), 0);
+                spare = new HashedBytesRef(); // reset the reference since we just added to the hash
                 return true;
             }
             return false;
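`BytesRef.deepCopyOf(value)` gives the stored key its own byte array, which is what protects it from the shared buffer being overwritten by the next value. A minimal demonstration, assuming Lucene is on the classpath:

```java
import org.apache.lucene.util.BytesRef;

public class DeepCopyExample {

    public static void main(String[] args) {
        byte[] buffer = "first".getBytes(java.nio.charset.StandardCharsets.UTF_8);
        BytesRef shared = new BytesRef(buffer, 0, buffer.length); // view over a reused buffer

        BytesRef copied = BytesRef.deepCopyOf(shared);            // owns its own bytes

        // simulate the buffer being reused for the next value
        byte[] next = "other".getBytes(java.nio.charset.StandardCharsets.UTF_8);
        System.arraycopy(next, 0, buffer, 0, next.length);

        System.out.println("shared now reads: " + shared.utf8ToString()); // "other"
        System.out.println("copy still reads: " + copied.utf8ToString()); // "first"
    }
}
```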
@@ -76,7 +76,7 @@ public final class HashedScriptAggregator extends HashedAggregator {
             script.setNextDocId(docId);
             // LUCENE 4 UPGRADE: needs optimization -- maybe a CharSequence
             // does the job here?
-            // we only creat that string if we really need
+            // we only create that string if we really need
             script.setNextVar("term", spare.toString());
             Object scriptValue = script.run();
             if (scriptValue == null) {
@@ -87,8 +87,6 @@ public final class HashedScriptAggregator extends HashedAggregator {
                     return;
                 }
             } else {
-                // LUCENE 4 UPGRADE: should be possible to convert directly
-                // to BR
                 scriptSpare.copyChars(scriptValue.toString());
                 hashCode = scriptSpare.hashCode();
                 super.onValue(docId, scriptSpare, hashCode, values);
@@ -126,39 +126,24 @@ public class TermsStringFacetExecutor extends FacetExecutor {
     }

     static void loadAllTerms(SearchContext context, IndexFieldData indexFieldData, HashedAggregator aggregator) {

         for (AtomicReaderContext readerContext : context.searcher().getTopReaderContext().leaves()) {
             int maxDoc = readerContext.reader().maxDoc();
             if (indexFieldData instanceof IndexFieldData.WithOrdinals) {
                 BytesValues.WithOrdinals values = ((IndexFieldData.WithOrdinals) indexFieldData).load(readerContext).getBytesValues();
                 Ordinals.Docs ordinals = values.ordinals();
                 // 0 = docs with no value for field, so start from 1 instead
-                for (int ord = 1; ord < ordinals.getMaxOrd(); ord++) {
+                for (long ord = Ordinals.MIN_ORDINAL; ord < ordinals.getMaxOrd(); ord++) {
                     BytesRef value = values.getValueByOrd(ord);
                     aggregator.addValue(value, value.hashCode(), values);
                 }
             } else {
                 BytesValues values = indexFieldData.load(readerContext).getBytesValues();
-                // Shouldn't be true, otherwise it is WithOrdinals... just to be sure...
-                if (values.isMultiValued()) {
-                    for (int docId = 0; docId < maxDoc; docId++) {
-                        if (!values.hasValue(docId)) {
-                            continue;
-                        }
-
-                        BytesValues.Iter iter = values.getIter(docId);
-                        while (iter.hasNext()) {
-                            aggregator.addValue(iter.next(), iter.hash(), values);
-                        }
-                    }
-                } else {
-                    BytesRef spare = new BytesRef();
-                    for (int docId = 0; docId < maxDoc; docId++) {
-                        if (!values.hasValue(docId)) {
-                            continue;
-                        }
-
-                        int hash = values.getValueHashed(docId, spare);
-                        aggregator.addValue(spare, hash, values);
+                for (int docId = 0; docId < maxDoc; docId++) {
+                    final int size = values.setDocument(docId);
+                    for (int i = 0; i < size; i++) {
+                        final BytesRef value = values.nextValue();
+                        aggregator.addValue(value, values.currentValueHash(), values);
                     }
                 }
             }
@@ -32,7 +32,6 @@ import org.elasticsearch.common.util.IntArrays;
 import org.elasticsearch.index.fielddata.BytesValues;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.ordinals.Ordinals;
-import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs.Iter;
 import org.elasticsearch.search.facet.FacetExecutor;
 import org.elasticsearch.search.facet.InternalFacet;
 import org.elasticsearch.search.facet.terms.TermsFacet;
@@ -115,7 +114,7 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {

         while (queue.size() > 0) {
             ReaderAggregator agg = queue.top();
-            BytesRef value = agg.values.makeSafe(agg.current); // we need to makeSafe it, since we end up pushing it... (can we get around this?)
+            BytesRef value = agg.copyCurrent(); // we need to makeSafe it, since we end up pushing it... (can we get around this?)
             int count = 0;
             do {
                 count += agg.counts.get(agg.position);
@@ -155,7 +154,7 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {

         while (queue.size() > 0) {
             ReaderAggregator agg = queue.top();
-            BytesRef value = agg.values.makeSafe(agg.current); // we need to makeSafe it, since we end up pushing it... (can we work around that?)
+            BytesRef value = agg.copyCurrent(); // we need to makeSafe it, since we end up pushing it... (can we work around that?)
             int count = 0;
             do {
                 count += agg.counts.get(agg.position);
@@ -211,12 +210,13 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {

         @Override
         public void collect(int doc) throws IOException {
-            Iter iter = ordinals.getIter(doc);
-            long ord = iter.next();
-            current.onOrdinal(doc, ord);
-            while ((ord = iter.next()) != 0) {
-                current.onOrdinal(doc, ord);
+            final int length = ordinals.setDocument(doc);
+            int missing = 1;
+            for (int i = 0; i < length; i++) {
+                current.onOrdinal(doc, ordinals.nextOrd());
+                missing = 0;
             }
+            current.incrementMissing(missing);
         }

         @Override
@@ -237,13 +237,14 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {

     public static final class ReaderAggregator {

+        private final long maxOrd;
+
         final BytesValues.WithOrdinals values;
         final IntArray counts;

         long position = 0;
         BytesRef current;
         int total;
-        private final long maxOrd;

         public ReaderAggregator(BytesValues.WithOrdinals values, int ordinalsCacheLimit, CacheRecycler cacheRecycler) {
             this.values = values;
@@ -256,6 +257,11 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {
             total++;
         }

+        final void incrementMissing(int numMissing) {
+            counts.increment(0, numMissing);
+            total += numMissing;
+        }
+
         public boolean nextPosition() {
             if (++position >= maxOrd) {
                 return false;
@@ -263,6 +269,10 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {
             current = values.getValueByOrd(position);
             return true;
         }
+
+        public BytesRef copyCurrent() {
+            return values.copyShared();
+        }
     }

     public static class AggregatorPriorityQueue extends PriorityQueue<ReaderAggregator> {
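The new `collect()` and `incrementMissing()` above cooperate: ordinals are pulled in a bounded loop, and a document whose loop never runs is counted under ordinal 0. The sketch below models this with plain arrays in place of `Ordinals.Docs` and `IntArray`; it is illustrative only.

```java
public class OrdinalCollectExample {

    public static void main(String[] args) {
        // ordinals per document; an empty array means the document has no value
        long[][] ordsPerDoc = { {1, 3}, {}, {2} };
        int maxOrd = 4;
        int[] counts = new int[maxOrd]; // counts[0] accumulates missing documents

        for (long[] ords : ordsPerDoc) {
            int length = ords.length;    // corresponds to ordinals.setDocument(doc)
            int missing = 1;
            for (int i = 0; i < length; i++) {
                counts[(int) ords[i]]++; // corresponds to current.onOrdinal(doc, ordinals.nextOrd())
                missing = 0;
            }
            counts[0] += missing;        // corresponds to current.incrementMissing(missing)
        }

        System.out.println(java.util.Arrays.toString(counts)); // [1, 1, 1, 1]
    }
}
```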
@@ -170,7 +170,7 @@ public class TermsStatsStringFacetExecutor extends FacetExecutor {
             spare.reset(value, hashCode);
             InternalTermsStatsStringFacet.StringEntry stringEntry = entries.get(spare);
             if (stringEntry == null) {
-                HashedBytesRef theValue = new HashedBytesRef(values.makeSafe(value), hashCode);
+                HashedBytesRef theValue = new HashedBytesRef(values.copyShared(), hashCode);
                 stringEntry = new InternalTermsStatsStringFacet.StringEntry(theValue, 0, 0, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
                 entries.put(theValue, stringEntry);
             }
@@ -210,7 +210,7 @@ public class TermsStatsStringFacetExecutor extends FacetExecutor {
             spare.reset(value, hashCode);
             InternalTermsStatsStringFacet.StringEntry stringEntry = entries.get(spare);
             if (stringEntry == null) {
-                HashedBytesRef theValue = new HashedBytesRef(values.makeSafe(value), hashCode);
+                HashedBytesRef theValue = new HashedBytesRef(values.copyShared(), hashCode);
                 stringEntry = new InternalTermsStatsStringFacet.StringEntry(theValue, 1, 0, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
                 entries.put(theValue, stringEntry);
             } else {
@@ -23,6 +23,7 @@ import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.search.*;
 import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.lucene.HashedBytesRef;
 import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
 import org.elasticsearch.index.fielddata.fieldcomparator.SortMode;
@@ -93,19 +94,9 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
         assertThat(bytesValues.getValue(1), equalTo(new BytesRef(one())));
         assertThat(bytesValues.getValue(2), equalTo(new BytesRef(three())));

-        BytesRef bytesRef = new BytesRef();
-        assertThat(bytesValues.getValueScratch(0, bytesRef), equalTo(new BytesRef(two())));
-        assertThat(bytesRef, equalTo(new BytesRef(two())));
-        assertThat(bytesValues.getValueScratch(1, bytesRef), equalTo(new BytesRef(one())));
-        assertThat(bytesRef, equalTo(new BytesRef(one())));
-        assertThat(bytesValues.getValueScratch(2, bytesRef), equalTo(new BytesRef(three())));
-        assertThat(bytesRef, equalTo(new BytesRef(three())));
-
-        BytesValues.Iter bytesValuesIter = bytesValues.getIter(0);
-        assertThat(bytesValuesIter.hasNext(), equalTo(true));
-        assertThat(bytesValuesIter.next(), equalTo(new BytesRef(two())));
-        assertThat(bytesValuesIter.hasNext(), equalTo(false));
+        assertValues(bytesValues, 0, two());
+        assertValues(bytesValues, 1, one());
+        assertValues(bytesValues, 2, three());

         BytesValues hashedBytesValues = fieldData.getBytesValues();

@@ -116,11 +107,9 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
         assertThat(convert(hashedBytesValues, 0), equalTo(new HashedBytesRef(two())));
         assertThat(convert(hashedBytesValues, 1), equalTo(new HashedBytesRef(one())));
         assertThat(convert(hashedBytesValues, 2), equalTo(new HashedBytesRef(three())));
-
-        BytesValues.Iter hashedBytesValuesIter = hashedBytesValues.getIter(0);
-        assertThat(hashedBytesValuesIter.hasNext(), equalTo(true));
-        assertThat(new HashedBytesRef(hashedBytesValuesIter.next(), hashedBytesValuesIter.hash()), equalTo(new HashedBytesRef(two())));
-        assertThat(hashedBytesValuesIter.hasNext(), equalTo(false));
+        assertHashedValues(hashedBytesValues, 0, two());
+        assertHashedValues(hashedBytesValues, 1, one());
+        assertHashedValues(hashedBytesValues, 2, three());

         IndexSearcher searcher = new IndexSearcher(readerContext.reader());
         TopFieldDocs topDocs;
@@ -144,11 +133,47 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
     }

     private HashedBytesRef convert(BytesValues values, int doc) {
-        BytesRef ref = new BytesRef();
-        return new HashedBytesRef(ref, values.getValueHashed(doc, ref));
+        if (values.hasValue(doc)) {
+            return new HashedBytesRef(BytesRef.deepCopyOf(values.getValue(doc)), values.currentValueHash());
+        } else {
+            return new HashedBytesRef(new BytesRef());
+        }
     }

     protected abstract void fillSingleValueWithMissing() throws Exception;

+    public void assertValues(BytesValues values, int docId, BytesRef... actualValues) {
+        assertThat(values.setDocument(docId), equalTo(actualValues.length));
+        for (int i = 0; i < actualValues.length; i++) {
+            assertThat(values.nextValue(), equalTo(actualValues[i]));
+        }
+    }
+
+    public void assertValues(BytesValues values, int docId, String... actualValues) {
+        assertThat(values.setDocument(docId), equalTo(actualValues.length));
+        for (int i = 0; i < actualValues.length; i++) {
+            assertThat(values.nextValue(), equalTo(new BytesRef(actualValues[i])));
+        }
+    }
+
+    public void assertHashedValues(BytesValues values, int docId, BytesRef... actualValues) {
+        assertThat(values.setDocument(docId), equalTo(actualValues.length));
+        BytesRef r = new BytesRef();
+        for (int i = 0; i < actualValues.length; i++) {
+            assertThat(values.nextValue(), equalTo(new HashedBytesRef(actualValues[i]).bytes));
+            assertThat(values.currentValueHash(), equalTo(new HashedBytesRef(actualValues[i]).hash));
+        }
+    }
+
+    public void assertHashedValues(BytesValues values, int docId, String... actualValues) {
+        assertThat(values.setDocument(docId), equalTo(actualValues.length));
+        for (int i = 0; i < actualValues.length; i++) {
+            assertThat(values.nextValue(), equalTo(new HashedBytesRef(actualValues[i]).bytes));
+            assertThat(values.currentValueHash(), equalTo(new HashedBytesRef(actualValues[i]).hash));
+        }
+    }
+
     @Test
     public void testSingleValueWithMissing() throws Exception {
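The `assertValues`/`assertHashedValues` helpers added above encode the contract being tested: `setDocument(docId)` must report exactly the expected number of values and `nextValue()` must return them in order. A standalone sketch of that check, without the Hamcrest dependency and with plain arrays in place of `BytesValues`:

```java
import java.util.Arrays;

public class AssertValuesExample {

    static void assertValues(String[][] perDoc, int docId, String... expected) {
        String[] actual = perDoc[docId];            // setDocument(docId) "returns" actual.length
        if (actual.length != expected.length) {
            throw new AssertionError("expected " + expected.length + " values, got " + actual.length);
        }
        for (int i = 0; i < expected.length; i++) { // nextValue(), in order
            if (!actual[i].equals(expected[i])) {
                throw new AssertionError("value " + i + ": expected " + expected[i] + ", got " + actual[i]);
            }
        }
    }

    public static void main(String[] args) {
        String[][] docs = { {"2"}, {}, {"3"} };
        assertValues(docs, 0, "2");
        assertValues(docs, 1);          // a document without values expects an empty varargs list
        assertValues(docs, 2, "3");
        System.out.println("all assertions passed for " + Arrays.deepToString(docs));
    }
}
```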
@@ -169,26 +194,13 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
         assertThat(bytesValues.hasValue(2), equalTo(true));

         assertThat(bytesValues.getValue(0), equalTo(new BytesRef(two())));
-        assertThat(bytesValues.getValue(1), nullValue());
+        assertThat(bytesValues.getValue(1), equalTo(new BytesRef()));
         assertThat(bytesValues.getValue(2), equalTo(new BytesRef(three())));

-        BytesRef bytesRef = new BytesRef();
-        assertThat(bytesValues.getValueScratch(0, bytesRef), equalTo(new BytesRef(two())));
-        assertThat(bytesRef, equalTo(new BytesRef(two())));
-        assertThat(bytesValues.getValueScratch(1, bytesRef), equalTo(new BytesRef()));
-        assertThat(bytesRef, equalTo(new BytesRef()));
-        assertThat(bytesValues.getValueScratch(2, bytesRef), equalTo(new BytesRef(three())));
-        assertThat(bytesRef, equalTo(new BytesRef(three())));
-
-        BytesValues.Iter bytesValuesIter = bytesValues.getIter(0);
-        assertThat(bytesValuesIter.hasNext(), equalTo(true));
-        assertThat(bytesValuesIter.next(), equalTo(new BytesRef(two())));
-        assertThat(bytesValuesIter.hasNext(), equalTo(false));
-
-        bytesValuesIter = bytesValues.getIter(1);
-        assertThat(bytesValuesIter.hasNext(), equalTo(false));
+        assertValues(bytesValues, 0, two());
+        assertValues(bytesValues, 1, Strings.EMPTY_ARRAY);

         BytesValues hashedBytesValues = fieldData.getBytesValues();

         assertThat(hashedBytesValues.hasValue(0), equalTo(true));
@@ -199,13 +211,9 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
         assertThat(convert(hashedBytesValues, 1), equalTo(new HashedBytesRef(new BytesRef())));
         assertThat(convert(hashedBytesValues, 2), equalTo(new HashedBytesRef(three())));

-        BytesValues.Iter hashedBytesValuesIter = hashedBytesValues.getIter(0);
-        assertThat(hashedBytesValuesIter.hasNext(), equalTo(true));
-        assertThat(new HashedBytesRef(hashedBytesValuesIter.next(), hashedBytesValuesIter.hash()), equalTo(new HashedBytesRef(two())));
-        assertThat(hashedBytesValuesIter.hasNext(), equalTo(false));
-
-        hashedBytesValuesIter = hashedBytesValues.getIter(1);
-        assertThat(hashedBytesValuesIter.hasNext(), equalTo(false));
+        assertHashedValues(hashedBytesValues, 0, two());
+        assertHashedValues(hashedBytesValues, 1, Strings.EMPTY_ARRAY);
     }

     protected abstract void fillMultiValueAllSet() throws Exception;
@@ -231,21 +239,7 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
         assertThat(bytesValues.getValue(1), equalTo(new BytesRef(one())));
         assertThat(bytesValues.getValue(2), equalTo(new BytesRef(three())));

-        BytesRef bytesRef = new BytesRef();
-        assertThat(bytesValues.getValueScratch(0, bytesRef), equalTo(new BytesRef(two())));
-        assertThat(bytesRef, equalTo(new BytesRef(two())));
-        assertThat(bytesValues.getValueScratch(1, bytesRef), equalTo(new BytesRef(one())));
-        assertThat(bytesRef, equalTo(new BytesRef(one())));
-        assertThat(bytesValues.getValueScratch(2, bytesRef), equalTo(new BytesRef(three())));
-        assertThat(bytesRef, equalTo(new BytesRef(three())));
-
-        BytesValues.Iter bytesValuesIter = bytesValues.getIter(0);
-        assertThat(bytesValuesIter.hasNext(), equalTo(true));
-        assertThat(bytesValuesIter.next(), equalTo(new BytesRef(two())));
-        assertThat(bytesValuesIter.hasNext(), equalTo(true));
-        assertThat(bytesValuesIter.next(), equalTo(new BytesRef(four())));
-        assertThat(bytesValuesIter.hasNext(), equalTo(false));
+        assertValues(bytesValues, 0, two(), four());

         BytesValues hashedBytesValues = fieldData.getBytesValues();

@@ -257,13 +251,8 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
         assertThat(convert(hashedBytesValues, 1), equalTo(new HashedBytesRef(one())));
         assertThat(convert(hashedBytesValues, 2), equalTo(new HashedBytesRef(three())));

-        BytesValues.Iter hashedBytesValuesIter = hashedBytesValues.getIter(0);
-        assertThat(hashedBytesValuesIter.hasNext(), equalTo(true));
-        assertThat(new HashedBytesRef(hashedBytesValuesIter.next(), hashedBytesValuesIter.hash()), equalTo(new HashedBytesRef(two())));
-        assertThat(hashedBytesValuesIter.hasNext(), equalTo(true));
-        assertThat(new HashedBytesRef(hashedBytesValuesIter.next(), hashedBytesValuesIter.hash()), equalTo(new HashedBytesRef(four())));
-        assertThat(hashedBytesValuesIter.hasNext(), equalTo(false));
+        assertHashedValues(hashedBytesValues, 0, two(), four());

         IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true));
         TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField("value", indexFieldData.comparatorSource(null, SortMode.MIN))));
         assertThat(topDocs.totalHits, equalTo(3));
@@ -300,27 +289,11 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
         assertThat(bytesValues.hasValue(2), equalTo(true));

         assertThat(bytesValues.getValue(0), equalTo(new BytesRef(two())));
-        assertThat(bytesValues.getValue(1), nullValue());
+        assertThat(bytesValues.getValue(1), equalTo(new BytesRef()));
         assertThat(bytesValues.getValue(2), equalTo(new BytesRef(three())));

-        BytesRef bytesRef = new BytesRef();
-        assertThat(bytesValues.getValueScratch(0, bytesRef), equalTo(new BytesRef(two())));
-        assertThat(bytesRef, equalTo(new BytesRef(two())));
-        assertThat(bytesValues.getValueScratch(1, bytesRef), equalTo(new BytesRef()));
-        assertThat(bytesRef, equalTo(new BytesRef()));
-        assertThat(bytesValues.getValueScratch(2, bytesRef), equalTo(new BytesRef(three())));
-        assertThat(bytesRef, equalTo(new BytesRef(three())));
-
-        BytesValues.Iter bytesValuesIter = bytesValues.getIter(0);
-        assertThat(bytesValuesIter.hasNext(), equalTo(true));
-        assertThat(bytesValuesIter.next(), equalTo(new BytesRef(two())));
-        assertThat(bytesValuesIter.hasNext(), equalTo(true));
-        assertThat(bytesValuesIter.next(), equalTo(new BytesRef(four())));
-        assertThat(bytesValuesIter.hasNext(), equalTo(false));
-
-        bytesValuesIter = bytesValues.getIter(1);
-        assertThat(bytesValuesIter.hasNext(), equalTo(false));
+        assertValues(bytesValues, 0, two(), four());
+        assertValues(bytesValues, 1, Strings.EMPTY_ARRAY);

         BytesValues hashedBytesValues = fieldData.getBytesValues();
|
||||||
|
|
||||||
|
@ -332,15 +305,10 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
|
||||||
assertThat(convert(hashedBytesValues, 1), equalTo(new HashedBytesRef(new BytesRef())));
|
assertThat(convert(hashedBytesValues, 1), equalTo(new HashedBytesRef(new BytesRef())));
|
||||||
assertThat(convert(hashedBytesValues, 2), equalTo(new HashedBytesRef(three())));
|
assertThat(convert(hashedBytesValues, 2), equalTo(new HashedBytesRef(three())));
|
||||||
|
|
||||||
BytesValues.Iter hashedBytesValuesIter = hashedBytesValues.getIter(0);
|
assertHashedValues(bytesValues, 0, two(), four());
|
||||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(true));
|
assertHashedValues(bytesValues, 1, Strings.EMPTY_ARRAY);
|
||||||
assertThat(new HashedBytesRef(hashedBytesValuesIter.next(), hashedBytesValuesIter.hash()), equalTo(new HashedBytesRef(two())));
|
assertHashedValues(hashedBytesValues, 0, two(), four());
|
||||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(true));
|
assertHashedValues(hashedBytesValues, 1, Strings.EMPTY_ARRAY);
|
||||||
assertThat(new HashedBytesRef(hashedBytesValuesIter.next(), hashedBytesValuesIter.hash()), equalTo(new HashedBytesRef(four())));
|
|
||||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(false));
|
|
||||||
|
|
||||||
hashedBytesValuesIter = hashedBytesValues.getIter(1);
|
|
||||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(false));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testMissingValueForAll() throws Exception {
|
public void testMissingValueForAll() throws Exception {
|
||||||
|
@ -360,45 +328,25 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
|
||||||
assertThat(bytesValues.hasValue(1), equalTo(false));
|
assertThat(bytesValues.hasValue(1), equalTo(false));
|
||||||
assertThat(bytesValues.hasValue(2), equalTo(false));
|
assertThat(bytesValues.hasValue(2), equalTo(false));
|
||||||
|
|
||||||
assertThat(bytesValues.getValue(0), nullValue());
|
assertThat(bytesValues.getValue(0), equalTo(new BytesRef()));
|
||||||
assertThat(bytesValues.getValue(1), nullValue());
|
assertThat(bytesValues.getValue(1), equalTo(new BytesRef()));
|
||||||
assertThat(bytesValues.getValue(2), nullValue());
|
assertThat(bytesValues.getValue(2), equalTo(new BytesRef()));
|
||||||
|
|
||||||
BytesRef bytesRef = new BytesRef();
|
|
||||||
assertThat(bytesValues.getValueScratch(0, bytesRef), equalTo(new BytesRef()));
|
|
||||||
assertThat(bytesRef, equalTo(new BytesRef()));
|
|
||||||
assertThat(bytesValues.getValueScratch(1, bytesRef), equalTo(new BytesRef()));
|
|
||||||
assertThat(bytesRef, equalTo(new BytesRef()));
|
|
||||||
assertThat(bytesValues.getValueScratch(2, bytesRef), equalTo(new BytesRef()));
|
|
||||||
assertThat(bytesRef, equalTo(new BytesRef()));
|
|
||||||
|
|
||||||
BytesValues.Iter bytesValuesIter = bytesValues.getIter(0);
|
|
||||||
assertThat(bytesValuesIter.hasNext(), equalTo(false));
|
|
||||||
|
|
||||||
bytesValuesIter = bytesValues.getIter(1);
|
|
||||||
assertThat(bytesValuesIter.hasNext(), equalTo(false));
|
|
||||||
|
|
||||||
bytesValuesIter = bytesValues.getIter(2);
|
|
||||||
assertThat(bytesValuesIter.hasNext(), equalTo(false));
|
|
||||||
|
|
||||||
|
assertValues(bytesValues, 0, Strings.EMPTY_ARRAY);
|
||||||
|
assertValues(bytesValues, 1, Strings.EMPTY_ARRAY);
|
||||||
|
assertValues(bytesValues, 2, Strings.EMPTY_ARRAY);
|
||||||
BytesValues hashedBytesValues = fieldData.getBytesValues();
|
BytesValues hashedBytesValues = fieldData.getBytesValues();
|
||||||
|
|
||||||
assertThat(hashedBytesValues.hasValue(0), equalTo(false));
|
assertThat(hashedBytesValues.hasValue(0), equalTo(false));
|
||||||
assertThat(hashedBytesValues.hasValue(1), equalTo(false));
|
assertThat(hashedBytesValues.hasValue(1), equalTo(false));
|
||||||
assertThat(hashedBytesValues.hasValue(2), equalTo(false));
|
assertThat(hashedBytesValues.hasValue(2), equalTo(false));
|
||||||
|
|
||||||
assertThat(hashedBytesValues.getValue(0), nullValue());
|
assertThat(hashedBytesValues.getValue(0), equalTo(new BytesRef()));
|
||||||
assertThat(hashedBytesValues.getValue(1), nullValue());
|
assertThat(hashedBytesValues.getValue(1), equalTo(new BytesRef()));
|
||||||
assertThat(hashedBytesValues.getValue(2), nullValue());
|
assertThat(hashedBytesValues.getValue(2), equalTo(new BytesRef()));
|
||||||
|
assertValues(hashedBytesValues, 0, Strings.EMPTY_ARRAY);
|
||||||
BytesValues.Iter hashedBytesValuesIter = hashedBytesValues.getIter(0);
|
assertValues(hashedBytesValues, 1, Strings.EMPTY_ARRAY);
|
||||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(false));
|
assertValues(hashedBytesValues, 2, Strings.EMPTY_ARRAY);
|
||||||
|
|
||||||
hashedBytesValuesIter = hashedBytesValues.getIter(1);
|
|
||||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(false));
|
|
||||||
|
|
||||||
hashedBytesValuesIter = hashedBytesValues.getIter(2);
|
|
||||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(false));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
protected abstract void fillAllMissing() throws Exception;
|
protected abstract void fillAllMissing() throws Exception;
|
||||||
|
|
|
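
The hunks above replace the old getIter()/hasNext() checks with the size-bounded setDocument()/nextValue() calls the tests now exercise. The following is a minimal sketch, not part of this commit, of how a consumer would drive the new BytesValues contract shown in the diff (setDocument(int) returns the number of values for the document, nextValue() returns a shared scratch BytesRef, and getValue(int) yields an empty BytesRef for missing documents); the collectValues helper and class name are hypothetical:

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.lucene.util.BytesRef;
    import org.elasticsearch.index.fielddata.BytesValues;

    // Hypothetical helper illustrating the bounded iteration exercised above.
    class BytesValuesIterationExample {
        static List<BytesRef> collectValues(BytesValues values, int docId) {
            final int numValues = values.setDocument(docId); // 0 when the document has no value
            final List<BytesRef> copies = new ArrayList<BytesRef>(numValues);
            for (int i = 0; i < numValues; i++) {            // bounded loop, no hasNext() needed
                // nextValue() hands back a shared scratch BytesRef, so copy before retaining it
                copies.add(BytesRef.deepCopyOf(values.nextValue()));
            }
            return copies;
        }
    }
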
@@ -58,21 +58,14 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
         assertThat(longValues.getValueMissing(0, -1), equalTo(2l));
         assertThat(longValues.getValueMissing(1, -1), equalTo(1l));
         assertThat(longValues.getValueMissing(2, -1), equalTo(3l));

-        LongValues.Iter longValuesIter = longValues.getIter(0);
-        assertThat(longValuesIter.hasNext(), equalTo(true));
-        assertThat(longValuesIter.next(), equalTo(2l));
-        assertThat(longValuesIter.hasNext(), equalTo(false));
-
-        longValuesIter = longValues.getIter(1);
-        assertThat(longValuesIter.hasNext(), equalTo(true));
-        assertThat(longValuesIter.next(), equalTo(1l));
-        assertThat(longValuesIter.hasNext(), equalTo(false));
-
-        longValuesIter = longValues.getIter(2);
-        assertThat(longValuesIter.hasNext(), equalTo(true));
-        assertThat(longValuesIter.next(), equalTo(3l));
-        assertThat(longValuesIter.hasNext(), equalTo(false));
+        assertThat(longValues.setDocument(0), equalTo(1));
+        assertThat(longValues.nextValue(), equalTo(2l));
+
+        assertThat(longValues.setDocument(1), equalTo(1));
+        assertThat(longValues.nextValue(), equalTo(1l));
+
+        assertThat(longValues.setDocument(2), equalTo(1));
+        assertThat(longValues.nextValue(), equalTo(3l));

         DoubleValues doubleValues = fieldData.getDoubleValues();

@@ -90,20 +83,14 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
         assertThat(doubleValues.getValueMissing(1, -1), equalTo(1d));
         assertThat(doubleValues.getValueMissing(2, -1), equalTo(3d));

-        DoubleValues.Iter doubleValuesIter = doubleValues.getIter(0);
-        assertThat(doubleValuesIter.hasNext(), equalTo(true));
-        assertThat(doubleValuesIter.next(), equalTo(2d));
-        assertThat(doubleValuesIter.hasNext(), equalTo(false));
-
-        doubleValuesIter = doubleValues.getIter(1);
-        assertThat(doubleValuesIter.hasNext(), equalTo(true));
-        assertThat(doubleValuesIter.next(), equalTo(1d));
-        assertThat(doubleValuesIter.hasNext(), equalTo(false));
-
-        doubleValuesIter = doubleValues.getIter(2);
-        assertThat(doubleValuesIter.hasNext(), equalTo(true));
-        assertThat(doubleValuesIter.next(), equalTo(3d));
-        assertThat(doubleValuesIter.hasNext(), equalTo(false));
+        assertThat(1, equalTo(doubleValues.setDocument(0)));
+        assertThat(doubleValues.nextValue(), equalTo(2d));
+
+        assertThat(1, equalTo(doubleValues.setDocument(1)));
+        assertThat(doubleValues.nextValue(), equalTo(1d));
+
+        assertThat(1, equalTo(doubleValues.setDocument(2)));
+        assertThat(doubleValues.nextValue(), equalTo(3d));

         IndexSearcher searcher = new IndexSearcher(readerContext.reader());
         TopFieldDocs topDocs;

@@ -145,19 +132,14 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
         assertThat(longValues.getValueMissing(0, -1), equalTo(2l));
         assertThat(longValues.getValueMissing(1, -1), equalTo(-1l));
         assertThat(longValues.getValueMissing(2, -1), equalTo(3l));

-        LongValues.Iter longValuesIter = longValues.getIter(0);
-        assertThat(longValuesIter.hasNext(), equalTo(true));
-        assertThat(longValuesIter.next(), equalTo(2l));
-        assertThat(longValuesIter.hasNext(), equalTo(false));
-
-        longValuesIter = longValues.getIter(1);
-        assertThat(longValuesIter.hasNext(), equalTo(false));
-
-        longValuesIter = longValues.getIter(2);
-        assertThat(longValuesIter.hasNext(), equalTo(true));
-        assertThat(longValuesIter.next(), equalTo(3l));
-        assertThat(longValuesIter.hasNext(), equalTo(false));
+        assertThat(longValues.setDocument(0), equalTo(1));
+        assertThat(longValues.nextValue(), equalTo(2l));
+
+        assertThat(longValues.setDocument(1), equalTo(0));
+
+        assertThat(longValues.setDocument(2), equalTo(1));
+        assertThat(longValues.nextValue(), equalTo(3l));

         DoubleValues doubleValues = fieldData.getDoubleValues();

@@ -174,19 +156,14 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
         assertThat(doubleValues.getValueMissing(1, -1), equalTo(-1d));
         assertThat(doubleValues.getValueMissing(2, -1), equalTo(3d));

-        DoubleValues.Iter doubleValuesIter = doubleValues.getIter(0);
-        assertThat(doubleValuesIter.hasNext(), equalTo(true));
-        assertThat(doubleValuesIter.next(), equalTo(2d));
-        assertThat(doubleValuesIter.hasNext(), equalTo(false));
-
-        doubleValuesIter = doubleValues.getIter(1);
-        assertThat(doubleValuesIter.hasNext(), equalTo(false));
-
-        doubleValuesIter = doubleValues.getIter(2);
-        assertThat(doubleValuesIter.hasNext(), equalTo(true));
-        assertThat(doubleValuesIter.next(), equalTo(3d));
-        assertThat(doubleValuesIter.hasNext(), equalTo(false));
+        assertThat(1, equalTo(doubleValues.setDocument(0)));
+        assertThat(doubleValues.nextValue(), equalTo(2d));
+
+        assertThat(0, equalTo(doubleValues.setDocument(1)));
+
+        assertThat(1, equalTo(doubleValues.setDocument(2)));
+        assertThat(doubleValues.nextValue(), equalTo(3d));

         IndexSearcher searcher = new IndexSearcher(readerContext.reader());
         TopFieldDocs topDocs;

@@ -257,22 +234,15 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
         assertThat(longValues.getValueMissing(1, -1), equalTo(1l));
         assertThat(longValues.getValueMissing(2, -1), equalTo(3l));

-        LongValues.Iter longValuesIter = longValues.getIter(0);
-        assertThat(longValuesIter.hasNext(), equalTo(true));
-        assertThat(longValuesIter.next(), equalTo(2l));
-        assertThat(longValuesIter.hasNext(), equalTo(true));
-        assertThat(longValuesIter.next(), equalTo(4l));
-        assertThat(longValuesIter.hasNext(), equalTo(false));
-
-        longValuesIter = longValues.getIter(1);
-        assertThat(longValuesIter.hasNext(), equalTo(true));
-        assertThat(longValuesIter.next(), equalTo(1l));
-        assertThat(longValuesIter.hasNext(), equalTo(false));
-
-        longValuesIter = longValues.getIter(2);
-        assertThat(longValuesIter.hasNext(), equalTo(true));
-        assertThat(longValuesIter.next(), equalTo(3l));
-        assertThat(longValuesIter.hasNext(), equalTo(false));
+        assertThat(longValues.setDocument(0), equalTo(2));
+        assertThat(longValues.nextValue(), equalTo(2l));
+        assertThat(longValues.nextValue(), equalTo(4l));
+
+        assertThat(longValues.setDocument(1), equalTo(1));
+        assertThat(longValues.nextValue(), equalTo(1l));
+
+        assertThat(longValues.setDocument(2), equalTo(1));
+        assertThat(longValues.nextValue(), equalTo(3l));

         DoubleValues doubleValues = fieldData.getDoubleValues();

@@ -290,22 +260,15 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
         assertThat(doubleValues.getValueMissing(1, -1), equalTo(1d));
         assertThat(doubleValues.getValueMissing(2, -1), equalTo(3d));

-        DoubleValues.Iter doubleValuesIter = doubleValues.getIter(0);
-        assertThat(doubleValuesIter.hasNext(), equalTo(true));
-        assertThat(doubleValuesIter.next(), equalTo(2d));
-        assertThat(doubleValuesIter.hasNext(), equalTo(true));
-        assertThat(doubleValuesIter.next(), equalTo(4d));
-        assertThat(doubleValuesIter.hasNext(), equalTo(false));
-
-        doubleValuesIter = doubleValues.getIter(1);
-        assertThat(doubleValuesIter.hasNext(), equalTo(true));
-        assertThat(doubleValuesIter.next(), equalTo(1d));
-        assertThat(doubleValuesIter.hasNext(), equalTo(false));
-
-        doubleValuesIter = doubleValues.getIter(2);
-        assertThat(doubleValuesIter.hasNext(), equalTo(true));
-        assertThat(doubleValuesIter.next(), equalTo(3d));
-        assertThat(doubleValuesIter.hasNext(), equalTo(false));
+        assertThat(2, equalTo(doubleValues.setDocument(0)));
+        assertThat(doubleValues.nextValue(), equalTo(2d));
+        assertThat(doubleValues.nextValue(), equalTo(4d));
+
+        assertThat(1, equalTo(doubleValues.setDocument(1)));
+        assertThat(doubleValues.nextValue(), equalTo(1d));
+
+        assertThat(1, equalTo(doubleValues.setDocument(2)));
+        assertThat(doubleValues.nextValue(), equalTo(3d));
     }

     @Test

@@ -331,20 +294,14 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
         assertThat(longValues.getValueMissing(1, -1), equalTo(-1l));
         assertThat(longValues.getValueMissing(2, -1), equalTo(3l));

-        LongValues.Iter longValuesIter = longValues.getIter(0);
-        assertThat(longValuesIter.hasNext(), equalTo(true));
-        assertThat(longValuesIter.next(), equalTo(2l));
-        assertThat(longValuesIter.hasNext(), equalTo(true));
-        assertThat(longValuesIter.next(), equalTo(4l));
-        assertThat(longValuesIter.hasNext(), equalTo(false));
-
-        longValuesIter = longValues.getIter(1);
-        assertThat(longValuesIter.hasNext(), equalTo(false));
-
-        longValuesIter = longValues.getIter(2);
-        assertThat(longValuesIter.hasNext(), equalTo(true));
-        assertThat(longValuesIter.next(), equalTo(3l));
-        assertThat(longValuesIter.hasNext(), equalTo(false));
+        assertThat(longValues.setDocument(0), equalTo(2));
+        assertThat(longValues.nextValue(), equalTo(2l));
+        assertThat(longValues.nextValue(), equalTo(4l));
+
+        assertThat(longValues.setDocument(1), equalTo(0));
+
+        assertThat(longValues.setDocument(2), equalTo(1));
+        assertThat(longValues.nextValue(), equalTo(3l));

         DoubleValues doubleValues = fieldData.getDoubleValues();

@@ -361,20 +318,15 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
         assertThat(doubleValues.getValueMissing(1, -1), equalTo(-1d));
         assertThat(doubleValues.getValueMissing(2, -1), equalTo(3d));

-        DoubleValues.Iter doubleValuesIter = doubleValues.getIter(0);
-        assertThat(doubleValuesIter.hasNext(), equalTo(true));
-        assertThat(doubleValuesIter.next(), equalTo(2d));
-        assertThat(doubleValuesIter.hasNext(), equalTo(true));
-        assertThat(doubleValuesIter.next(), equalTo(4d));
-        assertThat(doubleValuesIter.hasNext(), equalTo(false));
-
-        doubleValuesIter = doubleValues.getIter(1);
-        assertThat(doubleValuesIter.hasNext(), equalTo(false));
-
-        doubleValuesIter = doubleValues.getIter(2);
-        assertThat(doubleValuesIter.hasNext(), equalTo(true));
-        assertThat(doubleValuesIter.next(), equalTo(3d));
-        assertThat(doubleValuesIter.hasNext(), equalTo(false));
+        assertThat(2, equalTo(doubleValues.setDocument(0)));
+        assertThat(doubleValues.nextValue(), equalTo(2d));
+        assertThat(doubleValues.nextValue(), equalTo(4d));
+
+        assertThat(0, equalTo(doubleValues.setDocument(1)));
+
+        assertThat(1, equalTo(doubleValues.setDocument(2)));
+        assertThat(doubleValues.nextValue(), equalTo(3d));
     }

     @Test

@@ -399,14 +351,10 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
         assertThat(longValues.getValueMissing(1, -1), equalTo(-1l));
         assertThat(longValues.getValueMissing(2, -1), equalTo(-1l));

-        LongValues.Iter longValuesIter = longValues.getIter(0);
-        assertThat(longValuesIter.hasNext(), equalTo(false));
-
-        longValuesIter = longValues.getIter(1);
-        assertThat(longValuesIter.hasNext(), equalTo(false));
-
-        longValuesIter = longValues.getIter(2);
-        assertThat(longValuesIter.hasNext(), equalTo(false));
+        assertThat(longValues.setDocument(0), equalTo(0));
+        assertThat(longValues.setDocument(1), equalTo(0));
+        assertThat(longValues.setDocument(2), equalTo(0));

         // double values

@@ -422,15 +370,13 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
         assertThat(doubleValues.getValueMissing(1, -1), equalTo(-1d));
         assertThat(doubleValues.getValueMissing(2, -1), equalTo(-1d));

-        DoubleValues.Iter doubleValuesIter = doubleValues.getIter(0);
-        assertThat(doubleValuesIter.hasNext(), equalTo(false));
-
-        doubleValuesIter = doubleValues.getIter(1);
-        assertThat(doubleValuesIter.hasNext(), equalTo(false));
-
-        doubleValuesIter = doubleValues.getIter(2);
-        assertThat(doubleValuesIter.hasNext(), equalTo(false));
-    }
+        assertThat(0, equalTo(doubleValues.setDocument(0)));
+        assertThat(0, equalTo(doubleValues.setDocument(1)));
+        assertThat(0, equalTo(doubleValues.setDocument(2)));
+    }

     protected void fillAllMissing() throws Exception {
         Document d = new Document();
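
The same pattern applies to the numeric views tested above: setDocument(int) reports how many values the document has (0 for missing documents), and each nextValue() call returns the next one. A small illustrative sketch under that assumption follows; the sumValues helper and class name are hypothetical, and the LongValues import path is assumed from the surrounding test code:

    import org.elasticsearch.index.fielddata.LongValues;

    // Illustrative only: sums every long value of one document using the
    // bounded setDocument(int)/nextValue() iteration exercised above.
    class LongValuesIterationExample {
        static long sumValues(LongValues values, int docId) {
            final int numValues = values.setDocument(docId); // value count for this doc
            long sum = 0;
            for (int i = 0; i < numValues; i++) {
                sum += values.nextValue();                   // each call yields the next value
            }
            return sum;                                      // 0 when the document has no value
        }
    }
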
@@ -27,15 +27,14 @@ import org.apache.lucene.util.English;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.NumericUtils;
 import org.elasticsearch.common.settings.ImmutableSettings;
-import org.elasticsearch.index.fielddata.BytesValues.Iter;
 import org.elasticsearch.index.mapper.FieldMapper;
+import org.hamcrest.Matchers;
 import org.junit.Test;

 import java.util.*;
 import java.util.Map.Entry;

 import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.nullValue;

 public class DuelFieldDataTests extends AbstractFieldDataTests {

@@ -349,50 +348,55 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
     }

-    private static void duelFieldDataBytes(Random random, AtomicReaderContext context, IndexFieldData left, IndexFieldData right, Preprocessor pre) throws Exception {
-        AtomicFieldData leftData = random.nextBoolean() ? left.load(context) : left.loadDirect(context);
-        AtomicFieldData rightData = random.nextBoolean() ? right.load(context) : right.loadDirect(context);
+    private static void duelFieldDataBytes(Random random, AtomicReaderContext context, IndexFieldData<?> left, IndexFieldData<?> right, Preprocessor pre) throws Exception {
+        AtomicFieldData<?> leftData = random.nextBoolean() ? left.load(context) : left.loadDirect(context);
+        AtomicFieldData<?> rightData = random.nextBoolean() ? right.load(context) : right.loadDirect(context);
         assertThat(leftData.getNumDocs(), equalTo(rightData.getNumDocs()));

         int numDocs = leftData.getNumDocs();
         BytesValues leftBytesValues = random.nextBoolean() ? leftData.getBytesValues() : leftData.getHashedBytesValues();
         BytesValues rightBytesValues = random.nextBoolean() ? rightData.getBytesValues() : rightData.getHashedBytesValues();
+        BytesRef leftSpare = new BytesRef();
+        BytesRef rightSpare = new BytesRef();
         for (int i = 0; i < numDocs; i++) {
             assertThat(leftBytesValues.hasValue(i), equalTo(rightBytesValues.hasValue(i)));
             if (leftBytesValues.hasValue(i)) {
                 assertThat(pre.toString(leftBytesValues.getValue(i)), equalTo(pre.toString(rightBytesValues.getValue(i))));
             } else {
-                assertThat(leftBytesValues.getValue(i), nullValue());
-                assertThat(rightBytesValues.getValue(i), nullValue());
+                assertThat(leftBytesValues.getValue(i), equalTo(new BytesRef()));
+                assertThat(rightBytesValues.getValue(i), equalTo(new BytesRef()));
             }

-            boolean hasValue = leftBytesValues.hasValue(i);
-            Iter leftIter = leftBytesValues.getIter(i);
-            Iter rightIter = rightBytesValues.getIter(i);
-            assertThat(leftIter.hasNext(), equalTo(rightIter.hasNext()));
-            assertThat(leftIter.hasNext(), equalTo(hasValue));
-
-            while (leftIter.hasNext()) {
-                assertThat(hasValue, equalTo(true));
-                assertThat(leftIter.hasNext(), equalTo(rightIter.hasNext()));
-                BytesRef rightBytes = rightIter.next();
-                BytesRef leftBytes = leftIter.next();
+            int numValues = 0;
+            if (leftBytesValues.hasValue(i)) {
+                assertThat(rightBytesValues.hasValue(i), equalTo(true));
+                assertThat(leftBytesValues.setDocument(i), Matchers.greaterThanOrEqualTo(1));
+                assertThat(rightBytesValues.setDocument(i), Matchers.greaterThanOrEqualTo(1));
+            } else {
+                assertThat(rightBytesValues.hasValue(i), equalTo(false));
+                assertThat(leftBytesValues.setDocument(i), equalTo(0));
+                assertThat(rightBytesValues.setDocument(i), equalTo(0));
+            }

-                assertThat(pre.toString(leftBytes), equalTo(pre.toString(rightBytes)));
-                if (rightBytes.equals(leftBytes)) {
-                    assertThat(leftIter.hash(), equalTo(rightIter.hash()));// call twice
-                    assertThat(leftIter.hash(), equalTo(rightIter.hash()));
-                    assertThat(leftIter.hash(), equalTo(rightBytes.hashCode()));
-                    assertThat(rightIter.hash(), equalTo(leftBytes.hashCode()));
+            assertThat((numValues = leftBytesValues.setDocument(i)), equalTo(rightBytesValues.setDocument(i)));
+            for (int j = 0; j < numValues; j++) {
+                rightSpare.copyBytes(rightBytesValues.nextValue());
+                leftSpare.copyBytes(leftBytesValues.nextValue());
+                assertThat(rightSpare.hashCode(), equalTo(rightBytesValues.currentValueHash()));
+                assertThat(leftSpare.hashCode(), equalTo(leftBytesValues.currentValueHash()));
+                pre.toString(rightSpare);
+                pre.toString(leftSpare);
+                assertThat(pre.toString(leftSpare), equalTo(pre.toString(rightSpare)));
+                if (leftSpare.equals(rightSpare)) {
+                    assertThat(leftBytesValues.currentValueHash(), equalTo(rightBytesValues.currentValueHash()));
                 }
             }
-            assertThat(leftIter.hasNext(), equalTo(rightIter.hasNext()));
         }
     }

-    private static void duelFieldDataDouble(Random random, AtomicReaderContext context, IndexNumericFieldData left, IndexNumericFieldData right) throws Exception {
+    private static void duelFieldDataDouble(Random random, AtomicReaderContext context, IndexNumericFieldData<?> left, IndexNumericFieldData<?> right) throws Exception {
         AtomicNumericFieldData leftData = random.nextBoolean() ? left.load(context) : left.loadDirect(context);
         AtomicNumericFieldData rightData = random.nextBoolean() ? right.load(context) : right.loadDirect(context);

@@ -405,27 +409,16 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
             assertThat(leftDoubleValues.hasValue(i), equalTo(rightDoubleValues.hasValue(i)));
             if (leftDoubleValues.hasValue(i)) {
                 assertThat(leftDoubleValues.getValue(i), equalTo(rightDoubleValues.getValue(i)));
             } else {
                 assertThat(leftDoubleValues.getValue(i), equalTo(0d));
                 assertThat(rightDoubleValues.getValue(i), equalTo(0d));
             }

-            boolean hasValue = leftDoubleValues.hasValue(i);
-            DoubleValues.Iter leftIter = leftDoubleValues.getIter(i);
-            DoubleValues.Iter rightIter = rightDoubleValues.getIter(i);
-            assertThat(leftIter.hasNext(), equalTo(rightIter.hasNext()));
-            assertThat(leftIter.hasNext(), equalTo(hasValue));
-
-            while (leftIter.hasNext()) {
-                assertThat(hasValue, equalTo(true));
-                assertThat(leftIter.hasNext(), equalTo(rightIter.hasNext()));
-                double rightValue = rightIter.next();
-                double leftValue = leftIter.next();
-
-                assertThat(leftValue, equalTo(rightValue));
+            int numValues = 0;
+            assertThat((numValues = leftDoubleValues.setDocument(i)), equalTo(rightDoubleValues.setDocument(i)));
+            for (int j = 0; j < numValues; j++) {
+                assertThat(leftDoubleValues.nextValue(), equalTo(rightDoubleValues.nextValue()));
             }
-            assertThat(leftIter.hasNext(), equalTo(rightIter.hasNext()));
         }
     }

@@ -436,33 +429,23 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
         assertThat(leftData.getNumDocs(), equalTo(rightData.getNumDocs()));

         int numDocs = leftData.getNumDocs();
-        LongValues leftDoubleValues = leftData.getLongValues();
-        LongValues rightDoubleValues = rightData.getLongValues();
+        LongValues leftLongValues = leftData.getLongValues();
+        LongValues rightLongValues = rightData.getLongValues();
         for (int i = 0; i < numDocs; i++) {
-            assertThat(leftDoubleValues.hasValue(i), equalTo(rightDoubleValues.hasValue(i)));
-            if (leftDoubleValues.hasValue(i)) {
-                assertThat(leftDoubleValues.getValue(i), equalTo(rightDoubleValues.getValue(i)));
+            assertThat(leftLongValues.hasValue(i), equalTo(rightLongValues.hasValue(i)));
+            if (leftLongValues.hasValue(i)) {
+                assertThat(leftLongValues.getValue(i), equalTo(rightLongValues.getValue(i)));
             } else {
-                assertThat(leftDoubleValues.getValue(i), equalTo(0l));
-                assertThat(rightDoubleValues.getValue(i), equalTo(0l));
+                assertThat(leftLongValues.getValue(i), equalTo(0l));
+                assertThat(rightLongValues.getValue(i), equalTo(0l));
             }

-            boolean hasValue = leftDoubleValues.hasValue(i);
-            LongValues.Iter leftIter = leftDoubleValues.getIter(i);
-            LongValues.Iter rightIter = rightDoubleValues.getIter(i);
-            assertThat(leftIter.hasNext(), equalTo(rightIter.hasNext()));
-            assertThat(leftIter.hasNext(), equalTo(hasValue));
-
-            while (leftIter.hasNext()) {
-                assertThat(hasValue, equalTo(true));
-                assertThat(leftIter.hasNext(), equalTo(rightIter.hasNext()));
-                long rightValue = rightIter.next();
-                long leftValue = leftIter.next();
-
-                assertThat(leftValue, equalTo(rightValue));
+            int numValues = 0;
+            assertThat((numValues = leftLongValues.setDocument(i)), equalTo(rightLongValues.setDocument(i)));
+            for (int j = 0; j < numValues; j++) {
+                assertThat(leftLongValues.nextValue(), equalTo(rightLongValues.nextValue()));
             }
-            assertThat(leftIter.hasNext(), equalTo(rightIter.hasNext()));
         }
     }

@@ -476,6 +459,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {

     private static class ToDoublePreprocessor extends Preprocessor {
         public String toString(BytesRef ref) {
+            assert ref.length > 0;
             return Double.toString(Double.parseDouble(super.toString(ref)));
         }
     }

@@ -486,3 +470,4 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
     }

 }
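
duelFieldDataBytes above also verifies currentValueHash() against the hash of the value it just copied out of the scratch ref. A hypothetical sketch of that check in isolation, under the same assumed BytesValues contract (setDocument/nextValue/currentValueHash as shown in the hunk above):

    import org.apache.lucene.util.BytesRef;
    import org.elasticsearch.index.fielddata.BytesValues;

    // Hypothetical sketch mirroring the duel test: for one document, confirm that
    // currentValueHash() matches the hash of the value just returned by nextValue().
    class HashedBytesIterationExample {
        static void checkHashes(BytesValues values, int docId) {
            final int numValues = values.setDocument(docId);
            for (int i = 0; i < numValues; i++) {
                BytesRef copy = BytesRef.deepCopyOf(values.nextValue()); // copy the scratch ref
                assert copy.hashCode() == values.currentValueHash()
                        : "hash of the current value should match currentValueHash()";
            }
        }
    }
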
@@ -334,8 +334,9 @@ public class LongFieldDataTests extends AbstractNumericFieldDataTests {
             }

             set.clear();
-            for (LongValues.Iter iter = data.getIter(i); iter.hasNext(); ) {
-                set.add(iter.next());
+            int numValues = data.setDocument(i);
+            for (int j = 0; j < numValues; j++) {
+                set.add(data.nextValue());
             }
             assertThat(set, equalTo(v));

@@ -348,8 +349,9 @@ public class LongFieldDataTests extends AbstractNumericFieldDataTests {
                 }
             }
             doubleSet.clear();
-            for (DoubleValues.Iter iter = doubleData.getIter(i); iter.hasNext(); ) {
-                doubleSet.add(iter.next());
+            numValues = doubleData.setDocument(i);
+            for (int j = 0; j < numValues; j++) {
+                doubleSet.add(doubleData.nextValue());
             }
             assertThat(doubleSet, equalTo(doubleV));
         }
@@ -124,7 +124,7 @@ public class MultiOrdinalsTests extends ElasticsearchTestCase {
                 for (int i = 0; i < array.length; i++) {
                     array[i] = docOrds.get(i);
                 }
-                assertIter(docs.getIter(docId), array);
+                assertIter(docs, docId, array);
             }
             for (int i = docId + 1; i < ordAndId.id; i++) {
                 assertThat(docs.getOrd(i), equalTo(0L));

@@ -215,12 +215,11 @@ public class MultiOrdinalsTests extends ElasticsearchTestCase {
         assertEquals(docs, ordinalPlan);
     }

-    protected static void assertIter(Ordinals.Docs.Iter iter, long... expectedOrdinals) {
+    protected static void assertIter(Ordinals.Docs docs, int docId, long... expectedOrdinals) {
+        assertThat(docs.setDocument(docId), equalTo(expectedOrdinals.length));
         for (long expectedOrdinal : expectedOrdinals) {
-            assertThat(iter.next(), equalTo(expectedOrdinal));
+            assertThat(docs.nextOrd(), equalTo(expectedOrdinal));
         }
-        assertThat(iter.next(), equalTo(0L)); // Last one should always be 0
-        assertThat(iter.next(), equalTo(0L)); // Just checking it stays 0
     }

     @Test

@@ -284,7 +283,7 @@ public class MultiOrdinalsTests extends ElasticsearchTestCase {
             assertThat(ref.offset, equalTo(0));
             long[] ords = ordinalPlan[doc];
             assertThat(ref, equalTo(new LongsRef(ords, 0, ords.length)));
-            assertIter(docs, doc, ords);
+            assertIter(docs, doc, ords);
         }
     }
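
The assertIter change above shows the equivalent contract on ordinals: setDocument(int) returns how many ordinals the document has and nextOrd() steps through them. A small illustrative sketch under that assumption; the docOrdinals helper and class name are hypothetical, and the Ordinals import path is assumed:

    import org.elasticsearch.index.fielddata.ordinals.Ordinals;

    // Illustrative helper mirroring assertIter above: gathers one document's
    // ordinals via the bounded setDocument(int)/nextOrd() iteration.
    class OrdinalsIterationExample {
        static long[] docOrdinals(Ordinals.Docs docs, int docId) {
            final int count = docs.setDocument(docId);  // 0 when the document has no ordinals
            final long[] ords = new long[count];
            for (int i = 0; i < count; i++) {
                ords[i] = docs.nextOrd();
            }
            return ords;
        }
    }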