Refactor FieldData iterations
This commit primarily folds [Double|Bytes|Long|GeoPoint]Values.Iter into [Double|Bytes|Long|GeoPoint]Values. Iterations now don't require an auxiliary class (Iter) but are instead driven by native for loops. All [Double|Bytes|Long|GeoPoint]Values are stateful and provide `setDocId` and `nextValue` methods to iterate over all values in a document. This has several advantages: * The amount of specialized classes is reduced * Iteration is clearly stateful, i.e. Iters can't be confused to be local. * All iterations are size bounded which prevents runtime checks and allows JIT optimizations / loop un-rolling and most iterations are branch free. * Due to the bounded iteration the need for a `hasNext` method call is removed. * Value iteration feels more native. This commit also adds consistent documentation and unifies the calculation if SortMode is involved. This commit also changes the runtime behavior of BytesValues#getValue() such that it will never return `null` anymore. If a document has no value in a field this method still returns a `BytesRef` with a `length` of 0. To identify documents with no values #hasValue() or #setDocument(int) should be used. The latter should be preferred if the value will be consumed in the case the document has a value.
This commit is contained in:
parent
7bd1a55f6e
commit
7867de4f5b
|
@ -31,10 +31,6 @@ public abstract class AbstractAtomicNumericFieldData implements AtomicNumericFie
|
|||
this.isFloat = isFloat;
|
||||
}
|
||||
|
||||
public abstract LongValues getLongValues();
|
||||
|
||||
public abstract DoubleValues getDoubleValues();
|
||||
|
||||
|
||||
@Override
|
||||
public ScriptDocValues getScriptValues() {
|
||||
|
@ -50,45 +46,31 @@ public abstract class AbstractAtomicNumericFieldData implements AtomicNumericFie
|
|||
if (isFloat) {
|
||||
final DoubleValues values = getDoubleValues();
|
||||
return new BytesValues(values.isMultiValued()) {
|
||||
|
||||
@Override
|
||||
public boolean hasValue(int docId) {
|
||||
return values.hasValue(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef getValueScratch(int docId, BytesRef ret) {
|
||||
public BytesRef getValue(int docId) {
|
||||
if (values.hasValue(docId)) {
|
||||
ret.copyChars(Double.toString(values.getValue(docId)));
|
||||
scratch.copyChars(Double.toString(values.getValue(docId)));
|
||||
} else {
|
||||
ret.length = 0;
|
||||
scratch.length = 0;
|
||||
}
|
||||
return ret;
|
||||
return scratch;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
final DoubleValues.Iter iter = values.getIter(docId);
|
||||
return new BytesValues.Iter() {
|
||||
private final BytesRef spare = new BytesRef();
|
||||
public int setDocument(int docId) {
|
||||
this.docId = docId;
|
||||
return values.setDocument(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return iter.hasNext();
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef next() {
|
||||
spare.copyChars(Double.toString(iter.next()));
|
||||
return spare;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hash() {
|
||||
return spare.hashCode();
|
||||
}
|
||||
|
||||
};
|
||||
@Override
|
||||
public BytesRef nextValue() {
|
||||
scratch.copyChars(Double.toString(values.nextValue()));
|
||||
return scratch;
|
||||
}
|
||||
};
|
||||
} else {
|
||||
|
@ -101,38 +83,25 @@ public abstract class AbstractAtomicNumericFieldData implements AtomicNumericFie
|
|||
}
|
||||
|
||||
@Override
|
||||
public BytesRef getValueScratch(int docId, BytesRef ret) {
|
||||
public BytesRef getValue(int docId) {
|
||||
if (values.hasValue(docId)) {
|
||||
ret.copyChars(Long.toString(values.getValue(docId)));
|
||||
scratch.copyChars(Long.toString(values.getValue(docId)));
|
||||
} else {
|
||||
ret.length = 0;
|
||||
scratch.length = 0;
|
||||
}
|
||||
return ret;
|
||||
return scratch;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
final LongValues.Iter iter = values.getIter(docId);
|
||||
return new BytesValues.Iter() {
|
||||
private final BytesRef spare = new BytesRef();
|
||||
public int setDocument(int docId) {
|
||||
this.docId = docId;
|
||||
return values.setDocument(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return iter.hasNext();
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef next() {
|
||||
spare.copyChars(Long.toString(iter.next()));
|
||||
return spare;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hash() {
|
||||
return spare.hashCode();
|
||||
}
|
||||
|
||||
};
|
||||
@Override
|
||||
public BytesRef nextValue() {
|
||||
scratch.copyChars(Long.toString(values.nextValue()));
|
||||
return scratch;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -142,5 +111,4 @@ public abstract class AbstractAtomicNumericFieldData implements AtomicNumericFie
|
|||
public BytesValues getHashedBytesValues() {
|
||||
return getBytesValues();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -22,7 +22,6 @@ package org.elasticsearch.index.fielddata;
|
|||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.geo.GeoHashUtils;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.index.fielddata.BytesValues.Iter;
|
||||
|
||||
/**
|
||||
*/
|
||||
|
@ -41,41 +40,30 @@ public abstract class AtomicGeoPointFieldData<Script extends ScriptDocValues> im
|
|||
}
|
||||
|
||||
@Override
|
||||
public BytesRef getValueScratch(int docId, BytesRef ret) {
|
||||
public BytesRef getValue(int docId) {
|
||||
GeoPoint value = values.getValue(docId);
|
||||
if (value != null) {
|
||||
ret.copyChars(GeoHashUtils.encode(value.lat(), value.lon()));
|
||||
scratch.copyChars(GeoHashUtils.encode(value.lat(), value.lon()));
|
||||
} else {
|
||||
ret.length = 0;
|
||||
scratch.length = 0;
|
||||
return scratch;
|
||||
}
|
||||
return ret;
|
||||
return scratch;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
final GeoPointValues.Iter iter = values.getIter(docId);
|
||||
return new BytesValues.Iter() {
|
||||
private final BytesRef spare = new BytesRef();
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return iter.hasNext();
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef next() {
|
||||
GeoPoint value = iter.next();
|
||||
spare.copyChars(GeoHashUtils.encode(value.lat(), value.lon()));
|
||||
return spare;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hash() {
|
||||
return spare.hashCode();
|
||||
}
|
||||
|
||||
};
|
||||
public int setDocument(int docId) {
|
||||
this.docId = docId;
|
||||
return values.setDocument(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef nextValue() {
|
||||
GeoPoint value = values.nextValue();
|
||||
scratch.copyChars(GeoHashUtils.encode(value.lat(), value.lon()));
|
||||
return scratch;
|
||||
}
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -25,13 +25,35 @@ import org.elasticsearch.index.fielddata.ordinals.Ordinals;
|
|||
import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs;
|
||||
|
||||
/**
|
||||
* A state-full lightweight per document set of <code>byte[]</code> values.
|
||||
*
|
||||
* To iterate over values in a document use the following pattern:
|
||||
* <pre>
|
||||
* BytesValues values = ..;
|
||||
* final int numValues = values.setDocId(docId);
|
||||
* for (int i = 0; i < numValues; i++) {
|
||||
* BytesRef value = values.nextValue();
|
||||
* // process value
|
||||
* }
|
||||
* </pre>
|
||||
*/
|
||||
public abstract class BytesValues {
|
||||
|
||||
/**
|
||||
* An empty {@link BytesValues instance}
|
||||
*/
|
||||
public static final BytesValues EMPTY = new Empty();
|
||||
|
||||
private boolean multiValued;
|
||||
|
||||
protected final BytesRef scratch = new BytesRef();
|
||||
|
||||
protected int docId = -1;
|
||||
|
||||
/**
|
||||
* Creates a new {@link BytesValues} instance
|
||||
* @param multiValued <code>true</code> iff this instance is multivalued. Otherwise <code>false</code>.
|
||||
*/
|
||||
protected BytesValues(boolean multiValued) {
|
||||
this.multiValued = multiValued;
|
||||
}
|
||||
|
@ -44,191 +66,70 @@ public abstract class BytesValues {
|
|||
}
|
||||
|
||||
/**
|
||||
* Is there a value for this doc?
|
||||
* Returns <code>true</code> if the given document ID has a value in this. Otherwise <code>false</code>.
|
||||
*/
|
||||
public abstract boolean hasValue(int docId);
|
||||
|
||||
/**
|
||||
* Converts the provided bytes to "safe" ones from a "non" safe call made (if needed). Note,
|
||||
* Converts the current shared {@link BytesRef} to a stable instance. Note,
|
||||
* this calls makes the bytes safe for *reads*, not writes (into the same BytesRef). For example,
|
||||
* it makes it safe to be placed in a map.
|
||||
*/
|
||||
public BytesRef makeSafe(BytesRef bytes) {
|
||||
return BytesRef.deepCopyOf(bytes);
|
||||
public BytesRef copyShared() {
|
||||
return BytesRef.deepCopyOf(scratch);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a bytes value for a docId. Note, the content of it might be shared across invocation.
|
||||
* Returns a value for the given document id. If the document
|
||||
* has more than one value the returned value is one of the values
|
||||
* associated with the document.
|
||||
*
|
||||
* Note: the {@link BytesRef} might be shared across invocations.
|
||||
*
|
||||
* @param docId the documents id.
|
||||
* @return a value for the given document id or a {@link BytesRef} with a length of <tt>0</tt>if the document
|
||||
* has no value.
|
||||
*/
|
||||
public BytesRef getValue(int docId) {
|
||||
if (hasValue(docId)) {
|
||||
return getValueScratch(docId, scratch);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
public abstract BytesRef getValue(int docId);
|
||||
|
||||
/**
|
||||
* Returns the bytes value for the docId, with the provided "ret" which will be filled with the
|
||||
* result which will also be returned. If there is no value for this docId, the length will be 0.
|
||||
* Implementations can either change the {@link BytesRef#bytes bytes reference} of the {@link BytesRef}
|
||||
* to point to an internal structure or modify the content of the {@link BytesRef} but should
|
||||
* always do it in a consistent way. For example, it is illegal to change the bytes content in
|
||||
* some call and to change the reference to point to an internal structure in another call, this
|
||||
* will lead to bugs. It is also illegal for callers to write into the {@link BytesRef#bytes bytes}
|
||||
* after this method has returned.
|
||||
* Sets iteration to the specified docID and returns the number of
|
||||
* values for this document ID,
|
||||
* @param docId document ID
|
||||
*
|
||||
* @see #nextValue()
|
||||
*/
|
||||
public abstract BytesRef getValueScratch(int docId, BytesRef ret);
|
||||
|
||||
public int setDocument(int docId) {
|
||||
this.docId = docId;
|
||||
return hasValue(docId) ? 1 : 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fills the given spare for the given doc ID and returns the hashcode of the reference as defined by
|
||||
* {@link BytesRef#hashCode()}
|
||||
* Returns the next value for the current docID set to {@link #setDocument(int)}.
|
||||
* This method should only be called <tt>N</tt> times where <tt>N</tt> is the number
|
||||
* returned from {@link #setDocument(int)}. If called more than <tt>N</tt> times the behavior
|
||||
* is undefined.
|
||||
*
|
||||
* Note: the returned {@link BytesRef} might be shared across invocations.
|
||||
*
|
||||
* @return the next value for the current docID set to {@link #setDocument(int)}.
|
||||
*/
|
||||
public int getValueHashed(int docId, BytesRef spare) {
|
||||
return getValueScratch(docId, spare).hashCode();
|
||||
public BytesRef nextValue() {
|
||||
assert docId != -1;
|
||||
return getValue(docId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a bytes value iterator for a docId. Note, the content of it might be shared across invocation.
|
||||
* Returns the hash value of the previously returned shared {@link BytesRef} instances.
|
||||
*
|
||||
* @return the hash value of the previously returned shared {@link BytesRef} instances.
|
||||
*/
|
||||
public abstract Iter getIter(int docId); // TODO: maybe this should return null for no values so we can safe one call?
|
||||
|
||||
|
||||
public static interface Iter {
|
||||
|
||||
/**
|
||||
* Returns whether this iterator still contains elements.
|
||||
*/
|
||||
boolean hasNext();
|
||||
|
||||
/**
|
||||
* Returns the next element of this iterator. Please note that the returned bytes may be
|
||||
* reused across invocations so they should be copied for later reference. The behavior of
|
||||
* this method is undefined if the iterator is exhausted.
|
||||
*/
|
||||
BytesRef next();
|
||||
|
||||
/**
|
||||
* Returns the hash value of the last {@link BytesRef} returned by {@link #next()}. The
|
||||
* behavior is undefined if this iterator is not positioned or exhausted.
|
||||
*/
|
||||
int hash();
|
||||
|
||||
public static class Empty implements Iter {
|
||||
|
||||
public static final Empty INSTANCE = new Empty();
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef next() {
|
||||
throw new ElasticSearchIllegalStateException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hash() {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
public static class Single implements Iter {
|
||||
|
||||
protected BytesRef value;
|
||||
protected long ord;
|
||||
protected boolean done;
|
||||
|
||||
public Single reset(BytesRef value, long ord) {
|
||||
this.value = value;
|
||||
this.ord = ord;
|
||||
this.done = false;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return !done;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef next() {
|
||||
assert !done;
|
||||
done = true;
|
||||
return value;
|
||||
}
|
||||
|
||||
public int hash() {
|
||||
return value.hashCode();
|
||||
}
|
||||
}
|
||||
|
||||
static class Multi implements Iter {
|
||||
|
||||
protected long innerOrd;
|
||||
protected long ord;
|
||||
protected BytesValues.WithOrdinals withOrds;
|
||||
protected Ordinals.Docs.Iter ordsIter;
|
||||
protected final BytesRef scratch = new BytesRef();
|
||||
|
||||
public Multi(WithOrdinals withOrds) {
|
||||
this.withOrds = withOrds;
|
||||
assert withOrds.isMultiValued();
|
||||
|
||||
}
|
||||
|
||||
public Multi reset(Ordinals.Docs.Iter ordsIter) {
|
||||
this.ordsIter = ordsIter;
|
||||
innerOrd = ord = ordsIter.next();
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return innerOrd != 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef next() {
|
||||
withOrds.getValueScratchByOrd(innerOrd, scratch);
|
||||
ord = innerOrd;
|
||||
innerOrd = ordsIter.next();
|
||||
return scratch;
|
||||
}
|
||||
|
||||
public int hash() {
|
||||
return scratch.hashCode();
|
||||
}
|
||||
}
|
||||
public int currentValueHash() {
|
||||
return scratch.hashCode();
|
||||
}
|
||||
|
||||
public static class Empty extends BytesValues {
|
||||
|
||||
public Empty() {
|
||||
super(false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasValue(int docId) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
return Iter.Empty.INSTANCE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef getValueScratch(int docId, BytesRef ret) {
|
||||
ret.length = 0;
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Bytes values that are based on ordinals.
|
||||
* Ordinal based {@link BytesValues}.
|
||||
*/
|
||||
public static abstract class WithOrdinals extends BytesValues {
|
||||
|
||||
|
@ -239,76 +140,86 @@ public abstract class BytesValues {
|
|||
this.ordinals = ordinals;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the associated ordinals instance.
|
||||
* @return the associated ordinals instance.
|
||||
*/
|
||||
public Ordinals.Docs ordinals() {
|
||||
return ordinals;
|
||||
}
|
||||
|
||||
public BytesRef getValueByOrd(long ord) {
|
||||
return getValueScratchByOrd(ord, scratch);
|
||||
}
|
||||
|
||||
protected Iter.Multi newMultiIter() {
|
||||
assert this.isMultiValued();
|
||||
return new Iter.Multi(this);
|
||||
}
|
||||
|
||||
protected Iter.Single newSingleIter() {
|
||||
assert !this.isMultiValued();
|
||||
return new Iter.Single();
|
||||
}
|
||||
/**
|
||||
* Returns the value for the given ordinal.
|
||||
* @param ord the ordinal to lookup.
|
||||
* @return a shared {@link BytesRef} instance holding the value associated
|
||||
* with the given ordinal or <code>null</code> if ordinal is <tt>0</tt>
|
||||
*/
|
||||
public abstract BytesRef getValueByOrd(long ord);
|
||||
|
||||
@Override
|
||||
public boolean hasValue(int docId) {
|
||||
return ordinals.getOrd(docId) != 0;
|
||||
return ordinals.getOrd(docId) != Ordinals.MISSING_ORDINAL;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef getValue(int docId) {
|
||||
final long ord = ordinals.getOrd(docId);
|
||||
if (ord == 0) {
|
||||
return null;
|
||||
if (ord == Ordinals.MISSING_ORDINAL) {
|
||||
scratch.length = 0;
|
||||
return scratch;
|
||||
}
|
||||
return getValueScratchByOrd(ord, scratch);
|
||||
return getValueByOrd(ord);
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef getValueScratch(int docId, BytesRef ret) {
|
||||
return getValueScratchByOrd(ordinals.getOrd(docId), ret);
|
||||
public int setDocument(int docId) {
|
||||
this.docId = docId;
|
||||
int length = ordinals.setDocument(docId);
|
||||
assert hasValue(docId) == length > 0 : "Doc: [" + docId + "] hasValue: [" + hasValue(docId) + "] but length is [" + length + "]";
|
||||
return length;
|
||||
}
|
||||
|
||||
public BytesRef getSafeValueByOrd(int ord) {
|
||||
return getValueScratchByOrd(ord, new BytesRef());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the bytes value for the docId, with the provided "ret" which will be filled with the
|
||||
* result which will also be returned. If there is no value for this docId, the length will be 0.
|
||||
* Implementations can either change the {@link BytesRef#bytes bytes reference} of the {@link BytesRef}
|
||||
* to point to an internal structure or modify the content of the {@link BytesRef} but should
|
||||
* always do it in a consistent way. For example, it is illegal to change the bytes content in
|
||||
* some call and to change the reference to point to an internal structure in another call, this
|
||||
* will lead to bugs. It is also illegal for callers to write into the {@link BytesRef#bytes bytes}
|
||||
* after this method has returned.
|
||||
*/
|
||||
public abstract BytesRef getValueScratchByOrd(long ord, BytesRef ret);
|
||||
|
||||
public static class Empty extends WithOrdinals {
|
||||
|
||||
public Empty(Ordinals.Docs ordinals) {
|
||||
super(ordinals);
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef getValueScratchByOrd(long ord, BytesRef ret) {
|
||||
ret.length = 0;
|
||||
return ret;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
return Iter.Empty.INSTANCE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef nextValue() {
|
||||
assert docId != -1;
|
||||
return getValueByOrd(ordinals.nextOrd());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* An empty {@link BytesValues} implementation
|
||||
*/
|
||||
private final static class Empty extends BytesValues {
|
||||
|
||||
Empty() {
|
||||
super(false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasValue(int docId) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef getValue(int docId) {
|
||||
scratch.length = 0;
|
||||
return scratch;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int setDocument(int docId) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef nextValue() {
|
||||
throw new ElasticSearchIllegalStateException("Empty BytesValues has no next value");
|
||||
}
|
||||
|
||||
@Override
|
||||
public int currentValueHash() {
|
||||
throw new ElasticSearchIllegalStateException("Empty BytesValues has no hash for the current Value");
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@ -24,14 +24,33 @@ import org.elasticsearch.index.fielddata.ordinals.Ordinals;
|
|||
import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs;
|
||||
|
||||
/**
|
||||
* A state-full lightweight per document set of <code>double</code> values.
|
||||
*
|
||||
* To iterate over values in a document use the following pattern:
|
||||
* <pre>
|
||||
* DoubleValues values = ..;
|
||||
* final int numValues = values.setDocId(docId);
|
||||
* for (int i = 0; i < numValues; i++) {
|
||||
* double value = values.nextValue();
|
||||
* // process value
|
||||
* }
|
||||
* </pre>
|
||||
*/
|
||||
public abstract class DoubleValues {
|
||||
|
||||
/**
|
||||
* An empty {@link DoubleValues instance}
|
||||
*/
|
||||
public static final DoubleValues EMPTY = new Empty();
|
||||
|
||||
private final boolean multiValued;
|
||||
protected final Iter.Single iter = new Iter.Single();
|
||||
|
||||
protected int docId;
|
||||
|
||||
/**
|
||||
* Creates a new {@link DoubleValues} instance
|
||||
* @param multiValued <code>true</code> iff this instance is multivalued. Otherwise <code>false</code>.
|
||||
*/
|
||||
protected DoubleValues(boolean multiValued) {
|
||||
this.multiValued = multiValued;
|
||||
}
|
||||
|
@ -44,12 +63,31 @@ public abstract class DoubleValues {
|
|||
}
|
||||
|
||||
/**
|
||||
* Is there a value for this doc?
|
||||
* Returns <code>true</code> if the given document ID has a value in this. Otherwise <code>false</code>.
|
||||
*/
|
||||
public abstract boolean hasValue(int docId);
|
||||
|
||||
/**
|
||||
* Returns a value for the given document id. If the document
|
||||
* has more than one value the returned value is one of the values
|
||||
* associated with the document.
|
||||
* @param docId the documents id.
|
||||
* @return a value for the given document id.
|
||||
*/
|
||||
public abstract double getValue(int docId);
|
||||
|
||||
|
||||
/**
|
||||
* Returns a value for the given document id or the given missing value if
|
||||
* {@link #hasValue(int)} returns <code>false</code> ie. the document has no
|
||||
* value associated with it.
|
||||
*
|
||||
* @param docId the documents id.
|
||||
* @param missingValue the missing value
|
||||
* @return a value for the given document id or the given missing value if
|
||||
* {@link #hasValue(int)} returns <code>false</code> ie. the document has no
|
||||
* value associated with it.
|
||||
*/
|
||||
public double getValueMissing(int docId, double missingValue) {
|
||||
if (hasValue(docId)) {
|
||||
return getValue(docId);
|
||||
|
@ -57,164 +95,99 @@ public abstract class DoubleValues {
|
|||
return missingValue;
|
||||
}
|
||||
|
||||
public Iter getIter(int docId) {
|
||||
assert !isMultiValued();
|
||||
if (hasValue(docId)) {
|
||||
return iter.reset(getValue(docId));
|
||||
} else {
|
||||
return Iter.Empty.INSTANCE;
|
||||
}
|
||||
/**
|
||||
* Sets iteration to the specified docID and returns the number of
|
||||
* values for this document ID,
|
||||
* @param docId document ID
|
||||
*
|
||||
* @see #nextValue()
|
||||
*/
|
||||
public int setDocument(int docId) {
|
||||
this.docId = docId;
|
||||
return hasValue(docId) ? 1 : 0;
|
||||
}
|
||||
|
||||
|
||||
public static abstract class Dense extends DoubleValues {
|
||||
|
||||
|
||||
protected Dense(boolean multiValued) {
|
||||
super(multiValued);
|
||||
}
|
||||
|
||||
@Override
|
||||
public final boolean hasValue(int docId) {
|
||||
return true;
|
||||
}
|
||||
|
||||
public final double getValueMissing(int docId, double missingValue) {
|
||||
assert hasValue(docId);
|
||||
assert !isMultiValued();
|
||||
return getValue(docId);
|
||||
}
|
||||
|
||||
public final Iter getIter(int docId) {
|
||||
assert hasValue(docId);
|
||||
assert !isMultiValued();
|
||||
return iter.reset(getValue(docId));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the next value for the current docID set to {@link #setDocument(int)}.
|
||||
* This method should only be called <tt>N</tt> times where <tt>N</tt> is the number
|
||||
* returned from {@link #setDocument(int)}. If called more than <tt>N</tt> times the behavior
|
||||
* is undefined.
|
||||
*
|
||||
* @return the next value for the current docID set to {@link #setDocument(int)}.
|
||||
*/
|
||||
public double nextValue() {
|
||||
return getValue(docId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Ordinal based {@link DoubleValues}.
|
||||
*/
|
||||
public static abstract class WithOrdinals extends DoubleValues {
|
||||
|
||||
protected final Docs ordinals;
|
||||
private final Iter.Multi iter;
|
||||
|
||||
protected WithOrdinals(Ordinals.Docs ordinals) {
|
||||
super(ordinals.isMultiValued());
|
||||
this.ordinals = ordinals;
|
||||
iter = new Iter.Multi(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the associated ordinals instance.
|
||||
* @return the associated ordinals instance.
|
||||
*/
|
||||
public Docs ordinals() {
|
||||
return ordinals;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the value for the given ordinal.
|
||||
* @param ord the ordinal to lookup.
|
||||
* @return a double value associated with the given ordinal.
|
||||
*/
|
||||
public abstract double getValueByOrd(long ord);
|
||||
|
||||
@Override
|
||||
public final boolean hasValue(int docId) {
|
||||
return ordinals.getOrd(docId) != 0;
|
||||
return ordinals.getOrd(docId) != Ordinals.MISSING_ORDINAL;
|
||||
}
|
||||
|
||||
@Override
|
||||
public final double getValue(int docId) {
|
||||
return getValueByOrd(ordinals.getOrd(docId));
|
||||
final long ord = ordinals.getOrd(docId);
|
||||
if (ord == Ordinals.MISSING_ORDINAL) {
|
||||
return 0d;
|
||||
}
|
||||
return getValueByOrd(ord);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int setDocument(int docId) {
|
||||
this.docId = docId;
|
||||
return ordinals.setDocument(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public double nextValue() {
|
||||
return getValueByOrd(ordinals.nextOrd());
|
||||
}
|
||||
|
||||
@Override
|
||||
public final double getValueMissing(int docId, double missingValue) {
|
||||
final long ord = ordinals.getOrd(docId);
|
||||
if (ord == 0) {
|
||||
if (ord == Ordinals.MISSING_ORDINAL) {
|
||||
return missingValue;
|
||||
} else {
|
||||
return getValueByOrd(ord);
|
||||
}
|
||||
}
|
||||
|
||||
public abstract double getValueByOrd(long ord);
|
||||
|
||||
@Override
|
||||
public final Iter getIter(int docId) {
|
||||
return iter.reset(ordinals.getIter(docId));
|
||||
}
|
||||
|
||||
}
|
||||
/**
|
||||
* An empty {@link DoubleValues} implementation
|
||||
*/
|
||||
private static class Empty extends DoubleValues {
|
||||
|
||||
public static interface Iter {
|
||||
|
||||
boolean hasNext();
|
||||
|
||||
double next();
|
||||
|
||||
public static class Empty implements Iter {
|
||||
|
||||
public static final Empty INSTANCE = new Empty();
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double next() {
|
||||
throw new ElasticSearchIllegalStateException();
|
||||
}
|
||||
}
|
||||
|
||||
static class Single implements Iter {
|
||||
|
||||
public double value;
|
||||
public boolean done;
|
||||
|
||||
public Single reset(double value) {
|
||||
this.value = value;
|
||||
this.done = false;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return !done;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double next() {
|
||||
assert !done;
|
||||
done = true;
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
static class Multi implements Iter {
|
||||
|
||||
private Ordinals.Docs.Iter ordsIter;
|
||||
private long ord;
|
||||
private WithOrdinals values;
|
||||
|
||||
public Multi(WithOrdinals values) {
|
||||
this.values = values;
|
||||
}
|
||||
|
||||
public Multi reset(Ordinals.Docs.Iter ordsIter) {
|
||||
this.ordsIter = ordsIter;
|
||||
this.ord = ordsIter.next();
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return ord != 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double next() {
|
||||
double value = values.getValueByOrd(ord);
|
||||
ord = ordsIter.next();
|
||||
return value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static class Empty extends DoubleValues {
|
||||
|
||||
public Empty() {
|
||||
Empty() {
|
||||
super(false);
|
||||
}
|
||||
|
||||
|
@ -230,31 +203,13 @@ public abstract class DoubleValues {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
return Iter.Empty.INSTANCE;
|
||||
public int setDocument(int docId) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public static class Filtered extends DoubleValues {
|
||||
|
||||
protected final DoubleValues delegate;
|
||||
|
||||
public Filtered(DoubleValues delegate) {
|
||||
super(delegate.isMultiValued());
|
||||
this.delegate = delegate;
|
||||
}
|
||||
|
||||
public boolean hasValue(int docId) {
|
||||
return delegate.hasValue(docId);
|
||||
}
|
||||
|
||||
public double getValue(int docId) {
|
||||
return delegate.getValue(docId);
|
||||
}
|
||||
|
||||
public Iter getIter(int docId) {
|
||||
return delegate.getIter(docId);
|
||||
@Override
|
||||
public double nextValue() {
|
||||
throw new ElasticSearchIllegalStateException("Empty DoubleValues has no next value");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,66 @@
|
|||
/*
|
||||
* Licensed to ElasticSearch and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. ElasticSearch licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.index.fielddata;
|
||||
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
|
||||
/**
|
||||
* <code>FilterBytesValues</code> contains another {@link BytesValues}, which it
|
||||
* uses as its basic source of data, possibly transforming the data along the
|
||||
* way or providing additional functionality.
|
||||
*/
|
||||
public abstract class FilterBytesValues extends BytesValues {
|
||||
|
||||
protected final BytesValues delegate;
|
||||
|
||||
protected FilterBytesValues(BytesValues delegate) {
|
||||
super(delegate.isMultiValued());
|
||||
this.delegate = delegate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasValue(int docId) {
|
||||
return delegate.hasValue(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef copyShared() {
|
||||
return delegate.copyShared();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int setDocument(int docId) {
|
||||
return delegate.setDocument(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef nextValue() {
|
||||
return delegate.nextValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int currentValueHash() {
|
||||
return delegate.currentValueHash();
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef getValue(int docId) {
|
||||
return delegate.getValue(docId);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,58 @@
|
|||
/*
|
||||
* Licensed to ElasticSearch and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. ElasticSearch licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.index.fielddata;
|
||||
/**
|
||||
* <code>FilterDoubleValues</code> contains another {@link DoubleValues}, which it
|
||||
* uses as its basic source of data, possibly transforming the data along the
|
||||
* way or providing additional functionality.
|
||||
*/
|
||||
public abstract class FilterDoubleValues extends DoubleValues {
|
||||
|
||||
protected final DoubleValues delegate;
|
||||
|
||||
protected FilterDoubleValues(DoubleValues delegate) {
|
||||
super(delegate.isMultiValued());
|
||||
this.delegate = delegate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasValue(int docId) {
|
||||
return delegate.hasValue(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public double getValue(int docId) {
|
||||
return delegate.getValue(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int setDocument(int docId) {
|
||||
return delegate.setDocument(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public double nextValue() {
|
||||
return delegate.nextValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public double getValueMissing(int docId, double missingValue) {
|
||||
return delegate.getValueMissing(docId, missingValue);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,59 @@
|
|||
/*
|
||||
* Licensed to ElasticSearch and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. ElasticSearch licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.index.fielddata;
|
||||
|
||||
/**
|
||||
* <code>FilterLongValues</code> contains another {@link LongValues}, which it
|
||||
* uses as its basic source of data, possibly transforming the data along the
|
||||
* way or providing additional functionality.
|
||||
*/
|
||||
public class FilterLongValues extends LongValues {
|
||||
|
||||
protected final LongValues delegate;
|
||||
|
||||
protected FilterLongValues(LongValues delegate) {
|
||||
super(delegate.isMultiValued());
|
||||
this.delegate = delegate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasValue(int docId) {
|
||||
return delegate.hasValue(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getValue(int docId) {
|
||||
return delegate.getValue(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int setDocument(int docId) {
|
||||
return delegate.setDocument(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long nextValue() {
|
||||
return delegate.nextValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getValueMissing(int docId, long missingValue) {
|
||||
return delegate.getValueMissing(docId, missingValue); //To change body of overridden methods use File | Settings | File Templates.
|
||||
}
|
||||
}
|
|
@ -23,13 +23,35 @@ import org.elasticsearch.ElasticSearchIllegalStateException;
|
|||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
|
||||
/**
|
||||
* A state-full lightweight per document set of {@link GeoPoint} values.
|
||||
* To iterate over values in a document use the following pattern:
|
||||
* <pre>
|
||||
* GeoPointValues values = ..;
|
||||
* final int numValues = values.setDocId(docId);
|
||||
* for (int i = 0; i < numValues; i++) {
|
||||
* GeoPoint value = values.nextValue();
|
||||
* // process value
|
||||
* }
|
||||
* </pre>
|
||||
*/
|
||||
public abstract class GeoPointValues {
|
||||
|
||||
/**
|
||||
* An empty {@link GeoPointValues instance}
|
||||
*/
|
||||
public static final GeoPointValues EMPTY = new Empty();
|
||||
|
||||
private final boolean multiValued;
|
||||
|
||||
protected int docId = -1;
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoPointValues} instance
|
||||
* @param multiValued <code>true</code> iff this instance is multivalued. Otherwise <code>false</code>.
|
||||
*/
|
||||
protected GeoPointValues(boolean multiValued) {
|
||||
this.multiValued = multiValued;
|
||||
}
|
||||
/**
|
||||
* Is one of the documents in this field data values is multi valued?
|
||||
*/
|
||||
|
@ -38,100 +60,93 @@ public abstract class GeoPointValues {
|
|||
}
|
||||
|
||||
/**
|
||||
* Is there a value for this doc?
|
||||
* Returns <code>true</code> if the given document ID has a value in this. Otherwise <code>false</code>.
|
||||
*/
|
||||
public abstract boolean hasValue(int docId);
|
||||
|
||||
/**
|
||||
* Returns a value for the given document id. If the document
|
||||
* has more than one value the returned value is one of the values
|
||||
* associated with the document.
|
||||
*
|
||||
* Note: the {@link GeoPoint} might be shared across invocations.
|
||||
*
|
||||
* @param docId the documents id.
|
||||
* @return a value for the given document id.
|
||||
*/
|
||||
public abstract GeoPoint getValue(int docId);
|
||||
|
||||
public abstract GeoPoint getValueSafe(int docId);
|
||||
|
||||
public abstract Iter getIter(int docId);
|
||||
|
||||
public abstract Iter getIterSafe(int docId);
|
||||
|
||||
protected GeoPointValues(boolean multiValued) {
|
||||
this.multiValued = multiValued;
|
||||
/**
|
||||
* Sets iteration to the specified docID and returns the number of
|
||||
* values for this document ID,
|
||||
* @param docId document ID
|
||||
*
|
||||
* @see #nextValue()
|
||||
*/
|
||||
public int setDocument(int docId) {
|
||||
this.docId = docId;
|
||||
return hasValue(docId) ? 1 : 0;
|
||||
}
|
||||
/**
|
||||
* Returns the next value for the current docID set to {@link #setDocument(int)}.
|
||||
* This method should only be called <tt>N</tt> times where <tt>N</tt> is the number
|
||||
* returned from {@link #setDocument(int)}. If called more than <tt>N</tt> times the behavior
|
||||
* is undefined.
|
||||
*
|
||||
* Note: the returned {@link GeoPoint} might be shared across invocations.
|
||||
*
|
||||
* @return the next value for the current docID set to {@link #setDocument(int)}.
|
||||
*/
|
||||
public GeoPoint nextValue() {
|
||||
assert docId != -1;
|
||||
return getValue(docId);
|
||||
}
|
||||
|
||||
public GeoPoint getValueMissing(int docId, GeoPoint defaultGeoPoint) {
|
||||
/**
|
||||
* Returns a value for the given document id or the given missing value if
|
||||
* {@link #hasValue(int)} returns <code>false</code> ie. the document has no
|
||||
* value associated with it.
|
||||
*
|
||||
* @param docId the documents id.
|
||||
* @param missingValue the missing value
|
||||
* @return a value for the given document id or the given missing value if
|
||||
* {@link #hasValue(int)} returns <code>false</code> ie. the document has no
|
||||
* value associated with it.
|
||||
*/
|
||||
public GeoPoint getValueMissing(int docId, GeoPoint missingValue) {
|
||||
if (hasValue(docId)) {
|
||||
return getValue(docId);
|
||||
}
|
||||
return defaultGeoPoint;
|
||||
return missingValue;
|
||||
}
|
||||
|
||||
|
||||
public static interface Iter {
|
||||
|
||||
boolean hasNext();
|
||||
|
||||
GeoPoint next();
|
||||
|
||||
static class Empty implements Iter {
|
||||
|
||||
public static final Empty INSTANCE = new Empty();
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPoint next() {
|
||||
throw new ElasticSearchIllegalStateException();
|
||||
}
|
||||
}
|
||||
|
||||
static class Single implements Iter {
|
||||
|
||||
public GeoPoint value;
|
||||
public boolean done;
|
||||
|
||||
public Single reset(GeoPoint value) {
|
||||
this.value = value;
|
||||
this.done = false;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return !done;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPoint next() {
|
||||
assert !done;
|
||||
done = true;
|
||||
return value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static class Empty extends GeoPointValues {
|
||||
/**
|
||||
* An empty {@link GeoPointValues} implementation
|
||||
*/
|
||||
private static final class Empty extends GeoPointValues {
|
||||
protected Empty() {
|
||||
super(false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasValue(int docId) {
|
||||
return false;
|
||||
}
|
||||
|
||||
public GeoPoint getValueSafe(int docId) {
|
||||
return getValue(docId);
|
||||
}
|
||||
|
||||
public Iter getIterSafe(int docId) {
|
||||
return getIter(docId);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public GeoPoint getValue(int docId) {
|
||||
return null;
|
||||
}
|
||||
|
||||
public Iter getIter(int docId) {
|
||||
return Iter.Empty.INSTANCE;
|
||||
@Override
|
||||
public int setDocument(int docId) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPoint nextValue() {
|
||||
throw new ElasticSearchIllegalStateException("Empty GeoPointValues has no next value");
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@ -24,14 +24,34 @@ import org.elasticsearch.index.fielddata.ordinals.Ordinals;
|
|||
import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs;
|
||||
|
||||
/**
|
||||
* A state-full lightweight per document set of <code>long</code> values.
|
||||
*
|
||||
* To iterate over values in a document use the following pattern:
|
||||
* <pre>
|
||||
* LongValues values = ..;
|
||||
* final int numValues = values.setDocId(docId);
|
||||
* for (int i = 0; i < numValues; i++) {
|
||||
* long value = values.nextValue();
|
||||
* // process value
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
*/
|
||||
public abstract class LongValues {
|
||||
|
||||
/**
|
||||
* An empty {@link LongValues instance}
|
||||
*/
|
||||
public static final LongValues EMPTY = new Empty();
|
||||
|
||||
private final boolean multiValued;
|
||||
protected final Iter.Single iter = new Iter.Single();
|
||||
|
||||
protected int docId;
|
||||
|
||||
/**
|
||||
* Creates a new {@link LongValues} instance
|
||||
* @param multiValued <code>true</code> iff this instance is multivalued. Otherwise <code>false</code>.
|
||||
*/
|
||||
protected LongValues(boolean multiValued) {
|
||||
this.multiValued = multiValued;
|
||||
}
|
||||
|
@ -44,12 +64,30 @@ public abstract class LongValues {
|
|||
}
|
||||
|
||||
/**
|
||||
* Is there a value for this doc?
|
||||
* Returns <code>true</code> if the given document ID has a value in this. Otherwise <code>false</code>.
|
||||
*/
|
||||
public abstract boolean hasValue(int docId);
|
||||
|
||||
/**
|
||||
* Returns a value for the given document id. If the document
|
||||
* has more than one value the returned value is one of the values
|
||||
* associated with the document.
|
||||
* @param docId the documents id.
|
||||
* @return a value for the given document id.
|
||||
*/
|
||||
public abstract long getValue(int docId);
|
||||
|
||||
/**
|
||||
* Returns a value for the given document id or the given missing value if
|
||||
* {@link #hasValue(int)} returns <code>false</code> ie. the document has no
|
||||
* value associated with it.
|
||||
*
|
||||
* @param docId the documents id.
|
||||
* @param missingValue the missing value
|
||||
* @return a value for the given document id or the given missing value if
|
||||
* {@link #hasValue(int)} returns <code>false</code> ie. the document has no
|
||||
* value associated with it.
|
||||
*/
|
||||
public long getValueMissing(int docId, long missingValue) {
|
||||
if (hasValue(docId)) {
|
||||
return getValue(docId);
|
||||
|
@ -57,162 +95,85 @@ public abstract class LongValues {
|
|||
return missingValue;
|
||||
}
|
||||
|
||||
public Iter getIter(int docId) {
|
||||
assert !isMultiValued();
|
||||
if (hasValue(docId)) {
|
||||
return iter.reset(getValue(docId));
|
||||
} else {
|
||||
return Iter.Empty.INSTANCE;
|
||||
}
|
||||
/**
|
||||
* Sets iteration to the specified docID and returns the number of
|
||||
* values for this document ID,
|
||||
* @param docId document ID
|
||||
*
|
||||
* @see #nextValue()
|
||||
*/
|
||||
public int setDocument(int docId) {
|
||||
this.docId = docId;
|
||||
return hasValue(docId) ? 1 : 0;
|
||||
}
|
||||
|
||||
|
||||
public static abstract class Dense extends LongValues {
|
||||
|
||||
|
||||
protected Dense(boolean multiValued) {
|
||||
super(multiValued);
|
||||
}
|
||||
|
||||
@Override
|
||||
public final boolean hasValue(int docId) {
|
||||
return true;
|
||||
}
|
||||
|
||||
public final long getValueMissing(int docId, long missingValue) {
|
||||
assert hasValue(docId);
|
||||
assert !isMultiValued();
|
||||
return getValue(docId);
|
||||
}
|
||||
|
||||
public final Iter getIter(int docId) {
|
||||
assert hasValue(docId);
|
||||
assert !isMultiValued();
|
||||
return iter.reset(getValue(docId));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the next value for the current docID set to {@link #setDocument(int)}.
|
||||
* This method should only be called <tt>N</tt> times where <tt>N</tt> is the number
|
||||
* returned from {@link #setDocument(int)}. If called more than <tt>N</tt> times the behavior
|
||||
* is undefined.
|
||||
*
|
||||
* @return the next value for the current docID set to {@link #setDocument(int)}.
|
||||
*/
|
||||
public long nextValue() {
|
||||
return getValue(docId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Ordinal based {@link LongValues}.
|
||||
*/
|
||||
public static abstract class WithOrdinals extends LongValues {
|
||||
|
||||
protected final Docs ordinals;
|
||||
private final Iter.Multi iter;
|
||||
|
||||
protected WithOrdinals(Ordinals.Docs ordinals) {
|
||||
super(ordinals.isMultiValued());
|
||||
this.ordinals = ordinals;
|
||||
iter = new Iter.Multi(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the associated ordinals instance.
|
||||
* @return the associated ordinals instance.
|
||||
*/
|
||||
public Docs ordinals() {
|
||||
return this.ordinals;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the value for the given ordinal.
|
||||
* @param ord the ordinal to lookup.
|
||||
* @return a long value associated with the given ordinal.
|
||||
*/
|
||||
public abstract long getValueByOrd(long ord);
|
||||
|
||||
@Override
|
||||
public final boolean hasValue(int docId) {
|
||||
return ordinals.getOrd(docId) != 0;
|
||||
return ordinals.getOrd(docId) != Ordinals.MISSING_ORDINAL;
|
||||
}
|
||||
|
||||
@Override
|
||||
public final long getValue(int docId) {
|
||||
return getValueByOrd(ordinals.getOrd(docId));
|
||||
}
|
||||
|
||||
public abstract long getValueByOrd(long ord);
|
||||
|
||||
@Override
|
||||
public final Iter getIter(int docId) {
|
||||
return iter.reset(ordinals.getIter(docId));
|
||||
long ord = ordinals.getOrd(docId);
|
||||
if (ord == Ordinals.MISSING_ORDINAL) {
|
||||
return 0l;
|
||||
}
|
||||
return getValueByOrd(ord);
|
||||
}
|
||||
|
||||
@Override
|
||||
public final long getValueMissing(int docId, long missingValue) {
|
||||
final long ord = ordinals.getOrd(docId);
|
||||
if (ord == 0) {
|
||||
return missingValue;
|
||||
} else {
|
||||
return getValueByOrd(ord);
|
||||
}
|
||||
public int setDocument(int docId) {
|
||||
this.docId = docId;
|
||||
return ordinals.setDocument(docId);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public static interface Iter {
|
||||
|
||||
boolean hasNext();
|
||||
|
||||
long next();
|
||||
|
||||
public static class Empty implements Iter {
|
||||
|
||||
public static final Empty INSTANCE = new Empty();
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long next() {
|
||||
throw new ElasticSearchIllegalStateException();
|
||||
}
|
||||
}
|
||||
|
||||
static class Single implements Iter {
|
||||
|
||||
public long value;
|
||||
public boolean done;
|
||||
|
||||
public Single reset(long value) {
|
||||
this.value = value;
|
||||
this.done = false;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return !done;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long next() {
|
||||
assert !done;
|
||||
done = true;
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
static class Multi implements Iter {
|
||||
|
||||
private org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs.Iter ordsIter;
|
||||
private long ord;
|
||||
private WithOrdinals values;
|
||||
|
||||
public Multi(WithOrdinals values) {
|
||||
this.values = values;
|
||||
}
|
||||
|
||||
public Multi reset(Ordinals.Docs.Iter ordsIter) {
|
||||
this.ordsIter = ordsIter;
|
||||
this.ord = ordsIter.next();
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return ord != 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long next() {
|
||||
long value = values.getValueByOrd(ord);
|
||||
ord = ordsIter.next();
|
||||
return value;
|
||||
}
|
||||
@Override
|
||||
public long nextValue() {
|
||||
return getValueByOrd(ordinals.nextOrd());
|
||||
}
|
||||
}
|
||||
|
||||
static class Empty extends LongValues {
|
||||
|
||||
private static final class Empty extends LongValues {
|
||||
|
||||
public Empty() {
|
||||
super(false);
|
||||
|
@ -230,32 +191,14 @@ public abstract class LongValues {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
return Iter.Empty.INSTANCE;
|
||||
public int setDocument(int docId) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long nextValue() {
|
||||
throw new ElasticSearchIllegalStateException("Empty LongValues has no next value");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public static class Filtered extends LongValues {
|
||||
|
||||
protected final LongValues delegate;
|
||||
|
||||
public Filtered(LongValues delegate) {
|
||||
super(delegate.isMultiValued());
|
||||
this.delegate = delegate;
|
||||
}
|
||||
|
||||
public boolean hasValue(int docId) {
|
||||
return delegate.hasValue(docId);
|
||||
}
|
||||
|
||||
public long getValue(int docId) {
|
||||
return delegate.getValue(docId);
|
||||
}
|
||||
|
||||
public Iter getIter(int docId) {
|
||||
return delegate.getIter(docId);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -26,7 +26,6 @@ import org.elasticsearch.common.unit.DistanceUnit;
|
|||
import org.elasticsearch.common.util.SlicedDoubleList;
|
||||
import org.elasticsearch.common.util.SlicedLongList;
|
||||
import org.elasticsearch.common.util.SlicedObjectList;
|
||||
import org.elasticsearch.index.fielddata.BytesValues.Iter;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.joda.time.MutableDateTime;
|
||||
|
||||
|
@ -102,33 +101,29 @@ public abstract class ScriptDocValues {
|
|||
return this.values;
|
||||
}
|
||||
|
||||
public Iter getBytesIter() {
|
||||
return values.getIter(docId);
|
||||
}
|
||||
|
||||
public BytesRef getBytesValue() {
|
||||
return values.getValue(docId);
|
||||
}
|
||||
|
||||
public String getValue() {
|
||||
final BytesRef value = values.getValue(docId);
|
||||
if (value != null) {
|
||||
UnicodeUtil.UTF8toUTF16(value, spare);
|
||||
return spare.toString();
|
||||
String value = null;
|
||||
if (values.setDocument(docId) > 0) {
|
||||
UnicodeUtil.UTF8toUTF16(values.nextValue(), spare);
|
||||
value = spare.toString();
|
||||
}
|
||||
return null;
|
||||
return value;
|
||||
}
|
||||
|
||||
public List<String> getValues() {
|
||||
if (!listLoaded) {
|
||||
final int numValues = values.setDocument(docId);
|
||||
list.offset = 0;
|
||||
list.length = 0;
|
||||
Iter iter = values.getIter(docId);
|
||||
while (iter.hasNext()) {
|
||||
BytesRef next = iter.next();
|
||||
list.grow(list.length + 1);
|
||||
list.grow(numValues);
|
||||
list.length = numValues;
|
||||
for (int i = 0; i < numValues; i++) {
|
||||
BytesRef next = values.nextValue();
|
||||
UnicodeUtil.UTF8toUTF16(next, spare);
|
||||
list.values[list.length++] = spare.toString();
|
||||
list.values[i] = spare.toString();
|
||||
}
|
||||
listLoaded = true;
|
||||
}
|
||||
|
@ -158,22 +153,18 @@ public abstract class ScriptDocValues {
|
|||
return !values.hasValue(docId);
|
||||
}
|
||||
|
||||
public LongValues.Iter getIter() {
|
||||
return values.getIter(docId);
|
||||
}
|
||||
|
||||
public long getValue() {
|
||||
return values.getValue(docId);
|
||||
}
|
||||
|
||||
public List<Long> getValues() {
|
||||
if (!listLoaded) {
|
||||
final LongValues.Iter iter = values.getIter(docId);
|
||||
final int numValues = values.setDocument(docId);
|
||||
list.offset = 0;
|
||||
list.length = 0;
|
||||
while (iter.hasNext()) {
|
||||
list.grow(list.length + 1);
|
||||
list.values[list.length++] = iter.next();
|
||||
list.grow(numValues);
|
||||
list.length = numValues;
|
||||
for (int i = 0; i < numValues; i++) {
|
||||
list.values[i] = values.nextValue();
|
||||
}
|
||||
listLoaded = true;
|
||||
}
|
||||
|
@ -207,9 +198,6 @@ public abstract class ScriptDocValues {
|
|||
return !values.hasValue(docId);
|
||||
}
|
||||
|
||||
public DoubleValues.Iter getIter() {
|
||||
return values.getIter(docId);
|
||||
}
|
||||
|
||||
public double getValue() {
|
||||
return values.getValue(docId);
|
||||
|
@ -217,12 +205,12 @@ public abstract class ScriptDocValues {
|
|||
|
||||
public List<Double> getValues() {
|
||||
if (!listLoaded) {
|
||||
final DoubleValues.Iter iter = values.getIter(docId);
|
||||
int numValues = values.setDocument(docId);
|
||||
list.offset = 0;
|
||||
list.length = 0;
|
||||
while (iter.hasNext()) {
|
||||
list.grow(list.length + 1);
|
||||
list.values[list.length++] = iter.next();
|
||||
list.grow(numValues);
|
||||
list.length = numValues;
|
||||
for (int i = 0; i < numValues; i++) {
|
||||
list.values[i] = values.nextValue();
|
||||
}
|
||||
listLoaded = true;
|
||||
}
|
||||
|
@ -291,19 +279,18 @@ public abstract class ScriptDocValues {
|
|||
|
||||
public List<GeoPoint> getValues() {
|
||||
if (!listLoaded) {
|
||||
GeoPointValues.Iter iter = values.getIter(docId);
|
||||
int numValues = values.setDocument(docId);
|
||||
list.offset = 0;
|
||||
list.length = 0;
|
||||
while (iter.hasNext()) {
|
||||
int index = list.length;
|
||||
list.grow(index + 1);
|
||||
GeoPoint next = iter.next();
|
||||
GeoPoint point = list.values[index];
|
||||
list.grow(numValues);
|
||||
list.length = numValues;
|
||||
for (int i = 0; i < numValues; i++) {
|
||||
GeoPoint next = values.nextValue();
|
||||
GeoPoint point = list.values[i];
|
||||
if (point == null) {
|
||||
point = list.values[index] = new GeoPoint();
|
||||
point = list.values[i] = new GeoPoint();
|
||||
}
|
||||
point.reset(next.lat(), next.lon());
|
||||
list.values[list.length++] = point;
|
||||
list.values[i] = point;
|
||||
}
|
||||
listLoaded = true;
|
||||
}
|
||||
|
|
|
@ -188,7 +188,7 @@ public final class BytesRefOrdValComparator extends NestedWrappableComparator<By
|
|||
@Override
|
||||
public int compareDocToValue(int doc, BytesRef value) {
|
||||
final long ord = getOrd(doc);
|
||||
final BytesRef docValue = ord == 0 ? missingValue : termsIndex.getValueByOrd(ord);
|
||||
final BytesRef docValue = ord == Ordinals.MISSING_ORDINAL ? missingValue : termsIndex.getValueByOrd(ord);
|
||||
return compareValues(docValue, value);
|
||||
}
|
||||
|
||||
|
@ -200,7 +200,7 @@ public final class BytesRefOrdValComparator extends NestedWrappableComparator<By
|
|||
public int compareBottom(int doc) {
|
||||
assert bottomSlot != -1;
|
||||
final long docOrd = getOrd(doc);
|
||||
final long comparableOrd = docOrd == 0 ? missingOrd : docOrd << 2;
|
||||
final long comparableOrd = docOrd == Ordinals.MISSING_ORDINAL ? missingOrd : docOrd << 2;
|
||||
return LongValuesComparator.compare(bottomOrd, comparableOrd);
|
||||
}
|
||||
|
||||
|
@ -213,7 +213,7 @@ public final class BytesRefOrdValComparator extends NestedWrappableComparator<By
|
|||
@Override
|
||||
public void copy(int slot, int doc) {
|
||||
final long ord = getOrd(doc);
|
||||
if (ord == 0) {
|
||||
if (ord == Ordinals.MISSING_ORDINAL) {
|
||||
ords[slot] = missingOrd;
|
||||
values[slot] = missingValue;
|
||||
} else {
|
||||
|
@ -222,7 +222,7 @@ public final class BytesRefOrdValComparator extends NestedWrappableComparator<By
|
|||
if (values[slot] == null || values[slot] == missingValue) {
|
||||
values[slot] = new BytesRef();
|
||||
}
|
||||
termsIndex.getValueScratchByOrd(ord, values[slot]);
|
||||
values[slot].copyBytes(termsIndex.getValueByOrd(ord));
|
||||
}
|
||||
readerGen[slot] = currentReaderGen;
|
||||
}
|
||||
|
@ -274,7 +274,7 @@ public final class BytesRefOrdValComparator extends NestedWrappableComparator<By
|
|||
termsIndex = indexFieldData.load(context).getBytesValues();
|
||||
assert termsIndex.ordinals() != null && termsIndex.ordinals().ordinals() != null;
|
||||
if (missingValue == null) {
|
||||
missingOrd = 0;
|
||||
missingOrd = Ordinals.MISSING_ORDINAL;
|
||||
} else {
|
||||
missingOrd = ordInCurrentReader(termsIndex, missingValue);
|
||||
assert consistentInsertedOrd(termsIndex, missingOrd, missingValue);
|
||||
|
@ -304,7 +304,7 @@ public final class BytesRefOrdValComparator extends NestedWrappableComparator<By
|
|||
final BytesRef bottomValue = values[bottomSlot];
|
||||
|
||||
if (bottomValue == null) {
|
||||
bottomOrd = 0;
|
||||
bottomOrd = Ordinals.MISSING_ORDINAL;
|
||||
} else if (currentReaderGen == readerGen[bottomSlot]) {
|
||||
bottomOrd = ords[bottomSlot];
|
||||
} else {
|
||||
|
@ -336,7 +336,8 @@ public final class BytesRefOrdValComparator extends NestedWrappableComparator<By
|
|||
}
|
||||
|
||||
final protected static long binarySearch(BytesValues.WithOrdinals a, BytesRef key, long low, long high) {
|
||||
assert a.getValueByOrd(high) == null | a.getValueByOrd(high) != null; // make sure we actually can get these values
|
||||
assert low != Ordinals.MISSING_ORDINAL;
|
||||
assert high == Ordinals.MISSING_ORDINAL || (a.getValueByOrd(high) == null | a.getValueByOrd(high) != null); // make sure we actually can get these values
|
||||
assert low == high + 1 || a.getValueByOrd(low) == null | a.getValueByOrd(low) != null;
|
||||
while (low <= high) {
|
||||
long mid = (low + high) >>> 1;
|
||||
|
@ -358,57 +359,17 @@ public final class BytesRefOrdValComparator extends NestedWrappableComparator<By
|
|||
return -(low + 1);
|
||||
}
|
||||
|
||||
static BytesRef getRelevantValue(BytesValues.WithOrdinals readerValues, int docId, SortMode sortMode) {
|
||||
BytesValues.Iter iter = readerValues.getIter(docId);
|
||||
if (!iter.hasNext()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
BytesRef currentVal = iter.next();
|
||||
BytesRef relevantVal = currentVal;
|
||||
while (true) {
|
||||
int cmp = currentVal.compareTo(relevantVal);
|
||||
if (sortMode == SortMode.MAX) {
|
||||
if (cmp > 0) {
|
||||
relevantVal = currentVal;
|
||||
}
|
||||
} else {
|
||||
if (cmp < 0) {
|
||||
relevantVal = currentVal;
|
||||
}
|
||||
}
|
||||
if (!iter.hasNext()) {
|
||||
break;
|
||||
}
|
||||
currentVal = iter.next();
|
||||
}
|
||||
return relevantVal;
|
||||
}
|
||||
|
||||
static long getRelevantOrd(Ordinals.Docs readerOrds, int docId, SortMode sortMode) {
|
||||
Ordinals.Docs.Iter iter = readerOrds.getIter(docId);
|
||||
long currentVal = iter.next();
|
||||
if (currentVal == 0) {
|
||||
return 0;
|
||||
int length = readerOrds.setDocument(docId);
|
||||
long relevantVal = sortMode.startLong();
|
||||
long result = 0;
|
||||
assert sortMode == SortMode.MAX || sortMode == SortMode.MIN;
|
||||
for (int i = 0; i < length; i++) {
|
||||
result = relevantVal = sortMode.apply(readerOrds.nextOrd(), relevantVal);
|
||||
}
|
||||
|
||||
long relevantVal = currentVal;
|
||||
while (true) {
|
||||
if (sortMode == SortMode.MAX) {
|
||||
if (currentVal > relevantVal) {
|
||||
relevantVal = currentVal;
|
||||
}
|
||||
} else {
|
||||
if (currentVal < relevantVal) {
|
||||
relevantVal = currentVal;
|
||||
}
|
||||
}
|
||||
currentVal = iter.next();
|
||||
if (currentVal == 0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
return relevantVal;
|
||||
assert result >= 0;
|
||||
assert result <= readerOrds.getMaxOrd();
|
||||
return result;
|
||||
// Enable this when the api can tell us that the ords per doc are ordered
|
||||
/*if (reversed) {
|
||||
IntArrayRef ref = readerOrds.getOrds(docId);
|
||||
|
|
|
@ -25,6 +25,7 @@ import org.apache.lucene.util.ArrayUtil;
|
|||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.RamUsageEstimator;
|
||||
import org.elasticsearch.index.fielddata.BytesValues;
|
||||
import org.elasticsearch.index.fielddata.FilterBytesValues;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -61,22 +62,21 @@ public final class BytesRefValComparator extends NestedWrappableComparator<Bytes
|
|||
|
||||
@Override
|
||||
public int compareBottom(int doc) throws IOException {
|
||||
BytesRef val2 = docTerms.getValue(doc);
|
||||
if (val2 == null) {
|
||||
val2 = missingValue;
|
||||
}
|
||||
int length = docTerms.setDocument(doc); // safes one hasValue lookup
|
||||
BytesRef val2 = length == 0 ? missingValue : docTerms.nextValue();
|
||||
return compareValues(bottom, val2);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void copy(int slot, int doc) throws IOException {
|
||||
if (!docTerms.hasValue(doc)) {
|
||||
int length = docTerms.setDocument(doc); // safes one hasValue lookup
|
||||
if (length == 0) {
|
||||
values[slot] = missingValue;
|
||||
} else {
|
||||
if (values[slot] == null || values[slot] == missingValue) {
|
||||
values[slot] = new BytesRef();
|
||||
}
|
||||
docTerms.getValueScratch(doc, values[slot]);
|
||||
values[slot].copyBytes(docTerms.nextValue());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -114,39 +114,14 @@ public final class BytesRefValComparator extends NestedWrappableComparator<Bytes
|
|||
|
||||
@Override
|
||||
public int compareDocToValue(int doc, BytesRef value) {
|
||||
return docTerms.getValue(doc).compareTo(value);
|
||||
final int length = docTerms.setDocument(doc); // safes one hasValue lookup
|
||||
return (length == 0 ? missingValue : docTerms.nextValue()).compareTo(value);
|
||||
}
|
||||
|
||||
public static class FilteredByteValues extends BytesValues {
|
||||
|
||||
protected final BytesValues delegate;
|
||||
|
||||
public FilteredByteValues(BytesValues delegate) {
|
||||
super(delegate.isMultiValued());
|
||||
this.delegate = delegate;
|
||||
}
|
||||
|
||||
public boolean hasValue(int docId) {
|
||||
return delegate.hasValue(docId);
|
||||
}
|
||||
|
||||
public BytesRef makeSafe(BytesRef bytes) {
|
||||
return delegate.makeSafe(bytes);
|
||||
}
|
||||
|
||||
public BytesRef getValueScratch(int docId, BytesRef ret) {
|
||||
return delegate.getValueScratch(docId, ret);
|
||||
}
|
||||
|
||||
public Iter getIter(int docId) {
|
||||
return delegate.getIter(docId);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static final class MultiValuedBytesWrapper extends FilteredByteValues {
|
||||
private static final class MultiValuedBytesWrapper extends FilterBytesValues {
|
||||
|
||||
private final SortMode sortMode;
|
||||
private int numValues;
|
||||
|
||||
public MultiValuedBytesWrapper(BytesValues delegate, SortMode sortMode) {
|
||||
super(delegate);
|
||||
|
@ -154,40 +129,47 @@ public final class BytesRefValComparator extends NestedWrappableComparator<Bytes
|
|||
}
|
||||
|
||||
@Override
|
||||
public BytesRef getValueScratch(int docId, BytesRef relevantVal) {
|
||||
BytesValues.Iter iter = delegate.getIter(docId);
|
||||
if (!iter.hasNext()) {
|
||||
relevantVal.length = 0;
|
||||
return relevantVal;
|
||||
public BytesRef getValue(int docId) {
|
||||
numValues = delegate.setDocument(docId);
|
||||
scratch.length = 0;
|
||||
if (numValues == 0) {
|
||||
scratch.length = 0;
|
||||
return scratch;
|
||||
}
|
||||
return nextValue();
|
||||
}
|
||||
|
||||
BytesRef currentVal = iter.next();
|
||||
public int setDocument(int docId) {
|
||||
// either 0 or 1
|
||||
return Math.min(1, (numValues = delegate.setDocument(docId)));
|
||||
}
|
||||
|
||||
public BytesRef nextValue() {
|
||||
BytesRef currentVal = delegate.nextValue();
|
||||
// We MUST allocate a new byte[] since relevantVal might have been filled by reference by a PagedBytes instance
|
||||
// meaning that the BytesRef.bytes are shared and shouldn't be overwritten. We can't use the bytes of the iterator
|
||||
// either because they will be overwritten by subsequent calls in the current thread
|
||||
relevantVal.bytes = new byte[ArrayUtil.oversize(currentVal.length, RamUsageEstimator.NUM_BYTES_BYTE)];
|
||||
relevantVal.offset = 0;
|
||||
relevantVal.length = 0;
|
||||
relevantVal.append(currentVal);
|
||||
while (true) {
|
||||
int cmp = currentVal.compareTo(relevantVal);
|
||||
scratch.bytes = new byte[ArrayUtil.oversize(currentVal.length, RamUsageEstimator.NUM_BYTES_BYTE)];
|
||||
scratch.offset = 0;
|
||||
scratch.length = currentVal.length;
|
||||
System.arraycopy(currentVal.bytes, currentVal.offset, scratch.bytes, 0, currentVal.length);
|
||||
for (int i = 1; i < numValues; i++) {
|
||||
currentVal = delegate.nextValue();
|
||||
if (sortMode == SortMode.MAX) {
|
||||
if (cmp > 0) {
|
||||
relevantVal.length = 0;
|
||||
relevantVal.append(currentVal);
|
||||
if (currentVal.compareTo(scratch) > 0) {
|
||||
scratch.grow(currentVal.length);
|
||||
scratch.length = currentVal.length;
|
||||
System.arraycopy(currentVal.bytes, currentVal.offset, scratch.bytes, 0, currentVal.length);
|
||||
}
|
||||
} else {
|
||||
if (cmp < 0) {
|
||||
relevantVal.length = 0;
|
||||
relevantVal.append(currentVal);
|
||||
if (currentVal.compareTo(scratch) < 0) {
|
||||
scratch.grow(currentVal.length);
|
||||
scratch.length = currentVal.length;
|
||||
System.arraycopy(currentVal.bytes, currentVal.offset, scratch.bytes, 0, currentVal.length);
|
||||
}
|
||||
}
|
||||
if (!iter.hasNext()) {
|
||||
break;
|
||||
}
|
||||
currentVal = iter.next();
|
||||
}
|
||||
return relevantVal;
|
||||
return scratch;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.index.fielddata.fieldcomparator;
|
|||
import org.apache.lucene.index.AtomicReaderContext;
|
||||
import org.apache.lucene.search.FieldComparator;
|
||||
import org.elasticsearch.index.fielddata.DoubleValues;
|
||||
import org.elasticsearch.index.fielddata.FilterDoubleValues;
|
||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -70,7 +71,7 @@ abstract class DoubleValuesComparatorBase<T extends Number> extends NumberCompar
|
|||
return Double.compare(left, right);
|
||||
}
|
||||
|
||||
static final class MultiValueWrapper extends DoubleValues.Filtered {
|
||||
static final class MultiValueWrapper extends FilterDoubleValues {
|
||||
|
||||
private final SortMode sortMode;
|
||||
|
||||
|
@ -81,42 +82,15 @@ abstract class DoubleValuesComparatorBase<T extends Number> extends NumberCompar
|
|||
|
||||
@Override
|
||||
public double getValueMissing(int docId, double missing) {
|
||||
DoubleValues.Iter iter = delegate.getIter(docId);
|
||||
if (!iter.hasNext()) {
|
||||
int numValues = delegate.setDocument(docId);
|
||||
if (numValues == 0) {
|
||||
return missing;
|
||||
}
|
||||
|
||||
double currentVal = iter.next();
|
||||
double relevantVal = currentVal;
|
||||
int counter = 1;
|
||||
while (iter.hasNext()) {
|
||||
currentVal = iter.next();
|
||||
int cmp = Double.compare(currentVal, relevantVal);
|
||||
switch (sortMode) {
|
||||
case SUM:
|
||||
relevantVal += currentVal;
|
||||
break;
|
||||
case AVG:
|
||||
relevantVal += currentVal;
|
||||
counter++;
|
||||
break;
|
||||
case MIN:
|
||||
if (cmp < 0) {
|
||||
relevantVal = currentVal;
|
||||
}
|
||||
break;
|
||||
case MAX:
|
||||
if (cmp > 0) {
|
||||
relevantVal = currentVal;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (sortMode == SortMode.AVG) {
|
||||
return relevantVal / counter;
|
||||
} else {
|
||||
return relevantVal;
|
||||
double relevantVal = sortMode.startDouble();
|
||||
for (int i = 0; i < numValues; i++) {
|
||||
relevantVal = sortMode.apply(relevantVal, delegate.nextValue());
|
||||
}
|
||||
return sortMode.reduce(relevantVal, numValues);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -41,6 +41,7 @@ public class GeoDistanceComparator extends NumberComparatorBase<Double> {
|
|||
protected final GeoDistance geoDistance;
|
||||
protected final GeoDistance.FixedSourceDistance fixedSourceDistance;
|
||||
protected final SortMode sortMode;
|
||||
private static final Double MISSING_VALUE = Double.MAX_VALUE;
|
||||
|
||||
private final double[] values;
|
||||
private double bottom;
|
||||
|
@ -71,39 +72,19 @@ public class GeoDistanceComparator extends NumberComparatorBase<Double> {
|
|||
|
||||
@Override
|
||||
public int compare(int slot1, int slot2) {
|
||||
final double v1 = values[slot1];
|
||||
final double v2 = values[slot2];
|
||||
if (v1 > v2) {
|
||||
return 1;
|
||||
} else if (v1 < v2) {
|
||||
return -1;
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
return Double.compare(values[slot1], values[slot2]);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int compareBottom(int doc) {
|
||||
final double v2 = geoDistanceValues.computeDistance(doc);
|
||||
if (bottom > v2) {
|
||||
return 1;
|
||||
} else if (bottom < v2) {
|
||||
return -1;
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
return Double.compare(bottom, v2);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int compareDocToValue(int doc, Double distance2) throws IOException {
|
||||
double distance1 = geoDistanceValues.computeDistance(doc);
|
||||
if (distance1 < distance2) {
|
||||
return -1;
|
||||
} else if (distance1 == distance2) {
|
||||
return 0;
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
return Double.compare(distance1, distance2);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -133,12 +114,12 @@ public class GeoDistanceComparator extends NumberComparatorBase<Double> {
|
|||
|
||||
@Override
|
||||
public void missing(int slot) {
|
||||
values[slot] = Double.MAX_VALUE;
|
||||
values[slot] = MISSING_VALUE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int compareBottomMissing() {
|
||||
return Double.compare(bottom, Double.MAX_VALUE);
|
||||
return Double.compare(bottom, MISSING_VALUE);
|
||||
}
|
||||
|
||||
// Computes the distance based on geo points.
|
||||
|
@ -170,7 +151,7 @@ public class GeoDistanceComparator extends NumberComparatorBase<Double> {
|
|||
GeoPoint geoPoint = readerValues.getValue(doc);
|
||||
if (geoPoint == null) {
|
||||
// is this true? push this to the "end"
|
||||
return Double.MAX_VALUE;
|
||||
return MISSING_VALUE;
|
||||
} else {
|
||||
return fixedSourceDistance.calculate(geoPoint.lat(), geoPoint.lon());
|
||||
}
|
||||
|
@ -189,42 +170,15 @@ public class GeoDistanceComparator extends NumberComparatorBase<Double> {
|
|||
|
||||
@Override
|
||||
public double computeDistance(int doc) {
|
||||
GeoPointValues.Iter iter = readerValues.getIter(doc);
|
||||
if (!iter.hasNext()) {
|
||||
return Double.MAX_VALUE;
|
||||
}
|
||||
|
||||
GeoPoint point = iter.next();
|
||||
double distance = fixedSourceDistance.calculate(point.lat(), point.lon());
|
||||
int counter = 1;
|
||||
while (iter.hasNext()) {
|
||||
point = iter.next();
|
||||
double newDistance = fixedSourceDistance.calculate(point.lat(), point.lon());
|
||||
switch (sortMode) {
|
||||
case MIN:
|
||||
if (distance > newDistance) {
|
||||
distance = newDistance;
|
||||
}
|
||||
break;
|
||||
case MAX:
|
||||
if (distance < newDistance) {
|
||||
distance = newDistance;
|
||||
}
|
||||
break;
|
||||
case AVG:
|
||||
distance += newDistance;
|
||||
counter++;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (sortMode == SortMode.AVG && counter > 1) {
|
||||
return distance / counter;
|
||||
} else {
|
||||
return distance;
|
||||
final int length = readerValues.setDocument(doc);
|
||||
double distance = sortMode.startDouble();
|
||||
double result = MISSING_VALUE;
|
||||
for (int i = 0; i < length; i++) {
|
||||
GeoPoint point = readerValues.nextValue();
|
||||
result = distance = sortMode.apply(distance, fixedSourceDistance.calculate(point.lat(), point.lon()));
|
||||
}
|
||||
return sortMode.reduce(result, length);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -20,6 +20,7 @@ package org.elasticsearch.index.fielddata.fieldcomparator;
|
|||
|
||||
import org.apache.lucene.index.AtomicReaderContext;
|
||||
import org.apache.lucene.search.FieldComparator;
|
||||
import org.elasticsearch.index.fielddata.FilterLongValues;
|
||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
||||
import org.elasticsearch.index.fielddata.LongValues;
|
||||
|
||||
|
@ -77,7 +78,7 @@ abstract class LongValuesComparatorBase<T extends Number> extends NumberComparat
|
|||
return compare(bottom, missingValue);
|
||||
}
|
||||
|
||||
private static final class MultiValueWrapper extends LongValues.Filtered {
|
||||
private static final class MultiValueWrapper extends FilterLongValues {
|
||||
|
||||
private final SortMode sortMode;
|
||||
|
||||
|
@ -88,40 +89,14 @@ abstract class LongValuesComparatorBase<T extends Number> extends NumberComparat
|
|||
|
||||
@Override
|
||||
public long getValueMissing(int docId, long missing) {
|
||||
LongValues.Iter iter = delegate.getIter(docId);
|
||||
if (!iter.hasNext()) {
|
||||
return missing;
|
||||
final int numValues = delegate.setDocument(docId);
|
||||
long relevantVal = sortMode.startLong();
|
||||
long result = missing;
|
||||
for (int i = 0; i < numValues; i++) {
|
||||
result = relevantVal = sortMode.apply(relevantVal, delegate.nextValue());
|
||||
}
|
||||
return sortMode.reduce(result, numValues);
|
||||
|
||||
long currentVal = iter.next();
|
||||
long relevantVal = currentVal;
|
||||
int counter = 1;
|
||||
while (iter.hasNext()) {
|
||||
currentVal = iter.next();
|
||||
switch (sortMode) {
|
||||
case SUM:
|
||||
relevantVal += currentVal;
|
||||
break;
|
||||
case AVG:
|
||||
relevantVal += currentVal;
|
||||
counter++;
|
||||
break;
|
||||
case MAX:
|
||||
if (currentVal > relevantVal) {
|
||||
relevantVal = currentVal;
|
||||
}
|
||||
break;
|
||||
case MIN:
|
||||
if (currentVal < relevantVal) {
|
||||
relevantVal = currentVal;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (sortMode == SortMode.AVG) {
|
||||
return relevantVal / counter;
|
||||
} else {
|
||||
return relevantVal;
|
||||
}
|
||||
// If we have a method on readerValues that tells if the values emitted by Iter or ArrayRef are sorted per
|
||||
// document that we can do this or something similar:
|
||||
// (This is already possible, if values are loaded from index, but we just need a method that tells us this
|
||||
|
|
|
@ -22,6 +22,8 @@ package org.elasticsearch.index.fielddata.fieldcomparator;
|
|||
|
||||
import org.elasticsearch.ElasticSearchIllegalArgumentException;
|
||||
|
||||
import java.util.Locale;
|
||||
|
||||
/**
|
||||
* Defines what values to pick in the case a document contains multiple values for a particular field.
|
||||
*/
|
||||
|
@ -30,33 +32,233 @@ public enum SortMode {
|
|||
/**
|
||||
* Sum of all the values.
|
||||
*/
|
||||
SUM,
|
||||
SUM {
|
||||
/**
|
||||
* Returns the sum of the two values
|
||||
*/
|
||||
@Override
|
||||
public double apply(double a, double b) {
|
||||
return a + b;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the sum of the two values
|
||||
*/
|
||||
@Override
|
||||
public long apply(long a, long b) {
|
||||
return a + b;
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Average of all the values.
|
||||
*/
|
||||
AVG,
|
||||
AVG {
|
||||
|
||||
/**
|
||||
* Returns the sum of the two values
|
||||
*/
|
||||
@Override
|
||||
public double apply(double a, double b) {
|
||||
return a + b;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the sum of the two values
|
||||
*/
|
||||
@Override
|
||||
public long apply(long a, long b) {
|
||||
return a + b;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns <code>a / Math.max(1.0d, numValues)</code>
|
||||
*/
|
||||
@Override
|
||||
public double reduce(double a, int numValues) {
|
||||
return a / Math.max(1.0d, (double) numValues);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns <code>Math.round(a / Math.max(1.0, numValues))</code>
|
||||
*/
|
||||
@Override
|
||||
public long reduce(long a, int numValues) {
|
||||
return Math.round(a / Math.max(1.0, numValues));
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Pick the lowest value.
|
||||
*/
|
||||
MIN,
|
||||
MIN {
|
||||
/**
|
||||
* Equivalent to {@link Math#min(double, double)}
|
||||
*/
|
||||
@Override
|
||||
public double apply(double a, double b) {
|
||||
return Math.min(a, b);
|
||||
}
|
||||
|
||||
/**
|
||||
* Equivalent to {@link Math#min(long, long)}
|
||||
*/
|
||||
@Override
|
||||
public long apply(long a, long b) {
|
||||
return Math.min(a, b);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns {@link Double#POSITIVE_INFINITY}
|
||||
*/
|
||||
@Override
|
||||
public double startDouble() {
|
||||
return Double.POSITIVE_INFINITY;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns {@link Long#MAX_VALUE}
|
||||
*/
|
||||
@Override
|
||||
public long startLong() {
|
||||
return Long.MAX_VALUE;
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Pick the highest value.
|
||||
*/
|
||||
MAX;
|
||||
MAX {
|
||||
/**
|
||||
* Equivalent to {@link Math#max(double, double)}
|
||||
*/
|
||||
@Override
|
||||
public double apply(double a, double b) {
|
||||
return Math.max(a, b);
|
||||
}
|
||||
|
||||
/**
|
||||
* Equivalent to {@link Math#max(long, long)}
|
||||
*/
|
||||
@Override
|
||||
public long apply(long a, long b) {
|
||||
return Math.max(a, b);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns {@link Double#NEGATIVE_INFINITY}
|
||||
*/
|
||||
@Override
|
||||
public double startDouble() {
|
||||
return Double.NEGATIVE_INFINITY;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns {@link Long#MIN_VALUE}
|
||||
*/
|
||||
@Override
|
||||
public long startLong() {
|
||||
return Long.MIN_VALUE;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Applies the sort mode and returns the result. This method is meant to be
|
||||
* a binary function that is commonly used in a loop to find the relevant
|
||||
* value for the sort mode in a list of values. For instance if the sort mode
|
||||
* is {@link SortMode#MAX} this method is equivalent to {@link Math#max(double, double)}.
|
||||
*
|
||||
* Note: all implementations are idempotent.
|
||||
*
|
||||
* @param a an argument
|
||||
* @param b another argument
|
||||
* @return the result of the function.
|
||||
*/
|
||||
public abstract double apply(double a, double b);
|
||||
|
||||
/**
|
||||
* Applies the sort mode and returns the result. This method is meant to be
|
||||
* a binary function that is commonly used in a loop to find the relevant
|
||||
* value for the sort mode in a list of values. For instance if the sort mode
|
||||
* is {@link SortMode#MAX} this method is equivalent to {@link Math#max(long, long)}.
|
||||
*
|
||||
* Note: all implementations are idempotent.
|
||||
*
|
||||
* @param a an argument
|
||||
* @param b another argument
|
||||
* @return the result of the function.
|
||||
*/
|
||||
public abstract long apply(long a, long b);
|
||||
|
||||
/**
|
||||
* Returns an initial value for the sort mode that is guaranteed to have no impact if passed
|
||||
* to {@link #apply(double, double)}. This value should be used as the initial value if the
|
||||
* sort mode is applied to a non-empty list of values. For instance:
|
||||
* <pre>
|
||||
* double relevantValue = sortMode.startDouble();
|
||||
* for (int i = 0; i < array.length; i++) {
|
||||
* relevantValue = sortMode.apply(array[i], relevantValue);
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* Note: This method return <code>0</code> by default.
|
||||
*
|
||||
* @return an initial value for the sort mode.
|
||||
*/
|
||||
public double startDouble() {
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an initial value for the sort mode that is guaranteed to have no impact if passed
|
||||
* to {@link #apply(long, long)}. This value should be used as the initial value if the
|
||||
* sort mode is applied to a non-empty list of values. For instance:
|
||||
* <pre>
|
||||
* long relevantValue = sortMode.startLong();
|
||||
* for (int i = 0; i < array.length; i++) {
|
||||
* relevantValue = sortMode.apply(array[i], relevantValue);
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* Note: This method return <code>0</code> by default.
|
||||
* @return an initial value for the sort mode.
|
||||
*/
|
||||
public long startLong() {
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the aggregated value based on the sort mode. For instance if {@link SortMode#AVG} is used
|
||||
* this method divides the given value by the number of values. The default implementation returns
|
||||
* the first argument.
|
||||
*
|
||||
* Note: all implementations are idempotent.
|
||||
*/
|
||||
public double reduce(double a, int numValues) {
|
||||
return a;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the aggregated value based on the sort mode. For instance if {@link SortMode#AVG} is used
|
||||
* this method divides the given value by the number of values. The default implementation returns
|
||||
* the first argument.
|
||||
*
|
||||
* Note: all implementations are idempotent.
|
||||
*/
|
||||
public long reduce(long a, int numValues) {
|
||||
return a;
|
||||
}
|
||||
|
||||
/**
|
||||
* A case insensitive version of {@link #valueOf(String)}
|
||||
*
|
||||
* @throws ElasticSearchIllegalArgumentException if the given string doesn't match a sort mode or is <code>null</code>.
|
||||
*/
|
||||
public static SortMode fromString(String sortMode) {
|
||||
if ("min".equals(sortMode)) {
|
||||
return MIN;
|
||||
} else if ("max".equals(sortMode)) {
|
||||
return MAX;
|
||||
} else if ("sum".equals(sortMode)) {
|
||||
return SUM;
|
||||
} else if ("avg".equals(sortMode)) {
|
||||
return AVG;
|
||||
} else {
|
||||
try {
|
||||
return valueOf(sortMode.toUpperCase(Locale.ROOT));
|
||||
} catch (Throwable t) {
|
||||
throw new ElasticSearchIllegalArgumentException("Illegal sort_mode " + sortMode);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -38,16 +38,6 @@ public class DocIdOrdinals implements Ordinals {
|
|||
this.numDocs = numDocs;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasSingleArrayBackingStorage() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getBackingStorage() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getMemorySizeInBytes() {
|
||||
return RamUsageEstimator.NUM_BYTES_OBJECT_REF;
|
||||
|
@ -82,7 +72,8 @@ public class DocIdOrdinals implements Ordinals {
|
|||
|
||||
private final DocIdOrdinals parent;
|
||||
private final LongsRef longsScratch = new LongsRef(new long[1], 0, 1);
|
||||
private final SingleValueIter iter = new SingleValueIter();
|
||||
private int docId = -1;
|
||||
private long currentOrdinal = -1;
|
||||
|
||||
public Docs(DocIdOrdinals parent) {
|
||||
this.parent = parent;
|
||||
|
@ -115,18 +106,32 @@ public class DocIdOrdinals implements Ordinals {
|
|||
|
||||
@Override
|
||||
public long getOrd(int docId) {
|
||||
return docId + 1;
|
||||
return currentOrdinal = docId + 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public LongsRef getOrds(int docId) {
|
||||
longsScratch.longs[0] = docId + 1;
|
||||
longsScratch.longs[0] = currentOrdinal = docId + 1;
|
||||
return longsScratch;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
return iter.reset(docId + 1);
|
||||
public long nextOrd() {
|
||||
assert docId >= 0;
|
||||
currentOrdinal = docId + 1;
|
||||
docId = -1;
|
||||
return currentOrdinal;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int setDocument(int docId) {
|
||||
this.docId = docId;
|
||||
return 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long currentOrd() {
|
||||
return currentOrdinal;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.index.fielddata.ordinals;
|
||||
|
||||
import org.apache.lucene.util.LongsRef;
|
||||
import org.elasticsearch.ElasticSearchIllegalStateException;
|
||||
|
||||
/**
|
||||
*/
|
||||
|
@ -36,16 +37,6 @@ public class EmptyOrdinals implements Ordinals {
|
|||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasSingleArrayBackingStorage() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getBackingStorage() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isMultiValued() {
|
||||
return false;
|
||||
|
@ -72,7 +63,6 @@ public class EmptyOrdinals implements Ordinals {
|
|||
}
|
||||
|
||||
public static class Docs implements Ordinals.Docs {
|
||||
|
||||
private final EmptyOrdinals parent;
|
||||
public static final LongsRef EMPTY_LONGS_REF = new LongsRef();
|
||||
|
||||
|
@ -116,9 +106,18 @@ public class EmptyOrdinals implements Ordinals {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
return EmptyIter.INSTANCE;
|
||||
public long nextOrd() {
|
||||
throw new ElasticSearchIllegalStateException("Empty ordinals has no nextOrd");
|
||||
}
|
||||
|
||||
@Override
|
||||
public int setDocument(int docId) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long currentOrd() {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -25,7 +25,6 @@ import org.apache.lucene.util.RamUsageEstimator;
|
|||
import org.apache.lucene.util.packed.AppendingPackedLongBuffer;
|
||||
import org.apache.lucene.util.packed.MonotonicAppendingLongBuffer;
|
||||
import org.apache.lucene.util.packed.PackedInts;
|
||||
import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs.Iter;
|
||||
|
||||
/**
|
||||
* {@link Ordinals} implementation which is efficient at storing field data ordinals for multi-valued or sparse fields.
|
||||
|
@ -77,16 +76,6 @@ public class MultiOrdinals implements Ordinals {
|
|||
assert ords.size() == builder.getTotalNumOrds() : ords.size() + " != " + builder.getTotalNumOrds();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasSingleArrayBackingStorage() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getBackingStorage() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getMemorySizeInBytes() {
|
||||
return endOffsets.ramBytesUsed() + ords.ramBytesUsed();
|
||||
|
@ -123,14 +112,15 @@ public class MultiOrdinals implements Ordinals {
|
|||
private final MonotonicAppendingLongBuffer endOffsets;
|
||||
private final AppendingPackedLongBuffer ords;
|
||||
private final LongsRef longsScratch;
|
||||
private final MultiIter iter;
|
||||
private long offset;
|
||||
private long limit;
|
||||
private long currentOrd;
|
||||
|
||||
MultiDocs(MultiOrdinals ordinals) {
|
||||
this.ordinals = ordinals;
|
||||
this.endOffsets = ordinals.endOffsets;
|
||||
this.ords = ordinals.ords;
|
||||
this.longsScratch = new LongsRef(16);
|
||||
this.iter = new MultiIter(ords);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -163,9 +153,9 @@ public class MultiOrdinals implements Ordinals {
|
|||
final long startOffset = docId > 0 ? endOffsets.get(docId - 1) : 0;
|
||||
final long endOffset = endOffsets.get(docId);
|
||||
if (startOffset == endOffset) {
|
||||
return 0L; // ord for missing values
|
||||
return currentOrd = 0L; // ord for missing values
|
||||
} else {
|
||||
return 1L + ords.get(startOffset);
|
||||
return currentOrd = 1L + ords.get(startOffset);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -186,34 +176,23 @@ public class MultiOrdinals implements Ordinals {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
final long startOffset = docId > 0 ? endOffsets.get(docId - 1) : 0;
|
||||
final long endOffset = endOffsets.get(docId);
|
||||
iter.offset = startOffset;
|
||||
iter.endOffset = endOffset;
|
||||
return iter;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class MultiIter implements Iter {
|
||||
|
||||
final AppendingPackedLongBuffer ordinals;
|
||||
long offset, endOffset;
|
||||
|
||||
MultiIter(AppendingPackedLongBuffer ordinals) {
|
||||
this.ordinals = ordinals;
|
||||
public long nextOrd() {
|
||||
assert offset < limit;
|
||||
return currentOrd = 1L + ords.get(offset++);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long next() {
|
||||
if (offset >= endOffset) {
|
||||
return 0L;
|
||||
} else {
|
||||
return 1L + ordinals.get(offset++);
|
||||
}
|
||||
public int setDocument(int docId) {
|
||||
final long startOffset = docId > 0 ? endOffsets.get(docId - 1) : 0;
|
||||
final long endOffset = endOffsets.get(docId);
|
||||
offset = startOffset;
|
||||
limit = endOffset;
|
||||
return (int) (endOffset - startOffset);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long currentOrd() {
|
||||
return currentOrd;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -26,15 +26,8 @@ import org.apache.lucene.util.LongsRef;
|
|||
*/
|
||||
public interface Ordinals {
|
||||
|
||||
/**
|
||||
* Are the ordinals backed by a single ordinals array?
|
||||
*/
|
||||
boolean hasSingleArrayBackingStorage();
|
||||
|
||||
/**
|
||||
* Returns the backing storage for this ordinals.
|
||||
*/
|
||||
Object getBackingStorage();
|
||||
static final long MISSING_ORDINAL = 0;
|
||||
static final long MIN_ORDINAL = 1;
|
||||
|
||||
/**
|
||||
* The memory size this ordinals take.
|
||||
|
@ -52,13 +45,13 @@ public interface Ordinals {
|
|||
int getNumDocs();
|
||||
|
||||
/**
|
||||
* The number of ordinals, excluding the "0" ordinal indicating a missing value.
|
||||
* The number of ordinals, excluding the {@link #MISSING_ORDINAL} ordinal indicating a missing value.
|
||||
*/
|
||||
long getNumOrds();
|
||||
|
||||
/**
|
||||
* Returns total unique ord count; this includes +1 for
|
||||
* the null ord (always 0).
|
||||
* the {@link #MISSING_ORDINAL} ord (always {@value #MISSING_ORDINAL} ).
|
||||
*/
|
||||
long getMaxOrd();
|
||||
|
||||
|
@ -72,6 +65,16 @@ public interface Ordinals {
|
|||
* is that this gets created for each "iteration" over ordinals.
|
||||
* <p/>
|
||||
* <p>A value of 0 ordinal when iterating indicated "no" value.</p>
|
||||
* To iterate of a set of ordinals for a given document use {@link #setDocument(int)} and {@link #nextOrd()} as
|
||||
* show in the example below:
|
||||
* <pre>
|
||||
* Ordinals.Docs docs = ...;
|
||||
* final int len = docs.setDocId(docId);
|
||||
* for (int i = 0; i < len; i++) {
|
||||
* final long ord = docs.nextOrd();
|
||||
* // process ord
|
||||
* }
|
||||
* </pre>
|
||||
*/
|
||||
interface Docs {
|
||||
|
||||
|
@ -113,51 +116,35 @@ public interface Ordinals {
|
|||
*/
|
||||
LongsRef getOrds(int docId);
|
||||
|
||||
|
||||
/**
|
||||
* Returns an iterator of the ordinals that match the docId, with an
|
||||
* empty iterator for a doc with no ordinals.
|
||||
* Returns the next ordinal for the current docID set to {@link #setDocument(int)}.
|
||||
* This method should only be called <tt>N</tt> times where <tt>N</tt> is the number
|
||||
* returned from {@link #setDocument(int)}. If called more than <tt>N</tt> times the behavior
|
||||
* is undefined.
|
||||
*
|
||||
* Note: This method will never return <tt>0</tt>.
|
||||
*
|
||||
* @return the next ordinal for the current docID set to {@link #setDocument(int)}.
|
||||
*/
|
||||
Iter getIter(int docId);
|
||||
long nextOrd();
|
||||
|
||||
|
||||
/**
|
||||
* An iterator over ordinals values.
|
||||
* Sets iteration to the specified docID and returns the number of
|
||||
* ordinals for this document ID,
|
||||
* @param docId document ID
|
||||
*
|
||||
* @see #nextOrd()
|
||||
*/
|
||||
interface Iter {
|
||||
int setDocument(int docId);
|
||||
|
||||
/**
|
||||
* Gets the next ordinal. Returning 0 if the iteration is exhausted.
|
||||
*/
|
||||
long next();
|
||||
}
|
||||
|
||||
static class EmptyIter implements Iter {
|
||||
|
||||
public static EmptyIter INSTANCE = new EmptyIter();
|
||||
|
||||
@Override
|
||||
public long next() {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
static class SingleValueIter implements Iter {
|
||||
|
||||
private long value;
|
||||
|
||||
public SingleValueIter reset(long value) {
|
||||
this.value = value;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long next() {
|
||||
long actual = value;
|
||||
value = 0;
|
||||
return actual;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the current ordinal in the iteration
|
||||
* @return the current ordinal in the iteration
|
||||
*/
|
||||
long currentOrd();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -44,19 +44,6 @@ public class SinglePackedOrdinals implements Ordinals {
|
|||
this.reader = reader;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasSingleArrayBackingStorage() {
|
||||
return reader.hasArray();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getBackingStorage() {
|
||||
if (reader.hasArray()) {
|
||||
return reader.getArray();
|
||||
}
|
||||
return reader;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getMemorySizeInBytes() {
|
||||
if (size == -1) {
|
||||
|
@ -96,7 +83,7 @@ public class SinglePackedOrdinals implements Ordinals {
|
|||
private final PackedInts.Reader reader;
|
||||
|
||||
private final LongsRef longsScratch = new LongsRef(1);
|
||||
private final SingleValueIter iter = new SingleValueIter();
|
||||
private long currentOrdinal;
|
||||
|
||||
public Docs(SinglePackedOrdinals parent, PackedInts.Reader reader) {
|
||||
this.parent = parent;
|
||||
|
@ -130,26 +117,34 @@ public class SinglePackedOrdinals implements Ordinals {
|
|||
|
||||
@Override
|
||||
public long getOrd(int docId) {
|
||||
return reader.get(docId);
|
||||
return currentOrdinal = reader.get(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public LongsRef getOrds(int docId) {
|
||||
final long ordinal = reader.get(docId);
|
||||
if (ordinal == 0) {
|
||||
longsScratch.length = 0;
|
||||
} else {
|
||||
longsScratch.offset = 0;
|
||||
longsScratch.length = 1;
|
||||
longsScratch.longs[0] = ordinal;
|
||||
}
|
||||
longsScratch.offset = 0;
|
||||
longsScratch.length = (int)Math.min(currentOrdinal, 1);
|
||||
longsScratch.longs[0] = currentOrdinal = ordinal;
|
||||
return longsScratch;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
return iter.reset((int) reader.get(docId));
|
||||
public long nextOrd() {
|
||||
assert currentOrdinal > 0;
|
||||
return currentOrdinal;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int setDocument(int docId) {
|
||||
currentOrdinal = reader.get(docId);
|
||||
// either this is > 1 or 0 - in any case it prevents a branch!
|
||||
return (int)Math.min(currentOrdinal, 1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long currentOrd() {
|
||||
return currentOrdinal;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -90,27 +90,19 @@ public class BinaryDVAtomicFieldData implements AtomicFieldData<ScriptDocValues.
|
|||
|
||||
return new BytesValues(false) {
|
||||
|
||||
final BytesValues.Iter.Single iter = new BytesValues.Iter.Single();
|
||||
final BytesRef spare = new BytesRef();
|
||||
|
||||
@Override
|
||||
public boolean hasValue(int docId) {
|
||||
return docsWithField.get(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef getValueScratch(int docId, BytesRef ret) {
|
||||
values.get(docId, ret);
|
||||
return ret;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
if (!docsWithField.get(docId)) {
|
||||
return BytesValues.Iter.Empty.INSTANCE;
|
||||
public BytesRef getValue(int docId) {
|
||||
if (docsWithField.get(docId)) {
|
||||
values.get(docId, scratch);
|
||||
return scratch;
|
||||
}
|
||||
values.get(docId, spare);
|
||||
return iter.reset(spare, -1L);
|
||||
scratch.length = 0;
|
||||
return scratch;
|
||||
}
|
||||
|
||||
};
|
||||
|
|
|
@ -0,0 +1,45 @@
|
|||
/*
|
||||
* Licensed to ElasticSearch and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. ElasticSearch licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.fielddata.plain;
|
||||
|
||||
import org.elasticsearch.index.fielddata.DoubleValues;
|
||||
|
||||
/**
|
||||
* Package private base class for dense long values.
|
||||
*/
|
||||
abstract class DenseDoubleValues extends DoubleValues {
|
||||
|
||||
|
||||
protected DenseDoubleValues(boolean multiValued) {
|
||||
super(multiValued);
|
||||
}
|
||||
|
||||
@Override
|
||||
public final boolean hasValue(int docId) {
|
||||
return true;
|
||||
}
|
||||
|
||||
public final double getValueMissing(int docId, double missingValue) {
|
||||
assert hasValue(docId);
|
||||
assert !isMultiValued();
|
||||
return getValue(docId);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,48 @@
|
|||
/*
|
||||
* Licensed to ElasticSearch and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. ElasticSearch licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.index.fielddata.plain;
|
||||
|
||||
import org.elasticsearch.index.fielddata.LongValues;
|
||||
|
||||
/**
|
||||
* Package private base class for dense long values.
|
||||
*/
|
||||
abstract class DenseLongValues extends LongValues {
|
||||
|
||||
protected DenseLongValues(boolean multiValued) {
|
||||
super(multiValued);
|
||||
}
|
||||
|
||||
@Override
|
||||
public final boolean hasValue(int docId) {
|
||||
return true;
|
||||
}
|
||||
|
||||
public final long getValueMissing(int docId, long missingValue) {
|
||||
assert hasValue(docId);
|
||||
assert !isMultiValued();
|
||||
return getValue(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int setDocument(int docId) {
|
||||
this.docId = docId;
|
||||
return 1;
|
||||
}
|
||||
}
|
|
@ -155,6 +155,7 @@ public abstract class DoubleArrayAtomicFieldData extends AbstractAtomicNumericFi
|
|||
|
||||
@Override
|
||||
public final long getValueByOrd(long ord) {
|
||||
assert ord != Ordinals.MISSING_ORDINAL;
|
||||
return (long) values.get(ord);
|
||||
}
|
||||
}
|
||||
|
@ -170,6 +171,7 @@ public abstract class DoubleArrayAtomicFieldData extends AbstractAtomicNumericFi
|
|||
|
||||
@Override
|
||||
public double getValueByOrd(long ord) {
|
||||
assert ord != Ordinals.MISSING_ORDINAL;
|
||||
return values.get(ord);
|
||||
}
|
||||
}
|
||||
|
@ -322,7 +324,7 @@ public abstract class DoubleArrayAtomicFieldData extends AbstractAtomicNumericFi
|
|||
return new DoubleValues(values);
|
||||
}
|
||||
|
||||
static class LongValues extends org.elasticsearch.index.fielddata.LongValues.Dense {
|
||||
static final class LongValues extends DenseLongValues {
|
||||
|
||||
private final BigDoubleArrayList values;
|
||||
|
||||
|
@ -336,9 +338,16 @@ public abstract class DoubleArrayAtomicFieldData extends AbstractAtomicNumericFi
|
|||
return (long) values.get(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long nextValue() {
|
||||
return (long) values.get(docId);
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
static class DoubleValues extends org.elasticsearch.index.fielddata.DoubleValues.Dense {
|
||||
static final class DoubleValues extends DenseDoubleValues {
|
||||
|
||||
private final BigDoubleArrayList values;
|
||||
|
||||
|
@ -352,6 +361,11 @@ public abstract class DoubleArrayAtomicFieldData extends AbstractAtomicNumericFi
|
|||
return values.get(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public double nextValue() {
|
||||
return values.get(docId);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,56 @@
|
|||
/*
|
||||
* Licensed to ElasticSearch and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. ElasticSearch licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.index.fielddata.plain;
|
||||
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.ElasticSearchIllegalStateException;
|
||||
import org.elasticsearch.index.fielddata.BytesValues;
|
||||
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
|
||||
|
||||
/**
|
||||
* An empty {@link org.elasticsearch.index.fielddata.BytesValues.WithOrdinals} implementation
|
||||
*/
|
||||
final class EmptyByteValuesWithOrdinals extends BytesValues.WithOrdinals {
|
||||
|
||||
EmptyByteValuesWithOrdinals(Ordinals.Docs ordinals) {
|
||||
super(ordinals);
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef getValueByOrd(long ord) {
|
||||
scratch.length = 0;
|
||||
return scratch;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int setDocument(int docId) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef nextValue() {
|
||||
throw new ElasticSearchIllegalStateException("Empty BytesValues has no next value");
|
||||
}
|
||||
|
||||
@Override
|
||||
public int currentValueHash() {
|
||||
throw new ElasticSearchIllegalStateException("Empty BytesValues has no hash for the current value");
|
||||
}
|
||||
|
||||
}
|
|
@ -21,11 +21,9 @@ package org.elasticsearch.index.fielddata.plain;
|
|||
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.IntsRef;
|
||||
import org.apache.lucene.util.fst.BytesRefFSTEnum;
|
||||
import org.apache.lucene.util.fst.FST;
|
||||
import org.apache.lucene.util.fst.*;
|
||||
import org.apache.lucene.util.fst.FST.Arc;
|
||||
import org.apache.lucene.util.fst.FST.BytesReader;
|
||||
import org.apache.lucene.util.fst.Util;
|
||||
import org.elasticsearch.common.util.BigIntArray;
|
||||
import org.elasticsearch.index.fielddata.AtomicFieldData;
|
||||
import org.elasticsearch.index.fielddata.ScriptDocValues;
|
||||
|
@ -94,7 +92,7 @@ public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<Scr
|
|||
@Override
|
||||
public BytesValues.WithOrdinals getBytesValues() {
|
||||
assert fst != null;
|
||||
return ordinals.isMultiValued() ? new BytesValues.Multi(fst, ordinals.ordinals()) : new BytesValues.Single(fst, ordinals.ordinals());
|
||||
return new BytesValues(fst, ordinals.ordinals());
|
||||
}
|
||||
|
||||
|
||||
|
@ -126,10 +124,10 @@ public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<Scr
|
|||
}
|
||||
this.hashes = hashes;
|
||||
}
|
||||
return ordinals.isMultiValued() ? new BytesValues.MultiHashed(fst, ordinals.ordinals(), hashes) : new BytesValues.SingleHashed(fst, ordinals.ordinals(), hashes);
|
||||
return new HashedBytesValues(fst, ordinals.ordinals(), hashes);
|
||||
}
|
||||
|
||||
static abstract class BytesValues extends org.elasticsearch.index.fielddata.BytesValues.WithOrdinals {
|
||||
static class BytesValues extends org.elasticsearch.index.fielddata.BytesValues.WithOrdinals {
|
||||
|
||||
protected final FST<Long> fst;
|
||||
protected final Ordinals.Docs ordinals;
|
||||
|
@ -148,112 +146,40 @@ public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<Scr
|
|||
}
|
||||
|
||||
@Override
|
||||
public BytesRef getValueScratchByOrd(long ord, BytesRef ret) {
|
||||
if (ord == 0) {
|
||||
ret.length = 0;
|
||||
return ret;
|
||||
}
|
||||
public BytesRef getValueByOrd(long ord) {
|
||||
assert ord != Ordinals.MISSING_ORDINAL;
|
||||
in.setPosition(0);
|
||||
fst.getFirstArc(firstArc);
|
||||
try {
|
||||
IntsRef output = Util.getByOutput(fst, ord, in, firstArc, scratchArc, scratchInts);
|
||||
ret.grow(output.length);
|
||||
ret.length = ret.offset = 0;
|
||||
Util.toBytesRef(output, ret);
|
||||
scratch.length = scratch.offset = 0;
|
||||
scratch.grow(output.length);
|
||||
Util.toBytesRef(output, scratch);
|
||||
} catch (IOException ex) {
|
||||
//bogus
|
||||
}
|
||||
return ret;
|
||||
return scratch;
|
||||
}
|
||||
|
||||
static class Single extends BytesValues {
|
||||
private final Iter.Single iter;
|
||||
}
|
||||
|
||||
Single(FST<Long> fst, Ordinals.Docs ordinals) {
|
||||
super(fst, ordinals);
|
||||
assert !ordinals.isMultiValued();
|
||||
this.iter = newSingleIter();
|
||||
}
|
||||
static final class HashedBytesValues extends BytesValues {
|
||||
private final BigIntArray hashes;
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
long ord = ordinals.getOrd(docId);
|
||||
if (ord == 0) return Iter.Empty.INSTANCE;
|
||||
return iter.reset(getValueByOrd(ord), ord);
|
||||
}
|
||||
HashedBytesValues(FST<Long> fst, Docs ordinals, BigIntArray hashes) {
|
||||
super(fst, ordinals);
|
||||
this.hashes = hashes;
|
||||
}
|
||||
|
||||
static final class SingleHashed extends Single {
|
||||
private final BigIntArray hashes;
|
||||
|
||||
SingleHashed(FST<Long> fst, Docs ordinals, BigIntArray hashes) {
|
||||
super(fst, ordinals);
|
||||
this.hashes = hashes;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Iter.Single newSingleIter() {
|
||||
return new Iter.Single() {
|
||||
public int hash() {
|
||||
return hashes.get(ord);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getValueHashed(int docId, BytesRef ret) {
|
||||
final long ord = ordinals.getOrd(docId);
|
||||
getValueScratchByOrd(ord, ret);
|
||||
return hashes.get(ord);
|
||||
}
|
||||
}
|
||||
|
||||
static class Multi extends BytesValues {
|
||||
|
||||
private final Iter.Multi iter;
|
||||
|
||||
Multi(FST<Long> fst, Ordinals.Docs ordinals) {
|
||||
super(fst, ordinals);
|
||||
assert ordinals.isMultiValued();
|
||||
this.iter = newMultiIter();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
return iter.reset(ordinals.getIter(docId));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static final class MultiHashed extends Multi {
|
||||
private final BigIntArray hashes;
|
||||
|
||||
MultiHashed(FST<Long> fst, Docs ordinals, BigIntArray hashes) {
|
||||
super(fst, ordinals);
|
||||
this.hashes = hashes;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Iter.Multi newMultiIter() {
|
||||
return new Iter.Multi(this) {
|
||||
public int hash() {
|
||||
return hashes.get(ord);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getValueHashed(int docId, BytesRef ret) {
|
||||
final long ord = ordinals.getOrd(docId);
|
||||
getValueScratchByOrd(ord, ret);
|
||||
return hashes.get(ord);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int currentValueHash() {
|
||||
assert ordinals.currentOrd() >= 0;
|
||||
return hashes.get(ordinals.currentOrd());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static class Empty extends FSTBytesAtomicFieldData {
|
||||
final static class Empty extends FSTBytesAtomicFieldData {
|
||||
|
||||
Empty(int numDocs) {
|
||||
super(null, new EmptyOrdinals(numDocs));
|
||||
|
@ -276,7 +202,7 @@ public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<Scr
|
|||
|
||||
@Override
|
||||
public BytesValues.WithOrdinals getBytesValues() {
|
||||
return new BytesValues.WithOrdinals.Empty(ordinals.ordinals());
|
||||
return new EmptyByteValuesWithOrdinals(ordinals.ordinals());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -153,6 +153,7 @@ public abstract class FloatArrayAtomicFieldData extends AbstractAtomicNumericFie
|
|||
|
||||
@Override
|
||||
public long getValueByOrd(long ord) {
|
||||
assert ord != Ordinals.MISSING_ORDINAL;
|
||||
return (long) values.get(ord);
|
||||
}
|
||||
}
|
||||
|
@ -323,7 +324,7 @@ public abstract class FloatArrayAtomicFieldData extends AbstractAtomicNumericFie
|
|||
}
|
||||
|
||||
|
||||
static class LongValues extends org.elasticsearch.index.fielddata.LongValues.Dense {
|
||||
static class LongValues extends DenseLongValues {
|
||||
|
||||
private final BigFloatArrayList values;
|
||||
|
||||
|
@ -337,9 +338,15 @@ public abstract class FloatArrayAtomicFieldData extends AbstractAtomicNumericFie
|
|||
return (long) values.get(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long nextValue() {
|
||||
return (long) values.get(docId);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
static class DoubleValues extends org.elasticsearch.index.fielddata.DoubleValues.Dense {
|
||||
static class DoubleValues extends DenseDoubleValues {
|
||||
|
||||
private final BigFloatArrayList values;
|
||||
|
||||
|
@ -352,6 +359,12 @@ public abstract class FloatArrayAtomicFieldData extends AbstractAtomicNumericFie
|
|||
public double getValue(int docId) {
|
||||
return (double) values.get(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public double nextValue() {
|
||||
return values.get(docId);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -147,111 +147,39 @@ public abstract class GeoPointDoubleArrayAtomicFieldData extends AtomicGeoPointF
|
|||
private final Ordinals.Docs ordinals;
|
||||
|
||||
private final GeoPoint scratch = new GeoPoint();
|
||||
private final ValuesIter valuesIter;
|
||||
private final SafeValuesIter safeValuesIter;
|
||||
|
||||
GeoPointValuesWithOrdinals(BigDoubleArrayList lon, BigDoubleArrayList lat, Ordinals.Docs ordinals) {
|
||||
super(ordinals.isMultiValued());
|
||||
this.lon = lon;
|
||||
this.lat = lat;
|
||||
this.ordinals = ordinals;
|
||||
this.valuesIter = new ValuesIter(lon, lat);
|
||||
this.safeValuesIter = new SafeValuesIter(lon, lat);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasValue(int docId) {
|
||||
return ordinals.getOrd(docId) != 0;
|
||||
return ordinals.getOrd(docId) != Ordinals.MISSING_ORDINAL;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPoint getValue(int docId) {
|
||||
long ord = ordinals.getOrd(docId);
|
||||
if (ord == 0L) {
|
||||
if (ord == Ordinals.MISSING_ORDINAL) {
|
||||
return null;
|
||||
}
|
||||
return scratch.reset(lat.get(ord), lon.get(ord));
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPoint getValueSafe(int docId) {
|
||||
long ord = ordinals.getOrd(docId);
|
||||
if (ord == 0L) {
|
||||
return null;
|
||||
}
|
||||
return new GeoPoint(lat.get(ord), lon.get(ord));
|
||||
public GeoPoint nextValue() {
|
||||
final long ord = ordinals.nextOrd();
|
||||
assert ord > 0;
|
||||
return scratch.reset(lat.get(ord), lon.get(ord));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
return valuesIter.reset(ordinals.getIter(docId));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIterSafe(int docId) {
|
||||
return safeValuesIter.reset(ordinals.getIter(docId));
|
||||
}
|
||||
|
||||
|
||||
static class ValuesIter implements Iter {
|
||||
|
||||
private final BigDoubleArrayList lon, lat;
|
||||
private final GeoPoint scratch = new GeoPoint();
|
||||
|
||||
private Ordinals.Docs.Iter ordsIter;
|
||||
private long ord;
|
||||
|
||||
ValuesIter(BigDoubleArrayList lon, BigDoubleArrayList lat) {
|
||||
this.lon = lon;
|
||||
this.lat = lat;
|
||||
}
|
||||
|
||||
public ValuesIter reset(Ordinals.Docs.Iter ordsIter) {
|
||||
this.ordsIter = ordsIter;
|
||||
this.ord = ordsIter.next();
|
||||
return this;
|
||||
}
|
||||
|
||||
public boolean hasNext() {
|
||||
return ord != 0;
|
||||
}
|
||||
|
||||
public GeoPoint next() {
|
||||
scratch.reset(lat.get(ord), lon.get(ord));
|
||||
ord = ordsIter.next();
|
||||
return scratch;
|
||||
}
|
||||
}
|
||||
|
||||
static class SafeValuesIter implements Iter {
|
||||
|
||||
private final BigDoubleArrayList lon, lat;
|
||||
|
||||
private Ordinals.Docs.Iter ordsIter;
|
||||
private long ord;
|
||||
|
||||
SafeValuesIter(BigDoubleArrayList lon, BigDoubleArrayList lat) {
|
||||
this.lon = lon;
|
||||
this.lat = lat;
|
||||
}
|
||||
|
||||
public SafeValuesIter reset(Ordinals.Docs.Iter ordsIter) {
|
||||
this.ordsIter = ordsIter;
|
||||
this.ord = ordsIter.next();
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return ord != 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPoint next() {
|
||||
GeoPoint value = new GeoPoint(lat.get(ord), lon.get(ord));
|
||||
ord = ordsIter.next();
|
||||
return value;
|
||||
}
|
||||
public int setDocument(int docId) {
|
||||
this.docId = docId;
|
||||
return ordinals.setDocument(docId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -307,9 +235,7 @@ public abstract class GeoPointDoubleArrayAtomicFieldData extends AtomicGeoPointF
|
|||
private final BigDoubleArrayList lon;
|
||||
private final BigDoubleArrayList lat;
|
||||
private final FixedBitSet set;
|
||||
|
||||
private final GeoPoint scratch = new GeoPoint();
|
||||
private final Iter.Single iter = new Iter.Single();
|
||||
|
||||
|
||||
GeoPointValuesSingleFixedSet(BigDoubleArrayList lon, BigDoubleArrayList lat, FixedBitSet set) {
|
||||
|
@ -332,33 +258,6 @@ public abstract class GeoPointDoubleArrayAtomicFieldData extends AtomicGeoPointF
|
|||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPoint getValueSafe(int docId) {
|
||||
if (set.get(docId)) {
|
||||
return new GeoPoint(lat.get(docId), lon.get(docId));
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
if (set.get(docId)) {
|
||||
return iter.reset(scratch.reset(lat.get(docId), lon.get(docId)));
|
||||
} else {
|
||||
return Iter.Empty.INSTANCE;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIterSafe(int docId) {
|
||||
if (set.get(docId)) {
|
||||
return iter.reset(new GeoPoint(lat.get(docId), lon.get(docId)));
|
||||
} else {
|
||||
return Iter.Empty.INSTANCE;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -412,7 +311,6 @@ public abstract class GeoPointDoubleArrayAtomicFieldData extends AtomicGeoPointF
|
|||
private final BigDoubleArrayList lat;
|
||||
|
||||
private final GeoPoint scratch = new GeoPoint();
|
||||
private final Iter.Single iter = new Iter.Single();
|
||||
|
||||
|
||||
GeoPointValuesSingle(BigDoubleArrayList lon, BigDoubleArrayList lat) {
|
||||
|
@ -430,21 +328,6 @@ public abstract class GeoPointDoubleArrayAtomicFieldData extends AtomicGeoPointF
|
|||
public GeoPoint getValue(int docId) {
|
||||
return scratch.reset(lat.get(docId), lon.get(docId));
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPoint getValueSafe(int docId) {
|
||||
return new GeoPoint(lat.get(docId), lon.get(docId));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
return iter.reset(scratch.reset(lat.get(docId), lon.get(docId)));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIterSafe(int docId) {
|
||||
return iter.reset(new GeoPoint(lat.get(docId), lon.get(docId)));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -154,7 +154,8 @@ public abstract class PackedArrayAtomicFieldData extends AbstractAtomicNumericFi
|
|||
|
||||
@Override
|
||||
public long getValueByOrd(long ord) {
|
||||
return ord == 0 ? 0L : values.get(ord - 1);
|
||||
assert ord != Ordinals.MISSING_ORDINAL;
|
||||
return values.get(ord - 1);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -169,7 +170,8 @@ public abstract class PackedArrayAtomicFieldData extends AbstractAtomicNumericFi
|
|||
|
||||
@Override
|
||||
public double getValueByOrd(long ord) {
|
||||
return ord == 0 ? 0L : values.get(ord - 1);
|
||||
assert ord != Ordinals.MISSING_ORDINAL;
|
||||
return values.get(ord - 1);
|
||||
}
|
||||
|
||||
|
||||
|
@ -333,7 +335,7 @@ public abstract class PackedArrayAtomicFieldData extends AbstractAtomicNumericFi
|
|||
return new DoubleValues(values, minValue);
|
||||
}
|
||||
|
||||
static class LongValues extends org.elasticsearch.index.fielddata.LongValues.Dense {
|
||||
static class LongValues extends DenseLongValues {
|
||||
|
||||
private final PackedInts.Mutable values;
|
||||
private final long minValue;
|
||||
|
@ -349,9 +351,15 @@ public abstract class PackedArrayAtomicFieldData extends AbstractAtomicNumericFi
|
|||
return minValue + values.get(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long nextValue() {
|
||||
return minValue + values.get(docId);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
static class DoubleValues extends org.elasticsearch.index.fielddata.DoubleValues.Dense {
|
||||
static class DoubleValues extends DenseDoubleValues {
|
||||
|
||||
private final PackedInts.Mutable values;
|
||||
private final long minValue;
|
||||
|
@ -367,6 +375,11 @@ public abstract class PackedArrayAtomicFieldData extends AbstractAtomicNumericFi
|
|||
return minValue + values.get(docId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public double nextValue() {
|
||||
return minValue + values.get(docId);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -165,7 +165,7 @@ public class PackedArrayIndexFieldData extends AbstractIndexFieldData<AtomicNume
|
|||
}
|
||||
for (int i = 0; i < reader.maxDoc(); i++) {
|
||||
final long ord = ordinals.getOrd(i);
|
||||
if (ord > 0) {
|
||||
if (ord != Ordinals.MISSING_ORDINAL) {
|
||||
sValues.set(i, values.get(ord - 1) - minValue);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -107,15 +107,13 @@ public class PagedBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<S
|
|||
|
||||
@Override
|
||||
public BytesValues.WithOrdinals getBytesValues() {
|
||||
return ordinals.isMultiValued() ? new BytesValues.Multi(bytes, termOrdToBytesOffset, ordinals.ordinals()) : new BytesValues.Single(
|
||||
bytes, termOrdToBytesOffset, ordinals.ordinals());
|
||||
return new BytesValues(bytes, termOrdToBytesOffset, ordinals.ordinals());
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.elasticsearch.index.fielddata.BytesValues.WithOrdinals getHashedBytesValues() {
|
||||
final BigIntArray hashes = getHashes();
|
||||
return ordinals.isMultiValued() ? new BytesValues.MultiHashed(hashes, bytes, termOrdToBytesOffset, ordinals.ordinals())
|
||||
: new BytesValues.SingleHashed(hashes, bytes, termOrdToBytesOffset, ordinals.ordinals());
|
||||
return new BytesValues.HashedBytesValues(hashes, bytes, termOrdToBytesOffset, ordinals.ordinals());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -123,14 +121,12 @@ public class PagedBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<S
|
|||
return new ScriptDocValues.Strings(getBytesValues());
|
||||
}
|
||||
|
||||
static abstract class BytesValues extends org.elasticsearch.index.fielddata.BytesValues.WithOrdinals {
|
||||
static class BytesValues extends org.elasticsearch.index.fielddata.BytesValues.WithOrdinals {
|
||||
|
||||
protected final PagedBytes.Reader bytes;
|
||||
protected final MonotonicAppendingLongBuffer termOrdToBytesOffset;
|
||||
protected final Ordinals.Docs ordinals;
|
||||
|
||||
protected final BytesRef scratch = new BytesRef();
|
||||
|
||||
BytesValues(PagedBytes.Reader bytes, MonotonicAppendingLongBuffer termOrdToBytesOffset, Ordinals.Docs ordinals) {
|
||||
super(ordinals);
|
||||
this.bytes = bytes;
|
||||
|
@ -139,116 +135,60 @@ public class PagedBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<S
|
|||
}
|
||||
|
||||
@Override
|
||||
public BytesRef makeSafe(BytesRef bytes) {
|
||||
public BytesRef copyShared() {
|
||||
// when we fill from the pages bytes, we just reference an existing buffer slice, its enough
|
||||
// to create a shallow copy of the bytes to be safe for "reads".
|
||||
return new BytesRef(bytes.bytes, bytes.offset, bytes.length);
|
||||
return new BytesRef(scratch.bytes, scratch.offset, scratch.length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Ordinals.Docs ordinals() {
|
||||
public final Ordinals.Docs ordinals() {
|
||||
return this.ordinals;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef getValueScratchByOrd(long ord, BytesRef ret) {
|
||||
bytes.fill(ret, termOrdToBytesOffset.get(ord));
|
||||
return ret;
|
||||
public final BytesRef getValueByOrd(long ord) {
|
||||
assert ord != Ordinals.MISSING_ORDINAL;
|
||||
bytes.fill(scratch, termOrdToBytesOffset.get(ord));
|
||||
return scratch;
|
||||
}
|
||||
|
||||
|
||||
static class Single extends BytesValues {
|
||||
|
||||
private final Iter.Single iter;
|
||||
|
||||
Single(PagedBytes.Reader bytes, MonotonicAppendingLongBuffer termOrdToBytesOffset, Ordinals.Docs ordinals) {
|
||||
super(bytes, termOrdToBytesOffset, ordinals);
|
||||
assert !ordinals.isMultiValued();
|
||||
iter = newSingleIter();
|
||||
@Override
|
||||
public final BytesRef getValue(int docId) {
|
||||
final long ord = ordinals.getOrd(docId);
|
||||
if (ord == Ordinals.MISSING_ORDINAL) {
|
||||
scratch.length = 0;
|
||||
return scratch;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
long ord = ordinals.getOrd(docId);
|
||||
if (ord == 0) return Iter.Empty.INSTANCE;
|
||||
bytes.fill(scratch, termOrdToBytesOffset.get(ord));
|
||||
return iter.reset(scratch, ord);
|
||||
}
|
||||
|
||||
bytes.fill(scratch, termOrdToBytesOffset.get(ord));
|
||||
return scratch;
|
||||
}
|
||||
|
||||
static final class SingleHashed extends Single {
|
||||
@Override
|
||||
public final BytesRef nextValue() {
|
||||
bytes.fill(scratch, termOrdToBytesOffset.get(ordinals.nextOrd()));
|
||||
return scratch;
|
||||
}
|
||||
|
||||
static final class HashedBytesValues extends BytesValues {
|
||||
private final BigIntArray hashes;
|
||||
|
||||
SingleHashed(BigIntArray hashes, Reader bytes, MonotonicAppendingLongBuffer termOrdToBytesOffset, Docs ordinals) {
|
||||
|
||||
HashedBytesValues(BigIntArray hashes, Reader bytes, MonotonicAppendingLongBuffer termOrdToBytesOffset, Docs ordinals) {
|
||||
super(bytes, termOrdToBytesOffset, ordinals);
|
||||
this.hashes = hashes;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Iter.Single newSingleIter() {
|
||||
return new Iter.Single() {
|
||||
public int hash() {
|
||||
return hashes.get(ord);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getValueHashed(int docId, BytesRef ret) {
|
||||
final long ord = ordinals.getOrd(docId);
|
||||
getValueScratchByOrd(ord, ret);
|
||||
return hashes.get(ord);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
static class Multi extends BytesValues {
|
||||
|
||||
private final Iter.Multi iter;
|
||||
|
||||
Multi(PagedBytes.Reader bytes, MonotonicAppendingLongBuffer termOrdToBytesOffset, Ordinals.Docs ordinals) {
|
||||
super(bytes, termOrdToBytesOffset, ordinals);
|
||||
assert ordinals.isMultiValued();
|
||||
this.iter = newMultiIter();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
return iter.reset(ordinals.getIter(docId));
|
||||
public int currentValueHash() {
|
||||
assert ordinals.currentOrd() >= 0;
|
||||
return hashes.get(ordinals.currentOrd());
|
||||
}
|
||||
}
|
||||
|
||||
static final class MultiHashed extends Multi {
|
||||
|
||||
private final BigIntArray hashes;
|
||||
|
||||
MultiHashed(BigIntArray hashes, Reader bytes, MonotonicAppendingLongBuffer termOrdToBytesOffset, Docs ordinals) {
|
||||
super(bytes, termOrdToBytesOffset, ordinals);
|
||||
this.hashes = hashes;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Iter.Multi newMultiIter() {
|
||||
return new Iter.Multi(this) {
|
||||
public int hash() {
|
||||
return hashes.get(ord);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getValueHashed(int docId, BytesRef ret) {
|
||||
long ord = ordinals.getOrd(docId);
|
||||
getValueScratchByOrd(ord, ret);
|
||||
return hashes.get(ord);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
static class Empty extends PagedBytesAtomicFieldData {
|
||||
private final static class Empty extends PagedBytesAtomicFieldData {
|
||||
|
||||
Empty(int numDocs) {
|
||||
super(emptyBytes(), 0, new MonotonicAppendingLongBuffer(), new EmptyOrdinals(numDocs));
|
||||
|
@ -282,7 +222,7 @@ public class PagedBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<S
|
|||
|
||||
@Override
|
||||
public BytesValues.WithOrdinals getBytesValues() {
|
||||
return new BytesValues.WithOrdinals.Empty(ordinals.ordinals());
|
||||
return new EmptyByteValuesWithOrdinals(ordinals.ordinals());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -113,67 +113,46 @@ abstract class SortedSetDVAtomicFieldData {
|
|||
}
|
||||
}
|
||||
|
||||
static abstract class AbstractSortedSetValues extends BytesValues.WithOrdinals {
|
||||
static class SortedSetValues extends BytesValues.WithOrdinals {
|
||||
|
||||
protected final SortedSetDocValues values;
|
||||
protected BytesValues.Iter.Multi iter;
|
||||
|
||||
AbstractSortedSetValues(AtomicReader reader, String field, SortedSetDocValues values) {
|
||||
SortedSetValues(AtomicReader reader, String field, SortedSetDocValues values) {
|
||||
super(new SortedSetDocs(new SortedSetOrdinals(reader, field, values.getValueCount()), values));
|
||||
this.values = values;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef getValueScratchByOrd(long ord, BytesRef ret) {
|
||||
if (ord == 0) {
|
||||
ret.length = 0;
|
||||
return ret;
|
||||
}
|
||||
values.lookupOrd(ord - 1, ret);
|
||||
return ret;
|
||||
public BytesRef getValueByOrd(long ord) {
|
||||
assert ord != Ordinals.MISSING_ORDINAL;
|
||||
values.lookupOrd(ord - 1, scratch);
|
||||
return scratch;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
return iter.reset(ordinals.getIter(docId));
|
||||
public BytesRef nextValue() {
|
||||
values.lookupOrd(ordinals.nextOrd()-1, scratch);
|
||||
return scratch;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class SortedSetValues extends AbstractSortedSetValues {
|
||||
|
||||
SortedSetValues(AtomicReader reader, String field, SortedSetDocValues values) {
|
||||
super(reader, field, values);
|
||||
this.iter = new Iter.Multi(this);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class SortedSetHashedValues extends AbstractSortedSetValues {
|
||||
static final class SortedSetHashedValues extends SortedSetValues {
|
||||
|
||||
private final IntArray hashes;
|
||||
|
||||
SortedSetHashedValues(AtomicReader reader, String field, SortedSetDocValues values, IntArray hashes) {
|
||||
super(reader, field, values);
|
||||
this.hashes = hashes;
|
||||
this.iter = new Iter.Multi(this) {
|
||||
@Override
|
||||
public int hash() {
|
||||
return SortedSetHashedValues.this.hashes.get(ord);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getValueHashed(int docId, BytesRef spare) {
|
||||
long ord = ordinals.getOrd(docId);
|
||||
getValueScratchByOrd(ord, spare);
|
||||
return hashes.get(ord);
|
||||
public int currentValueHash() {
|
||||
assert ordinals.currentOrd() >= 0;
|
||||
return hashes.get(ordinals.currentOrd());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class SortedSetOrdinals implements Ordinals {
|
||||
static final class SortedSetOrdinals implements Ordinals {
|
||||
|
||||
// We don't store SortedSetDocValues as a member because Ordinals must be thread-safe
|
||||
private final AtomicReader reader;
|
||||
|
@ -187,16 +166,6 @@ abstract class SortedSetDVAtomicFieldData {
|
|||
this.numOrds = numOrds;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasSingleArrayBackingStorage() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getBackingStorage() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getMemorySizeInBytes() {
|
||||
// Ordinals can't be distinguished from the atomic field data instance
|
||||
|
@ -237,7 +206,8 @@ abstract class SortedSetDVAtomicFieldData {
|
|||
private final SortedSetOrdinals ordinals;
|
||||
private final SortedSetDocValues values;
|
||||
private final LongsRef longScratch;
|
||||
private final LongsIter iter = new LongsIter();
|
||||
private int ordIndex = Integer.MAX_VALUE;
|
||||
private long currentOrdinal = -1;
|
||||
|
||||
SortedSetDocs(SortedSetOrdinals ordinals, SortedSetDocValues values) {
|
||||
this.ordinals = ordinals;
|
||||
|
@ -273,7 +243,7 @@ abstract class SortedSetDVAtomicFieldData {
|
|||
@Override
|
||||
public long getOrd(int docId) {
|
||||
values.setDocument(docId);
|
||||
return 1 + values.nextOrd();
|
||||
return currentOrdinal = 1 + values.nextOrd();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -289,34 +259,23 @@ abstract class SortedSetDVAtomicFieldData {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
// For now, we consume all ords and pass them to the iter instead of doing it in a streaming way because Lucene's
|
||||
// SORTED_SET doc values are cached per thread, you can't have a fully independent instance
|
||||
iter.reset(getOrds(docId));
|
||||
return iter;
|
||||
public long nextOrd() {
|
||||
assert ordIndex < longScratch.length;
|
||||
return currentOrdinal = longScratch.longs[ordIndex++];
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class LongsIter implements Ordinals.Docs.Iter {
|
||||
|
||||
private LongsRef ords;
|
||||
private int i;
|
||||
|
||||
@Override
|
||||
public long next() {
|
||||
if (i == ords.length) {
|
||||
return 0L;
|
||||
}
|
||||
return ords.longs[i++];
|
||||
public int setDocument(int docId) {
|
||||
// For now, we consume all ords and pass them to the iter instead of doing it in a streaming way because Lucene's
|
||||
// SORTED_SET doc values are cached per thread, you can't have a fully independent instance
|
||||
final LongsRef ords = getOrds(docId);
|
||||
ordIndex = 0;
|
||||
return ords.length;
|
||||
}
|
||||
|
||||
public void reset(LongsRef ords) {
|
||||
this.ords = ords;
|
||||
assert ords.offset == 0;
|
||||
i = 0;
|
||||
@Override
|
||||
public long currentOrd() {
|
||||
return currentOrdinal;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -24,7 +24,14 @@ import org.elasticsearch.index.fielddata.AtomicFieldData;
|
|||
import org.elasticsearch.index.fielddata.ScriptDocValues;
|
||||
import org.elasticsearch.index.fielddata.ScriptDocValues.Strings;
|
||||
|
||||
public class SortedSetDVBytesAtomicFieldData extends SortedSetDVAtomicFieldData implements AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> {
|
||||
/**
|
||||
* An {@link AtomicFieldData} implementation that uses Lucene {@link org.apache.lucene.index.SortedSetDocValues}.
|
||||
*/
|
||||
public final class SortedSetDVBytesAtomicFieldData extends SortedSetDVAtomicFieldData implements AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> {
|
||||
|
||||
/* NOTE: This class inherits the methods getBytesValues() and getHashedBytesValues()
|
||||
* from SortedSetDVAtomicFieldData. This can cause confusion since the are
|
||||
* part of the interface this class implements.*/
|
||||
|
||||
SortedSetDVBytesAtomicFieldData(AtomicReader reader, String field) {
|
||||
super(reader, field);
|
||||
|
@ -39,5 +46,4 @@ public class SortedSetDVBytesAtomicFieldData extends SortedSetDVAtomicFieldData
|
|||
public Strings getScriptValues() {
|
||||
return new ScriptDocValues.Strings(getBytesValues());
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -23,6 +23,7 @@ import org.apache.lucene.index.AtomicReader;
|
|||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.index.fielddata.*;
|
||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
|
||||
|
||||
public class SortedSetDVNumericAtomicFieldData extends SortedSetDVAtomicFieldData implements AtomicNumericFieldData {
|
||||
|
||||
|
@ -53,9 +54,7 @@ public class SortedSetDVNumericAtomicFieldData extends SortedSetDVAtomicFieldDat
|
|||
return new LongValues.WithOrdinals(values.ordinals()) {
|
||||
@Override
|
||||
public long getValueByOrd(long ord) {
|
||||
if (ord == 0L) {
|
||||
return 0L;
|
||||
}
|
||||
assert ord != Ordinals.MISSING_ORDINAL;
|
||||
return numericType.toLong(values.getValueByOrd(ord));
|
||||
}
|
||||
};
|
||||
|
@ -67,9 +66,7 @@ public class SortedSetDVNumericAtomicFieldData extends SortedSetDVAtomicFieldDat
|
|||
return new DoubleValues.WithOrdinals(values.ordinals()) {
|
||||
@Override
|
||||
public double getValueByOrd(long ord) {
|
||||
if (ord == 0L) {
|
||||
return 0d;
|
||||
}
|
||||
assert ord != Ordinals.MISSING_ORDINAL;
|
||||
return numericType.toDouble(values.getValueByOrd(ord));
|
||||
}
|
||||
};
|
||||
|
@ -79,52 +76,23 @@ public class SortedSetDVNumericAtomicFieldData extends SortedSetDVAtomicFieldDat
|
|||
public BytesValues.WithOrdinals getBytesValues() {
|
||||
final BytesValues.WithOrdinals values = super.getBytesValues();
|
||||
return new BytesValues.WithOrdinals(values.ordinals()) {
|
||||
|
||||
BytesRef spare = new BytesRef(16);
|
||||
Iter inIter;
|
||||
Iter iter = new Iter() {
|
||||
|
||||
BytesRef current = null;
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return inIter.hasNext();
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef next() {
|
||||
return current = convert(inIter.next());
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hash() {
|
||||
return current.hashCode();
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
private BytesRef convert(BytesRef spare) {
|
||||
if (spare.length == 0) {
|
||||
return spare;
|
||||
final BytesRef spare = new BytesRef(16);
|
||||
private BytesRef convert(BytesRef input, BytesRef output) {
|
||||
if (input.length == 0) {
|
||||
return input;
|
||||
}
|
||||
if (numericType.isFloatingPoint()) {
|
||||
return new BytesRef(Double.toString(numericType.toDouble(spare)));
|
||||
output.copyChars(Double.toString(numericType.toDouble(input)));
|
||||
} else {
|
||||
return new BytesRef(Long.toString(numericType.toLong(spare)));
|
||||
output.copyChars(Long.toString(numericType.toLong(input)));
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef getValueScratchByOrd(long ord, BytesRef ret) {
|
||||
return convert(values.getValueScratchByOrd(ord, spare));
|
||||
public BytesRef getValueByOrd(long ord) {
|
||||
return convert(values.getValueByOrd(ord), scratch);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iter getIter(int docId) {
|
||||
inIter = values.getIter(docId);
|
||||
return iter;
|
||||
}
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -51,24 +51,24 @@ final class QueriesLoaderCollector extends Collector {
|
|||
@Override
|
||||
public void collect(int doc) throws IOException {
|
||||
// the _source is the query
|
||||
BytesRef id = idValues.getValue(doc);
|
||||
if (id == null) {
|
||||
return;
|
||||
}
|
||||
fieldsVisitor.reset();
|
||||
reader.document(doc, fieldsVisitor);
|
||||
|
||||
try {
|
||||
// id is only used for logging, if we fail we log the id in the catch statement
|
||||
final Query parseQuery = percolator.parsePercolatorDocument(null, fieldsVisitor.source());
|
||||
if (parseQuery != null) {
|
||||
queries.put(new HashedBytesRef(idValues.makeSafe(id)), parseQuery);
|
||||
} else {
|
||||
logger.warn("failed to add query [{}] - parser returned null", id);
|
||||
if (idValues.setDocument(doc) > 0) {
|
||||
BytesRef id = idValues.nextValue();
|
||||
fieldsVisitor.reset();
|
||||
reader.document(doc, fieldsVisitor);
|
||||
|
||||
try {
|
||||
// id is only used for logging, if we fail we log the id in the catch statement
|
||||
final Query parseQuery = percolator.parsePercolatorDocument(null, fieldsVisitor.source());
|
||||
if (parseQuery != null) {
|
||||
queries.put(new HashedBytesRef(idValues.copyShared()), parseQuery);
|
||||
} else {
|
||||
logger.warn("failed to add query [{}] - parser returned null", id);
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.warn("failed to add query [{}]", e, id.utf8ToString());
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.warn("failed to add query [{}]", e, id.utf8ToString());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -25,7 +25,9 @@ import org.apache.lucene.search.Filter;
|
|||
import org.apache.lucene.util.Bits;
|
||||
import org.apache.lucene.util.NumericUtils;
|
||||
import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
|
||||
import org.elasticsearch.index.fielddata.*;
|
||||
import org.elasticsearch.index.fielddata.DoubleValues;
|
||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
||||
import org.elasticsearch.index.fielddata.LongValues;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -132,25 +134,7 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
|
|||
return null;
|
||||
|
||||
final LongValues values = indexFieldData.load(ctx).getLongValues();
|
||||
return new MatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs) {
|
||||
|
||||
@Override
|
||||
public boolean isCacheable() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean matchDoc(int doc) {
|
||||
LongValues.Iter iter = values.getIter(doc);
|
||||
while (iter.hasNext()) {
|
||||
long value = iter.next();
|
||||
if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
};
|
||||
return new LongRangeMatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs, values, inclusiveLowerPoint, inclusiveUpperPoint);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -182,25 +166,7 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
|
|||
return null;
|
||||
|
||||
final LongValues values = indexFieldData.load(ctx).getLongValues();
|
||||
return new MatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs) {
|
||||
|
||||
@Override
|
||||
public boolean isCacheable() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean matchDoc(int doc) {
|
||||
LongValues.Iter iter = values.getIter(doc);
|
||||
while (iter.hasNext()) {
|
||||
long value = iter.next();
|
||||
if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
};
|
||||
return new LongRangeMatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs, values, inclusiveLowerPoint, inclusiveUpperPoint);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -231,25 +197,7 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
|
|||
return null;
|
||||
|
||||
final LongValues values = indexFieldData.load(ctx).getLongValues();
|
||||
return new MatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs) {
|
||||
|
||||
@Override
|
||||
public boolean isCacheable() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean matchDoc(int doc) {
|
||||
LongValues.Iter iter = values.getIter(doc);
|
||||
while (iter.hasNext()) {
|
||||
long value = iter.next();
|
||||
if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
};
|
||||
return new LongRangeMatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs, values, inclusiveLowerPoint, inclusiveUpperPoint);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -280,25 +228,8 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
|
|||
return null;
|
||||
|
||||
final LongValues values = indexFieldData.load(ctx).getLongValues();
|
||||
return new MatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs) {
|
||||
return new LongRangeMatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs, values, inclusiveLowerPoint, inclusiveUpperPoint);
|
||||
|
||||
@Override
|
||||
public boolean isCacheable() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean matchDoc(int doc) {
|
||||
LongValues.Iter iter = values.getIter(doc);
|
||||
while (iter.hasNext()) {
|
||||
long value = iter.next();
|
||||
if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -333,25 +264,7 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
|
|||
return null;
|
||||
|
||||
final DoubleValues values = indexFieldData.load(ctx).getDoubleValues();
|
||||
return new MatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs) {
|
||||
|
||||
@Override
|
||||
public boolean isCacheable() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean matchDoc(int doc) {
|
||||
DoubleValues.Iter iter = values.getIter(doc);
|
||||
while (iter.hasNext()) {
|
||||
double value = iter.next();
|
||||
if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
};
|
||||
return new DoubleRangeMatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs, values, inclusiveLowerPoint, inclusiveUpperPoint);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -386,26 +299,71 @@ public abstract class NumericRangeFieldDataFilter<T> extends Filter {
|
|||
return null;
|
||||
|
||||
final DoubleValues values = indexFieldData.load(ctx).getDoubleValues();
|
||||
return new MatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs) {
|
||||
|
||||
@Override
|
||||
public boolean isCacheable() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean matchDoc(int doc) {
|
||||
DoubleValues.Iter iter = values.getIter(doc);
|
||||
while (iter.hasNext()) {
|
||||
double value = iter.next();
|
||||
if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
};
|
||||
return new DoubleRangeMatchDocIdSet(ctx.reader().maxDoc(), acceptedDocs, values, inclusiveLowerPoint, inclusiveUpperPoint);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static final class DoubleRangeMatchDocIdSet extends MatchDocIdSet {
|
||||
private final DoubleValues values;
|
||||
private final double inclusiveLowerPoint;
|
||||
private final double inclusiveUpperPoint;
|
||||
|
||||
protected DoubleRangeMatchDocIdSet(int maxDoc, Bits acceptDocs, final DoubleValues values,final double inclusiveLowerPoint, final double inclusiveUpperPoint ) {
|
||||
super(maxDoc, acceptDocs);
|
||||
this.inclusiveLowerPoint = inclusiveLowerPoint;
|
||||
this.inclusiveUpperPoint = inclusiveUpperPoint;
|
||||
this.values = values;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isCacheable() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean matchDoc(int doc) {
|
||||
int numValues = values.setDocument(doc);
|
||||
for (int i = 0; i < numValues; i++) {
|
||||
double value = values.nextValue();
|
||||
if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static final class LongRangeMatchDocIdSet extends MatchDocIdSet {
|
||||
private final LongValues values;
|
||||
private final long inclusiveLowerPoint;
|
||||
private final long inclusiveUpperPoint;
|
||||
|
||||
protected LongRangeMatchDocIdSet(int maxDoc, Bits acceptDocs, final LongValues values,final long inclusiveLowerPoint, final long inclusiveUpperPoint ) {
|
||||
super(maxDoc, acceptDocs);
|
||||
this.inclusiveLowerPoint = inclusiveLowerPoint;
|
||||
this.inclusiveUpperPoint = inclusiveUpperPoint;
|
||||
this.values = values;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isCacheable() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean matchDoc(int doc) {
|
||||
int numValues = values.setDocument(doc);
|
||||
for (int i = 0; i < numValues; i++) {
|
||||
long value = values.nextValue();
|
||||
if (value >= inclusiveLowerPoint && value <= inclusiveUpperPoint) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -177,27 +177,15 @@ public class GeoDistanceFilter extends Filter {
|
|||
|
||||
@Override
|
||||
protected boolean matchDoc(int doc) {
|
||||
if (!values.hasValue(doc)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (values.isMultiValued()) {
|
||||
GeoPointValues.Iter iter = values.getIter(doc);
|
||||
while (iter.hasNext()) {
|
||||
GeoPoint point = iter.next();
|
||||
if (distanceBoundingCheck.isWithin(point.lat(), point.lon())) {
|
||||
double d = fixedSourceDistance.calculate(point.lat(), point.lon());
|
||||
if (d < distance) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
} else {
|
||||
GeoPoint point = values.getValue(doc);
|
||||
final int length = values.setDocument(doc);
|
||||
for (int i = 0; i < length; i++) {
|
||||
GeoPoint point = values.nextValue();
|
||||
if (distanceBoundingCheck.isWithin(point.lat(), point.lon())) {
|
||||
double d = fixedSourceDistance.calculate(point.lat(), point.lon());
|
||||
return d < distance;
|
||||
if (d < distance) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
|
|
|
@ -195,32 +195,17 @@ public class GeoDistanceRangeFilter extends Filter {
|
|||
|
||||
@Override
|
||||
protected boolean matchDoc(int doc) {
|
||||
if (!values.hasValue(doc)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (values.isMultiValued()) {
|
||||
GeoPointValues.Iter iter = values.getIter(doc);
|
||||
while (iter.hasNext()) {
|
||||
GeoPoint point = iter.next();
|
||||
if (distanceBoundingCheck.isWithin(point.lat(), point.lon())) {
|
||||
double d = fixedSourceDistance.calculate(point.lat(), point.lon());
|
||||
if (d >= inclusiveLowerPoint && d <= inclusiveUpperPoint) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
} else {
|
||||
GeoPoint point = values.getValue(doc);
|
||||
final int length = values.setDocument(doc);
|
||||
for (int i = 0; i < length; i++) {
|
||||
GeoPoint point = values.nextValue();
|
||||
if (distanceBoundingCheck.isWithin(point.lat(), point.lon())) {
|
||||
double d = fixedSourceDistance.calculate(point.lat(), point.lon());
|
||||
if (d >= inclusiveLowerPoint && d <= inclusiveUpperPoint) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -82,21 +82,12 @@ public class GeoPolygonFilter extends Filter {
|
|||
|
||||
@Override
|
||||
protected boolean matchDoc(int doc) {
|
||||
if (!values.hasValue(doc)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (values.isMultiValued()) {
|
||||
GeoPointValues.Iter iter = values.getIter(doc);
|
||||
while (iter.hasNext()) {
|
||||
GeoPoint point = iter.next();
|
||||
if (pointInPolygon(points, point.lat(), point.lon())) {
|
||||
return true;
|
||||
}
|
||||
final int length = values.setDocument(doc);
|
||||
for (int i = 0; i < length; i++) {
|
||||
GeoPoint point = values.nextValue();
|
||||
if (pointInPolygon(points, point.lat(), point.lon())) {
|
||||
return true;
|
||||
}
|
||||
} else {
|
||||
GeoPoint point = values.getValue(doc);
|
||||
return pointInPolygon(points, point.lat(), point.lon());
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
|
|
@ -95,22 +95,9 @@ public class InMemoryGeoBoundingBoxFilter extends Filter {
|
|||
|
||||
@Override
|
||||
protected boolean matchDoc(int doc) {
|
||||
if (!values.hasValue(doc)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (values.isMultiValued()) {
|
||||
GeoPointValues.Iter iter = values.getIter(doc);
|
||||
while (iter.hasNext()) {
|
||||
GeoPoint point = iter.next();
|
||||
if (((topLeft.lon() <= point.lon() || bottomRight.lon() >= point.lon())) &&
|
||||
(topLeft.lat() >= point.lat() && bottomRight.lat() <= point.lat())) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
GeoPoint point = values.getValue(doc);
|
||||
|
||||
final int length = values.setDocument(doc);
|
||||
for (int i = 0; i < length; i++) {
|
||||
GeoPoint point = values.nextValue();
|
||||
if (((topLeft.lon() <= point.lon() || bottomRight.lon() >= point.lon())) &&
|
||||
(topLeft.lat() >= point.lat() && bottomRight.lat() <= point.lat())) {
|
||||
return true;
|
||||
|
@ -139,21 +126,9 @@ public class InMemoryGeoBoundingBoxFilter extends Filter {
|
|||
|
||||
@Override
|
||||
protected boolean matchDoc(int doc) {
|
||||
if (!values.hasValue(doc)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (values.isMultiValued()) {
|
||||
GeoPointValues.Iter iter = values.getIter(doc);
|
||||
while (iter.hasNext()) {
|
||||
GeoPoint point = iter.next();
|
||||
if (topLeft.lon() <= point.lon() && bottomRight.lon() >= point.lon()
|
||||
&& topLeft.lat() >= point.lat() && bottomRight.lat() <= point.lat()) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
GeoPoint point = values.getValue(doc);
|
||||
final int length = values.setDocument(doc);
|
||||
for (int i = 0; i < length; i++) {
|
||||
GeoPoint point = values.nextValue();
|
||||
if (topLeft.lon() <= point.lon() && bottomRight.lon() >= point.lon()
|
||||
&& topLeft.lat() >= point.lat() && bottomRight.lat() <= point.lat()) {
|
||||
return true;
|
||||
|
|
|
@ -715,8 +715,12 @@ public class PercolatorService extends AbstractComponent {
|
|||
int segmentIdx = ReaderUtil.subIndex(scoreDoc.doc, percolatorSearcher.reader().leaves());
|
||||
AtomicReaderContext atomicReaderContext = percolatorSearcher.reader().leaves().get(segmentIdx);
|
||||
BytesValues values = idFieldData.load(atomicReaderContext).getBytesValues();
|
||||
spare.hash = values.getValueHashed(scoreDoc.doc - atomicReaderContext.docBase, spare.bytes);
|
||||
matches.add(values.makeSafe(spare.bytes));
|
||||
final int localDocId = scoreDoc.doc - atomicReaderContext.docBase;
|
||||
assert values.hasValue(localDocId);
|
||||
spare.bytes = values.getValue(localDocId);
|
||||
|
||||
spare.hash = values.currentValueHash();
|
||||
matches.add(values.copyShared());
|
||||
if (hls != null) {
|
||||
Query query = context.percolateQueries().get(spare);
|
||||
context.parsedQuery(new ParsedQuery(query, ImmutableMap.<String, Filter>of()));
|
||||
|
|
|
@ -152,7 +152,7 @@ abstract class QueryCollector extends Collector {
|
|||
|
||||
@Override
|
||||
public void collect(int doc) throws IOException {
|
||||
spare.hash = values.getValueHashed(doc, spare.bytes);
|
||||
spare.reset(values.getValue(doc), values.currentValueHash());
|
||||
Query query = queries.get(spare);
|
||||
if (query == null) {
|
||||
// log???
|
||||
|
@ -169,7 +169,7 @@ abstract class QueryCollector extends Collector {
|
|||
searcher.search(query, collector);
|
||||
if (collector.exists()) {
|
||||
if (!limit || counter < size) {
|
||||
matches.add(values.makeSafe(spare.bytes));
|
||||
matches.add(values.copyShared());
|
||||
if (context.highlight() != null) {
|
||||
highlightPhase.hitExecute(context, context.hitContext());
|
||||
hls.add(context.hitContext().hit().getHighlightFields());
|
||||
|
@ -210,7 +210,7 @@ abstract class QueryCollector extends Collector {
|
|||
|
||||
@Override
|
||||
public void collect(int doc) throws IOException {
|
||||
spare.hash = values.getValueHashed(doc, spare.bytes);
|
||||
spare.reset(values.getValue(doc), values.currentValueHash());
|
||||
Query query = queries.get(spare);
|
||||
if (query == null) {
|
||||
// log???
|
||||
|
@ -273,7 +273,7 @@ abstract class QueryCollector extends Collector {
|
|||
|
||||
@Override
|
||||
public void collect(int doc) throws IOException {
|
||||
spare.hash = values.getValueHashed(doc, spare.bytes);
|
||||
spare.reset(values.getValue(doc), values.currentValueHash());
|
||||
Query query = queries.get(spare);
|
||||
if (query == null) {
|
||||
// log???
|
||||
|
@ -289,7 +289,7 @@ abstract class QueryCollector extends Collector {
|
|||
searcher.search(query, collector);
|
||||
if (collector.exists()) {
|
||||
if (!limit || counter < size) {
|
||||
matches.add(values.makeSafe(spare.bytes));
|
||||
matches.add(values.copyShared());
|
||||
scores.add(scorer.score());
|
||||
if (context.highlight() != null) {
|
||||
highlightPhase.hitExecute(context, context.hitContext());
|
||||
|
@ -338,7 +338,7 @@ abstract class QueryCollector extends Collector {
|
|||
|
||||
@Override
|
||||
public void collect(int doc) throws IOException {
|
||||
spare.hash = values.getValueHashed(doc, spare.bytes);
|
||||
spare.reset(values.getValue(doc), values.currentValueHash());
|
||||
Query query = queries.get(spare);
|
||||
if (query == null) {
|
||||
// log???
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
package org.elasticsearch.search.facet;
|
||||
|
||||
import org.elasticsearch.index.fielddata.DoubleValues;
|
||||
import org.elasticsearch.index.fielddata.DoubleValues.Iter;
|
||||
|
||||
/**
|
||||
* Simple Facet aggregator base class for {@link DoubleValues}
|
||||
|
@ -29,15 +28,14 @@ public abstract class DoubleFacetAggregatorBase {
|
|||
private int missing;
|
||||
|
||||
public void onDoc(int docId, DoubleValues values) {
|
||||
if (values.hasValue(docId)) {
|
||||
final Iter iter = values.getIter(docId);
|
||||
while(iter.hasNext()) {
|
||||
onValue(docId, iter.next());
|
||||
total++;
|
||||
}
|
||||
} else {
|
||||
missing++;
|
||||
int numValues = values.setDocument(docId);
|
||||
int tempMissing = 1;
|
||||
for (int i = 0; i < numValues; i++) {
|
||||
tempMissing = 0;
|
||||
onValue(docId, values.nextValue());
|
||||
total++;
|
||||
}
|
||||
missing += tempMissing;
|
||||
}
|
||||
|
||||
protected abstract void onValue(int docId, double next);
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
package org.elasticsearch.search.facet;
|
||||
|
||||
import org.elasticsearch.index.fielddata.LongValues;
|
||||
import org.elasticsearch.index.fielddata.LongValues.Iter;
|
||||
|
||||
/**
|
||||
* Simple Facet aggregator base class for {@link LongValues}
|
||||
|
@ -29,15 +28,14 @@ public abstract class LongFacetAggregatorBase {
|
|||
private int missing;
|
||||
|
||||
public void onDoc(int docId, LongValues values) {
|
||||
if (values.hasValue(docId)) {
|
||||
final Iter iter = values.getIter(docId);
|
||||
while(iter.hasNext()) {
|
||||
onValue(docId, iter.next());
|
||||
total++;
|
||||
}
|
||||
} else {
|
||||
missing++;
|
||||
final int numValues = values.setDocument(docId);
|
||||
int tempMissing = 1;
|
||||
for (int i = 0; i < numValues; i++) {
|
||||
tempMissing = 0;
|
||||
onValue(docId, values.nextValue());
|
||||
total++;
|
||||
}
|
||||
missing += tempMissing;
|
||||
}
|
||||
|
||||
protected abstract void onValue(int docId, long next);
|
||||
|
|
|
@ -26,7 +26,6 @@ import org.elasticsearch.common.geo.GeoDistance;
|
|||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.unit.DistanceUnit;
|
||||
import org.elasticsearch.index.fielddata.GeoPointValues;
|
||||
import org.elasticsearch.index.fielddata.GeoPointValues.Iter;
|
||||
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
|
||||
import org.elasticsearch.search.facet.FacetExecutor;
|
||||
import org.elasticsearch.search.facet.InternalFacet;
|
||||
|
@ -106,9 +105,9 @@ public class GeoDistanceFacetExecutor extends FacetExecutor {
|
|||
}
|
||||
|
||||
public void onDoc(int docId, GeoPointValues values) {
|
||||
final Iter iter = values.getIter(docId);
|
||||
while(iter.hasNext()) {
|
||||
final GeoPoint next = iter.next();
|
||||
final int length = values.setDocument(docId);
|
||||
for (int i = 0; i < length; i++) {
|
||||
final GeoPoint next = values.nextValue();
|
||||
double distance = fixedSourceDistance.calculate(next.getLat(), next.getLon());
|
||||
for (GeoDistanceFacet.Entry entry : entries) {
|
||||
if (entry.foundInDoc) {
|
||||
|
|
|
@ -19,8 +19,6 @@
|
|||
|
||||
package org.elasticsearch.search.facet.geodistance;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.lucene.index.AtomicReaderContext;
|
||||
import org.elasticsearch.common.geo.GeoDistance;
|
||||
import org.elasticsearch.common.unit.DistanceUnit;
|
||||
|
@ -29,6 +27,8 @@ import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
|
|||
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
|
@ -73,9 +73,9 @@ public class ValueGeoDistanceFacetExecutor extends GeoDistanceFacetExecutor {
|
|||
protected void collectGeoPoint(GeoDistanceFacet.Entry entry, int docId, double distance) {
|
||||
entry.foundInDoc = true;
|
||||
entry.count++;
|
||||
DoubleValues.Iter iter = valueValues.getIter(docId);
|
||||
while(iter.hasNext()) {
|
||||
double value = iter.next();
|
||||
int seek = valueValues.setDocument(docId);
|
||||
for (int i = 0; i < seek; i++) {
|
||||
double value = valueValues.nextValue();
|
||||
entry.totalCount++;
|
||||
entry.total += value;
|
||||
if (value < entry.min) {
|
||||
|
|
|
@ -102,29 +102,14 @@ public class KeyValueRangeFacetExecutor extends FacetExecutor {
|
|||
if (value >= entry.getFrom() && value < entry.getTo()) {
|
||||
entry.foundInDoc = true;
|
||||
entry.count++;
|
||||
if (valueValues.isMultiValued()) {
|
||||
for (DoubleValues.Iter iter = valueValues.getIter(docId); iter.hasNext(); ) {
|
||||
double valueValue = iter.next();
|
||||
entry.total += valueValue;
|
||||
if (valueValue < entry.min) {
|
||||
entry.min = valueValue;
|
||||
}
|
||||
if (valueValue > entry.max) {
|
||||
entry.max = valueValue;
|
||||
}
|
||||
entry.totalCount++;
|
||||
}
|
||||
} else if (valueValues.hasValue(docId)) {
|
||||
double valueValue = valueValues.getValue(docId);
|
||||
entry.totalCount++;
|
||||
int seek = valueValues.setDocument(docId);
|
||||
for (int i = 0; i < seek; i++) {
|
||||
double valueValue = valueValues.nextValue();
|
||||
entry.total += valueValue;
|
||||
if (valueValue < entry.min) {
|
||||
entry.min = valueValue;
|
||||
}
|
||||
if (valueValue > entry.max) {
|
||||
entry.max = valueValue;
|
||||
}
|
||||
entry.min = Math.min(entry.min, valueValue);
|
||||
entry.max = Math.max(entry.max, valueValue);
|
||||
}
|
||||
entry.totalCount+=seek;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -78,7 +78,7 @@ public class TermsDoubleFacetExecutor extends FacetExecutor {
|
|||
if (values instanceof DoubleValues.WithOrdinals) {
|
||||
DoubleValues.WithOrdinals valuesWithOrds = (DoubleValues.WithOrdinals) values;
|
||||
Ordinals.Docs ordinals = valuesWithOrds.ordinals();
|
||||
for (int ord = 1; ord < ordinals.getMaxOrd(); ord++) {
|
||||
for (long ord = Ordinals.MIN_ORDINAL; ord < ordinals.getMaxOrd(); ord++) {
|
||||
facets.v().putIfAbsent(valuesWithOrds.getValueByOrd(ord), 0);
|
||||
}
|
||||
} else {
|
||||
|
@ -88,10 +88,10 @@ public class TermsDoubleFacetExecutor extends FacetExecutor {
|
|||
if (!values.hasValue(docId)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
DoubleValues.Iter iter = values.getIter(docId);
|
||||
while (iter.hasNext()) {
|
||||
facets.v().putIfAbsent(iter.next(), 0);
|
||||
int numValues = values.setDocument(docId);
|
||||
DoubleIntOpenHashMap map = facets.v();
|
||||
for (int i = 0; i < numValues; i++) {
|
||||
map.putIfAbsent(values.nextValue(), 0);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
|
|
@ -77,20 +77,17 @@ public class TermsLongFacetExecutor extends FacetExecutor {
|
|||
if (values instanceof LongValues.WithOrdinals) {
|
||||
LongValues.WithOrdinals valuesWithOrds = (LongValues.WithOrdinals) values;
|
||||
Ordinals.Docs ordinals = valuesWithOrds.ordinals();
|
||||
for (int ord = 1; ord < ordinals.getMaxOrd(); ord++) {
|
||||
for (long ord = Ordinals.MIN_ORDINAL; ord < ordinals.getMaxOrd(); ord++) {
|
||||
facets.v().putIfAbsent(valuesWithOrds.getValueByOrd(ord), 0);
|
||||
}
|
||||
} else {
|
||||
// Shouldn't be true, otherwise it is WithOrdinals... just to be sure...
|
||||
if (values.isMultiValued()) {
|
||||
for (int docId = 0; docId < maxDoc; docId++) {
|
||||
if (!values.hasValue(docId)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
LongValues.Iter iter = values.getIter(docId);
|
||||
while (iter.hasNext()) {
|
||||
facets.v().putIfAbsent(iter.next(), 0);
|
||||
final int numValues = values.setDocument(docId);
|
||||
final LongIntOpenHashMap v = facets.v();
|
||||
for (int i = 0; i < numValues; i++) {
|
||||
v.putIfAbsent(values.nextValue(), 0);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
|
|
@ -26,7 +26,6 @@ import org.apache.lucene.util.BytesRefHash;
|
|||
import org.elasticsearch.common.collect.BoundedTreeSet;
|
||||
import org.elasticsearch.common.lucene.HashedBytesRef;
|
||||
import org.elasticsearch.index.fielddata.BytesValues;
|
||||
import org.elasticsearch.index.fielddata.BytesValues.Iter;
|
||||
import org.elasticsearch.search.facet.InternalFacet;
|
||||
import org.elasticsearch.search.facet.terms.TermsFacet;
|
||||
import org.elasticsearch.search.facet.terms.support.EntryPriorityQueue;
|
||||
|
@ -44,15 +43,15 @@ public class HashedAggregator {
|
|||
}
|
||||
|
||||
public void onDoc(int docId, BytesValues values) {
|
||||
if (values.hasValue(docId)) {
|
||||
final Iter iter = values.getIter(docId);
|
||||
while (iter.hasNext()) {
|
||||
onValue(docId, iter.next(), iter.hash(), values);
|
||||
total++;
|
||||
}
|
||||
} else {
|
||||
missing++;
|
||||
final int length = values.setDocument(docId);
|
||||
int pendingMissing = 1;
|
||||
total += length;
|
||||
for (int i = 0; i < length; i++) {
|
||||
final BytesRef value = values.nextValue();
|
||||
onValue(docId, value, values.currentValueHash(), values);
|
||||
pendingMissing = 0;
|
||||
}
|
||||
missing += pendingMissing;
|
||||
}
|
||||
|
||||
public void addValue(BytesRef value, int hashCode, BytesValues values) {
|
||||
|
@ -232,9 +231,7 @@ public class HashedAggregator {
|
|||
}
|
||||
|
||||
private static final class AssertingHashCount implements HashCount { // simple
|
||||
// implemenation
|
||||
// for
|
||||
// assertions
|
||||
// implementation for assertions
|
||||
private final ObjectIntOpenHashMap<HashedBytesRef> valuesAndCount = new ObjectIntOpenHashMap<HashedBytesRef>();
|
||||
private HashedBytesRef spare = new HashedBytesRef();
|
||||
|
||||
|
@ -244,7 +241,7 @@ public class HashedAggregator {
|
|||
assert adjustedValue >= 1;
|
||||
if (adjustedValue == 1) { // only if we added the spare we create a
|
||||
// new instance
|
||||
spare.bytes = values.makeSafe(spare.bytes);
|
||||
spare.bytes = BytesRef.deepCopyOf(value);
|
||||
spare = new HashedBytesRef();
|
||||
return true;
|
||||
}
|
||||
|
@ -268,9 +265,8 @@ public class HashedAggregator {
|
|||
@Override
|
||||
public boolean addNoCount(BytesRef value, int hashCode, BytesValues values) {
|
||||
if (!valuesAndCount.containsKey(spare.reset(value, hashCode))) {
|
||||
valuesAndCount.addTo(spare.reset(value, hashCode), 0);
|
||||
spare.bytes = values.makeSafe(spare.bytes);
|
||||
spare = new HashedBytesRef();
|
||||
valuesAndCount.addTo(spare.reset(BytesRef.deepCopyOf(value), hashCode), 0);
|
||||
spare = new HashedBytesRef(); // reset the reference since we just added to the hash
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
|
|
|
@ -76,7 +76,7 @@ public final class HashedScriptAggregator extends HashedAggregator {
|
|||
script.setNextDocId(docId);
|
||||
// LUCENE 4 UPGRADE: needs optimization -- maybe a CharSequence
|
||||
// does the job here?
|
||||
// we only creat that string if we really need
|
||||
// we only create that string if we really need
|
||||
script.setNextVar("term", spare.toString());
|
||||
Object scriptValue = script.run();
|
||||
if (scriptValue == null) {
|
||||
|
@ -87,8 +87,6 @@ public final class HashedScriptAggregator extends HashedAggregator {
|
|||
return;
|
||||
}
|
||||
} else {
|
||||
// LUCENE 4 UPGRADE: should be possible to convert directly
|
||||
// to BR
|
||||
scriptSpare.copyChars(scriptValue.toString());
|
||||
hashCode = scriptSpare.hashCode();
|
||||
super.onValue(docId, scriptSpare, hashCode, values);
|
||||
|
|
|
@ -126,39 +126,24 @@ public class TermsStringFacetExecutor extends FacetExecutor {
|
|||
}
|
||||
|
||||
static void loadAllTerms(SearchContext context, IndexFieldData indexFieldData, HashedAggregator aggregator) {
|
||||
|
||||
for (AtomicReaderContext readerContext : context.searcher().getTopReaderContext().leaves()) {
|
||||
int maxDoc = readerContext.reader().maxDoc();
|
||||
if (indexFieldData instanceof IndexFieldData.WithOrdinals) {
|
||||
BytesValues.WithOrdinals values = ((IndexFieldData.WithOrdinals) indexFieldData).load(readerContext).getBytesValues();
|
||||
Ordinals.Docs ordinals = values.ordinals();
|
||||
// 0 = docs with no value for field, so start from 1 instead
|
||||
for (int ord = 1; ord < ordinals.getMaxOrd(); ord++) {
|
||||
for (long ord = Ordinals.MIN_ORDINAL; ord < ordinals.getMaxOrd(); ord++) {
|
||||
BytesRef value = values.getValueByOrd(ord);
|
||||
aggregator.addValue(value, value.hashCode(), values);
|
||||
}
|
||||
} else {
|
||||
BytesValues values = indexFieldData.load(readerContext).getBytesValues();
|
||||
// Shouldn't be true, otherwise it is WithOrdinals... just to be sure...
|
||||
if (values.isMultiValued()) {
|
||||
for (int docId = 0; docId < maxDoc; docId++) {
|
||||
if (!values.hasValue(docId)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
BytesValues.Iter iter = values.getIter(docId);
|
||||
while (iter.hasNext()) {
|
||||
aggregator.addValue(iter.next(), iter.hash(), values);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
BytesRef spare = new BytesRef();
|
||||
for (int docId = 0; docId < maxDoc; docId++) {
|
||||
if (!values.hasValue(docId)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
int hash = values.getValueHashed(docId, spare);
|
||||
aggregator.addValue(spare, hash, values);
|
||||
for (int docId = 0; docId < maxDoc; docId++) {
|
||||
final int size = values.setDocument(docId);
|
||||
for (int i = 0; i < size; i++) {
|
||||
final BytesRef value = values.nextValue();
|
||||
aggregator.addValue(value, values.currentValueHash(), values);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -32,7 +32,6 @@ import org.elasticsearch.common.util.IntArrays;
|
|||
import org.elasticsearch.index.fielddata.BytesValues;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
|
||||
import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs.Iter;
|
||||
import org.elasticsearch.search.facet.FacetExecutor;
|
||||
import org.elasticsearch.search.facet.InternalFacet;
|
||||
import org.elasticsearch.search.facet.terms.TermsFacet;
|
||||
|
@ -115,7 +114,7 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {
|
|||
|
||||
while (queue.size() > 0) {
|
||||
ReaderAggregator agg = queue.top();
|
||||
BytesRef value = agg.values.makeSafe(agg.current); // we need to makeSafe it, since we end up pushing it... (can we get around this?)
|
||||
BytesRef value = agg.copyCurrent(); // we need to makeSafe it, since we end up pushing it... (can we get around this?)
|
||||
int count = 0;
|
||||
do {
|
||||
count += agg.counts.get(agg.position);
|
||||
|
@ -155,7 +154,7 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {
|
|||
|
||||
while (queue.size() > 0) {
|
||||
ReaderAggregator agg = queue.top();
|
||||
BytesRef value = agg.values.makeSafe(agg.current); // we need to makeSafe it, since we end up pushing it... (can we work around that?)
|
||||
BytesRef value = agg.copyCurrent(); // we need to makeSafe it, since we end up pushing it... (can we work around that?)
|
||||
int count = 0;
|
||||
do {
|
||||
count += agg.counts.get(agg.position);
|
||||
|
@ -211,12 +210,13 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {
|
|||
|
||||
@Override
|
||||
public void collect(int doc) throws IOException {
|
||||
Iter iter = ordinals.getIter(doc);
|
||||
long ord = iter.next();
|
||||
current.onOrdinal(doc, ord);
|
||||
while ((ord = iter.next()) != 0) {
|
||||
current.onOrdinal(doc, ord);
|
||||
final int length = ordinals.setDocument(doc);
|
||||
int missing = 1;
|
||||
for (int i = 0; i < length; i++) {
|
||||
current.onOrdinal(doc, ordinals.nextOrd());
|
||||
missing = 0;
|
||||
}
|
||||
current.incrementMissing(missing);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -237,13 +237,14 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {
|
|||
|
||||
public static final class ReaderAggregator {
|
||||
|
||||
private final long maxOrd;
|
||||
|
||||
final BytesValues.WithOrdinals values;
|
||||
final IntArray counts;
|
||||
|
||||
long position = 0;
|
||||
BytesRef current;
|
||||
int total;
|
||||
private final long maxOrd;
|
||||
|
||||
|
||||
public ReaderAggregator(BytesValues.WithOrdinals values, int ordinalsCacheLimit, CacheRecycler cacheRecycler) {
|
||||
this.values = values;
|
||||
|
@ -256,6 +257,11 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {
|
|||
total++;
|
||||
}
|
||||
|
||||
final void incrementMissing(int numMissing) {
|
||||
counts.increment(0, numMissing);
|
||||
total += numMissing;
|
||||
}
|
||||
|
||||
public boolean nextPosition() {
|
||||
if (++position >= maxOrd) {
|
||||
return false;
|
||||
|
@ -263,6 +269,10 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {
|
|||
current = values.getValueByOrd(position);
|
||||
return true;
|
||||
}
|
||||
|
||||
public BytesRef copyCurrent() {
|
||||
return values.copyShared();
|
||||
}
|
||||
}
|
||||
|
||||
public static class AggregatorPriorityQueue extends PriorityQueue<ReaderAggregator> {
|
||||
|
|
|
@ -170,7 +170,7 @@ public class TermsStatsStringFacetExecutor extends FacetExecutor {
|
|||
spare.reset(value, hashCode);
|
||||
InternalTermsStatsStringFacet.StringEntry stringEntry = entries.get(spare);
|
||||
if (stringEntry == null) {
|
||||
HashedBytesRef theValue = new HashedBytesRef(values.makeSafe(value), hashCode);
|
||||
HashedBytesRef theValue = new HashedBytesRef(values.copyShared(), hashCode);
|
||||
stringEntry = new InternalTermsStatsStringFacet.StringEntry(theValue, 0, 0, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
|
||||
entries.put(theValue, stringEntry);
|
||||
}
|
||||
|
@ -210,7 +210,7 @@ public class TermsStatsStringFacetExecutor extends FacetExecutor {
|
|||
spare.reset(value, hashCode);
|
||||
InternalTermsStatsStringFacet.StringEntry stringEntry = entries.get(spare);
|
||||
if (stringEntry == null) {
|
||||
HashedBytesRef theValue = new HashedBytesRef(values.makeSafe(value), hashCode);
|
||||
HashedBytesRef theValue = new HashedBytesRef(values.copyShared(), hashCode);
|
||||
stringEntry = new InternalTermsStatsStringFacet.StringEntry(theValue, 1, 0, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
|
||||
entries.put(theValue, stringEntry);
|
||||
} else {
|
||||
|
|
|
@ -23,6 +23,7 @@ import org.apache.lucene.index.AtomicReaderContext;
|
|||
import org.apache.lucene.index.DirectoryReader;
|
||||
import org.apache.lucene.search.*;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.lucene.HashedBytesRef;
|
||||
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
|
||||
import org.elasticsearch.index.fielddata.fieldcomparator.SortMode;
|
||||
|
@ -93,19 +94,9 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
|
|||
assertThat(bytesValues.getValue(1), equalTo(new BytesRef(one())));
|
||||
assertThat(bytesValues.getValue(2), equalTo(new BytesRef(three())));
|
||||
|
||||
BytesRef bytesRef = new BytesRef();
|
||||
assertThat(bytesValues.getValueScratch(0, bytesRef), equalTo(new BytesRef(two())));
|
||||
assertThat(bytesRef, equalTo(new BytesRef(two())));
|
||||
assertThat(bytesValues.getValueScratch(1, bytesRef), equalTo(new BytesRef(one())));
|
||||
assertThat(bytesRef, equalTo(new BytesRef(one())));
|
||||
assertThat(bytesValues.getValueScratch(2, bytesRef), equalTo(new BytesRef(three())));
|
||||
assertThat(bytesRef, equalTo(new BytesRef(three())));
|
||||
|
||||
|
||||
BytesValues.Iter bytesValuesIter = bytesValues.getIter(0);
|
||||
assertThat(bytesValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(bytesValuesIter.next(), equalTo(new BytesRef(two())));
|
||||
assertThat(bytesValuesIter.hasNext(), equalTo(false));
|
||||
assertValues(bytesValues, 0, two());
|
||||
assertValues(bytesValues, 1, one());
|
||||
assertValues(bytesValues, 2, three());
|
||||
|
||||
BytesValues hashedBytesValues = fieldData.getBytesValues();
|
||||
|
||||
|
@ -116,11 +107,9 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
|
|||
assertThat(convert(hashedBytesValues, 0), equalTo(new HashedBytesRef(two())));
|
||||
assertThat(convert(hashedBytesValues, 1), equalTo(new HashedBytesRef(one())));
|
||||
assertThat(convert(hashedBytesValues, 2), equalTo(new HashedBytesRef(three())));
|
||||
|
||||
BytesValues.Iter hashedBytesValuesIter = hashedBytesValues.getIter(0);
|
||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(new HashedBytesRef(hashedBytesValuesIter.next(), hashedBytesValuesIter.hash()), equalTo(new HashedBytesRef(two())));
|
||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(false));
|
||||
assertHashedValues(hashedBytesValues, 0, two());
|
||||
assertHashedValues(hashedBytesValues, 1, one());
|
||||
assertHashedValues(hashedBytesValues, 2, three());
|
||||
|
||||
IndexSearcher searcher = new IndexSearcher(readerContext.reader());
|
||||
TopFieldDocs topDocs;
|
||||
|
@ -144,12 +133,48 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
|
|||
}
|
||||
|
||||
private HashedBytesRef convert(BytesValues values, int doc) {
|
||||
BytesRef ref = new BytesRef();
|
||||
return new HashedBytesRef(ref, values.getValueHashed(doc, ref));
|
||||
if (values.hasValue(doc)) {
|
||||
return new HashedBytesRef(BytesRef.deepCopyOf(values.getValue(doc)), values.currentValueHash());
|
||||
} else {
|
||||
return new HashedBytesRef(new BytesRef());
|
||||
}
|
||||
}
|
||||
|
||||
protected abstract void fillSingleValueWithMissing() throws Exception;
|
||||
|
||||
public void assertValues(BytesValues values, int docId, BytesRef... actualValues) {
|
||||
assertThat(values.setDocument(docId), equalTo(actualValues.length));
|
||||
for (int i = 0; i < actualValues.length; i++) {
|
||||
assertThat(values.nextValue(), equalTo(actualValues[i]));
|
||||
}
|
||||
}
|
||||
|
||||
public void assertValues(BytesValues values, int docId, String... actualValues) {
|
||||
assertThat(values.setDocument(docId), equalTo(actualValues.length));
|
||||
for (int i = 0; i < actualValues.length; i++) {
|
||||
assertThat(values.nextValue(), equalTo(new BytesRef(actualValues[i])));
|
||||
}
|
||||
}
|
||||
|
||||
public void assertHashedValues(BytesValues values, int docId, BytesRef... actualValues) {
|
||||
assertThat(values.setDocument(docId), equalTo(actualValues.length));
|
||||
BytesRef r = new BytesRef();
|
||||
for (int i = 0; i < actualValues.length; i++) {
|
||||
assertThat(values.nextValue(), equalTo(new HashedBytesRef(actualValues[i]).bytes));
|
||||
assertThat(values.currentValueHash(), equalTo(new HashedBytesRef(actualValues[i]).hash));
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
public void assertHashedValues(BytesValues values, int docId, String... actualValues) {
|
||||
assertThat(values.setDocument(docId), equalTo(actualValues.length));
|
||||
for (int i = 0; i < actualValues.length; i++) {
|
||||
assertThat(values.nextValue(), equalTo(new HashedBytesRef(actualValues[i]).bytes));
|
||||
assertThat(values.currentValueHash(), equalTo(new HashedBytesRef(actualValues[i]).hash));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSingleValueWithMissing() throws Exception {
|
||||
fillSingleValueWithMissing();
|
||||
|
@ -169,26 +194,13 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
|
|||
assertThat(bytesValues.hasValue(2), equalTo(true));
|
||||
|
||||
assertThat(bytesValues.getValue(0), equalTo(new BytesRef(two())));
|
||||
assertThat(bytesValues.getValue(1), nullValue());
|
||||
assertThat(bytesValues.getValue(1), equalTo(new BytesRef()));
|
||||
assertThat(bytesValues.getValue(2), equalTo(new BytesRef(three())));
|
||||
|
||||
BytesRef bytesRef = new BytesRef();
|
||||
assertThat(bytesValues.getValueScratch(0, bytesRef), equalTo(new BytesRef(two())));
|
||||
assertThat(bytesRef, equalTo(new BytesRef(two())));
|
||||
assertThat(bytesValues.getValueScratch(1, bytesRef), equalTo(new BytesRef()));
|
||||
assertThat(bytesRef, equalTo(new BytesRef()));
|
||||
assertThat(bytesValues.getValueScratch(2, bytesRef), equalTo(new BytesRef(three())));
|
||||
assertThat(bytesRef, equalTo(new BytesRef(three())));
|
||||
assertValues(bytesValues, 0, two());
|
||||
assertValues(bytesValues, 1, Strings.EMPTY_ARRAY);
|
||||
|
||||
|
||||
BytesValues.Iter bytesValuesIter = bytesValues.getIter(0);
|
||||
assertThat(bytesValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(bytesValuesIter.next(), equalTo(new BytesRef(two())));
|
||||
assertThat(bytesValuesIter.hasNext(), equalTo(false));
|
||||
|
||||
bytesValuesIter = bytesValues.getIter(1);
|
||||
assertThat(bytesValuesIter.hasNext(), equalTo(false));
|
||||
|
||||
BytesValues hashedBytesValues = fieldData.getBytesValues();
|
||||
|
||||
assertThat(hashedBytesValues.hasValue(0), equalTo(true));
|
||||
|
@ -199,13 +211,9 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
|
|||
assertThat(convert(hashedBytesValues, 1), equalTo(new HashedBytesRef(new BytesRef())));
|
||||
assertThat(convert(hashedBytesValues, 2), equalTo(new HashedBytesRef(three())));
|
||||
|
||||
BytesValues.Iter hashedBytesValuesIter = hashedBytesValues.getIter(0);
|
||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(new HashedBytesRef(hashedBytesValuesIter.next(), hashedBytesValuesIter.hash()), equalTo(new HashedBytesRef(two())));
|
||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(false));
|
||||
assertHashedValues(hashedBytesValues, 0, two());
|
||||
assertHashedValues(hashedBytesValues, 1, Strings.EMPTY_ARRAY);
|
||||
|
||||
hashedBytesValuesIter = hashedBytesValues.getIter(1);
|
||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(false));
|
||||
}
|
||||
|
||||
protected abstract void fillMultiValueAllSet() throws Exception;
|
||||
|
@ -231,21 +239,7 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
|
|||
assertThat(bytesValues.getValue(1), equalTo(new BytesRef(one())));
|
||||
assertThat(bytesValues.getValue(2), equalTo(new BytesRef(three())));
|
||||
|
||||
BytesRef bytesRef = new BytesRef();
|
||||
assertThat(bytesValues.getValueScratch(0, bytesRef), equalTo(new BytesRef(two())));
|
||||
assertThat(bytesRef, equalTo(new BytesRef(two())));
|
||||
assertThat(bytesValues.getValueScratch(1, bytesRef), equalTo(new BytesRef(one())));
|
||||
assertThat(bytesRef, equalTo(new BytesRef(one())));
|
||||
assertThat(bytesValues.getValueScratch(2, bytesRef), equalTo(new BytesRef(three())));
|
||||
assertThat(bytesRef, equalTo(new BytesRef(three())));
|
||||
|
||||
|
||||
BytesValues.Iter bytesValuesIter = bytesValues.getIter(0);
|
||||
assertThat(bytesValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(bytesValuesIter.next(), equalTo(new BytesRef(two())));
|
||||
assertThat(bytesValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(bytesValuesIter.next(), equalTo(new BytesRef(four())));
|
||||
assertThat(bytesValuesIter.hasNext(), equalTo(false));
|
||||
assertValues(bytesValues, 0, two(), four());
|
||||
|
||||
BytesValues hashedBytesValues = fieldData.getBytesValues();
|
||||
|
||||
|
@ -257,12 +251,7 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
|
|||
assertThat(convert(hashedBytesValues, 1), equalTo(new HashedBytesRef(one())));
|
||||
assertThat(convert(hashedBytesValues, 2), equalTo(new HashedBytesRef(three())));
|
||||
|
||||
BytesValues.Iter hashedBytesValuesIter = hashedBytesValues.getIter(0);
|
||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(new HashedBytesRef(hashedBytesValuesIter.next(), hashedBytesValuesIter.hash()), equalTo(new HashedBytesRef(two())));
|
||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(new HashedBytesRef(hashedBytesValuesIter.next(), hashedBytesValuesIter.hash()), equalTo(new HashedBytesRef(four())));
|
||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(false));
|
||||
assertHashedValues(hashedBytesValues, 0, two(), four());
|
||||
|
||||
IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true));
|
||||
TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField("value", indexFieldData.comparatorSource(null, SortMode.MIN))));
|
||||
|
@ -300,27 +289,11 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
|
|||
assertThat(bytesValues.hasValue(2), equalTo(true));
|
||||
|
||||
assertThat(bytesValues.getValue(0), equalTo(new BytesRef(two())));
|
||||
assertThat(bytesValues.getValue(1), nullValue());
|
||||
assertThat(bytesValues.getValue(1), equalTo(new BytesRef()));
|
||||
assertThat(bytesValues.getValue(2), equalTo(new BytesRef(three())));
|
||||
|
||||
BytesRef bytesRef = new BytesRef();
|
||||
assertThat(bytesValues.getValueScratch(0, bytesRef), equalTo(new BytesRef(two())));
|
||||
assertThat(bytesRef, equalTo(new BytesRef(two())));
|
||||
assertThat(bytesValues.getValueScratch(1, bytesRef), equalTo(new BytesRef()));
|
||||
assertThat(bytesRef, equalTo(new BytesRef()));
|
||||
assertThat(bytesValues.getValueScratch(2, bytesRef), equalTo(new BytesRef(three())));
|
||||
assertThat(bytesRef, equalTo(new BytesRef(three())));
|
||||
|
||||
|
||||
BytesValues.Iter bytesValuesIter = bytesValues.getIter(0);
|
||||
assertThat(bytesValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(bytesValuesIter.next(), equalTo(new BytesRef(two())));
|
||||
assertThat(bytesValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(bytesValuesIter.next(), equalTo(new BytesRef(four())));
|
||||
assertThat(bytesValuesIter.hasNext(), equalTo(false));
|
||||
|
||||
bytesValuesIter = bytesValues.getIter(1);
|
||||
assertThat(bytesValuesIter.hasNext(), equalTo(false));
|
||||
assertValues(bytesValues, 0, two(), four());
|
||||
assertValues(bytesValues, 1, Strings.EMPTY_ARRAY);
|
||||
|
||||
BytesValues hashedBytesValues = fieldData.getBytesValues();
|
||||
|
||||
|
@ -332,15 +305,10 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
|
|||
assertThat(convert(hashedBytesValues, 1), equalTo(new HashedBytesRef(new BytesRef())));
|
||||
assertThat(convert(hashedBytesValues, 2), equalTo(new HashedBytesRef(three())));
|
||||
|
||||
BytesValues.Iter hashedBytesValuesIter = hashedBytesValues.getIter(0);
|
||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(new HashedBytesRef(hashedBytesValuesIter.next(), hashedBytesValuesIter.hash()), equalTo(new HashedBytesRef(two())));
|
||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(new HashedBytesRef(hashedBytesValuesIter.next(), hashedBytesValuesIter.hash()), equalTo(new HashedBytesRef(four())));
|
||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(false));
|
||||
|
||||
hashedBytesValuesIter = hashedBytesValues.getIter(1);
|
||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(false));
|
||||
assertHashedValues(bytesValues, 0, two(), four());
|
||||
assertHashedValues(bytesValues, 1, Strings.EMPTY_ARRAY);
|
||||
assertHashedValues(hashedBytesValues, 0, two(), four());
|
||||
assertHashedValues(hashedBytesValues, 1, Strings.EMPTY_ARRAY);
|
||||
}
|
||||
|
||||
public void testMissingValueForAll() throws Exception {
|
||||
|
@ -360,45 +328,25 @@ public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests
|
|||
assertThat(bytesValues.hasValue(1), equalTo(false));
|
||||
assertThat(bytesValues.hasValue(2), equalTo(false));
|
||||
|
||||
assertThat(bytesValues.getValue(0), nullValue());
|
||||
assertThat(bytesValues.getValue(1), nullValue());
|
||||
assertThat(bytesValues.getValue(2), nullValue());
|
||||
|
||||
BytesRef bytesRef = new BytesRef();
|
||||
assertThat(bytesValues.getValueScratch(0, bytesRef), equalTo(new BytesRef()));
|
||||
assertThat(bytesRef, equalTo(new BytesRef()));
|
||||
assertThat(bytesValues.getValueScratch(1, bytesRef), equalTo(new BytesRef()));
|
||||
assertThat(bytesRef, equalTo(new BytesRef()));
|
||||
assertThat(bytesValues.getValueScratch(2, bytesRef), equalTo(new BytesRef()));
|
||||
assertThat(bytesRef, equalTo(new BytesRef()));
|
||||
|
||||
BytesValues.Iter bytesValuesIter = bytesValues.getIter(0);
|
||||
assertThat(bytesValuesIter.hasNext(), equalTo(false));
|
||||
|
||||
bytesValuesIter = bytesValues.getIter(1);
|
||||
assertThat(bytesValuesIter.hasNext(), equalTo(false));
|
||||
|
||||
bytesValuesIter = bytesValues.getIter(2);
|
||||
assertThat(bytesValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(bytesValues.getValue(0), equalTo(new BytesRef()));
|
||||
assertThat(bytesValues.getValue(1), equalTo(new BytesRef()));
|
||||
assertThat(bytesValues.getValue(2), equalTo(new BytesRef()));
|
||||
|
||||
assertValues(bytesValues, 0, Strings.EMPTY_ARRAY);
|
||||
assertValues(bytesValues, 1, Strings.EMPTY_ARRAY);
|
||||
assertValues(bytesValues, 2, Strings.EMPTY_ARRAY);
|
||||
BytesValues hashedBytesValues = fieldData.getBytesValues();
|
||||
|
||||
assertThat(hashedBytesValues.hasValue(0), equalTo(false));
|
||||
assertThat(hashedBytesValues.hasValue(1), equalTo(false));
|
||||
assertThat(hashedBytesValues.hasValue(2), equalTo(false));
|
||||
|
||||
assertThat(hashedBytesValues.getValue(0), nullValue());
|
||||
assertThat(hashedBytesValues.getValue(1), nullValue());
|
||||
assertThat(hashedBytesValues.getValue(2), nullValue());
|
||||
|
||||
BytesValues.Iter hashedBytesValuesIter = hashedBytesValues.getIter(0);
|
||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(false));
|
||||
|
||||
hashedBytesValuesIter = hashedBytesValues.getIter(1);
|
||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(false));
|
||||
|
||||
hashedBytesValuesIter = hashedBytesValues.getIter(2);
|
||||
assertThat(hashedBytesValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(hashedBytesValues.getValue(0), equalTo(new BytesRef()));
|
||||
assertThat(hashedBytesValues.getValue(1), equalTo(new BytesRef()));
|
||||
assertThat(hashedBytesValues.getValue(2), equalTo(new BytesRef()));
|
||||
assertValues(hashedBytesValues, 0, Strings.EMPTY_ARRAY);
|
||||
assertValues(hashedBytesValues, 1, Strings.EMPTY_ARRAY);
|
||||
assertValues(hashedBytesValues, 2, Strings.EMPTY_ARRAY);
|
||||
}
|
||||
|
||||
protected abstract void fillAllMissing() throws Exception;
|
||||
|
|
|
@ -58,21 +58,14 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
|
|||
assertThat(longValues.getValueMissing(0, -1), equalTo(2l));
|
||||
assertThat(longValues.getValueMissing(1, -1), equalTo(1l));
|
||||
assertThat(longValues.getValueMissing(2, -1), equalTo(3l));
|
||||
assertThat(longValues.setDocument(0), equalTo(1));
|
||||
assertThat(longValues.nextValue(), equalTo(2l));
|
||||
|
||||
LongValues.Iter longValuesIter = longValues.getIter(0);
|
||||
assertThat(longValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(longValuesIter.next(), equalTo(2l));
|
||||
assertThat(longValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(longValues.setDocument(1), equalTo(1));
|
||||
assertThat(longValues.nextValue(), equalTo(1l));
|
||||
|
||||
longValuesIter = longValues.getIter(1);
|
||||
assertThat(longValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(longValuesIter.next(), equalTo(1l));
|
||||
assertThat(longValuesIter.hasNext(), equalTo(false));
|
||||
|
||||
longValuesIter = longValues.getIter(2);
|
||||
assertThat(longValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(longValuesIter.next(), equalTo(3l));
|
||||
assertThat(longValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(longValues.setDocument(2), equalTo(1));
|
||||
assertThat(longValues.nextValue(), equalTo(3l));
|
||||
|
||||
DoubleValues doubleValues = fieldData.getDoubleValues();
|
||||
|
||||
|
@ -90,20 +83,14 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
|
|||
assertThat(doubleValues.getValueMissing(1, -1), equalTo(1d));
|
||||
assertThat(doubleValues.getValueMissing(2, -1), equalTo(3d));
|
||||
|
||||
DoubleValues.Iter doubleValuesIter = doubleValues.getIter(0);
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(doubleValuesIter.next(), equalTo(2d));
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(1, equalTo(doubleValues.setDocument(0)));
|
||||
assertThat(doubleValues.nextValue(), equalTo(2d));
|
||||
|
||||
doubleValuesIter = doubleValues.getIter(1);
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(doubleValuesIter.next(), equalTo(1d));
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(1, equalTo(doubleValues.setDocument(1)));
|
||||
assertThat(doubleValues.nextValue(), equalTo(1d));
|
||||
|
||||
doubleValuesIter = doubleValues.getIter(2);
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(doubleValuesIter.next(), equalTo(3d));
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(1, equalTo(doubleValues.setDocument(2)));
|
||||
assertThat(doubleValues.nextValue(), equalTo(3d));
|
||||
|
||||
IndexSearcher searcher = new IndexSearcher(readerContext.reader());
|
||||
TopFieldDocs topDocs;
|
||||
|
@ -146,18 +133,13 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
|
|||
assertThat(longValues.getValueMissing(1, -1), equalTo(-1l));
|
||||
assertThat(longValues.getValueMissing(2, -1), equalTo(3l));
|
||||
|
||||
LongValues.Iter longValuesIter = longValues.getIter(0);
|
||||
assertThat(longValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(longValuesIter.next(), equalTo(2l));
|
||||
assertThat(longValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(longValues.setDocument(0), equalTo(1));
|
||||
assertThat(longValues.nextValue(), equalTo(2l));
|
||||
|
||||
longValuesIter = longValues.getIter(1);
|
||||
assertThat(longValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(longValues.setDocument(1), equalTo(0));
|
||||
|
||||
longValuesIter = longValues.getIter(2);
|
||||
assertThat(longValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(longValuesIter.next(), equalTo(3l));
|
||||
assertThat(longValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(longValues.setDocument(2), equalTo(1));
|
||||
assertThat(longValues.nextValue(), equalTo(3l));
|
||||
|
||||
DoubleValues doubleValues = fieldData.getDoubleValues();
|
||||
|
||||
|
@ -174,18 +156,13 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
|
|||
assertThat(doubleValues.getValueMissing(1, -1), equalTo(-1d));
|
||||
assertThat(doubleValues.getValueMissing(2, -1), equalTo(3d));
|
||||
|
||||
DoubleValues.Iter doubleValuesIter = doubleValues.getIter(0);
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(doubleValuesIter.next(), equalTo(2d));
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(1, equalTo(doubleValues.setDocument(0)));
|
||||
assertThat(doubleValues.nextValue(), equalTo(2d));
|
||||
|
||||
doubleValuesIter = doubleValues.getIter(1);
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(0, equalTo(doubleValues.setDocument(1)));
|
||||
|
||||
doubleValuesIter = doubleValues.getIter(2);
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(doubleValuesIter.next(), equalTo(3d));
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(1, equalTo(doubleValues.setDocument(2)));
|
||||
assertThat(doubleValues.nextValue(), equalTo(3d));
|
||||
|
||||
IndexSearcher searcher = new IndexSearcher(readerContext.reader());
|
||||
TopFieldDocs topDocs;
|
||||
|
@ -257,22 +234,15 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
|
|||
assertThat(longValues.getValueMissing(1, -1), equalTo(1l));
|
||||
assertThat(longValues.getValueMissing(2, -1), equalTo(3l));
|
||||
|
||||
LongValues.Iter longValuesIter = longValues.getIter(0);
|
||||
assertThat(longValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(longValuesIter.next(), equalTo(2l));
|
||||
assertThat(longValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(longValuesIter.next(), equalTo(4l));
|
||||
assertThat(longValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(longValues.setDocument(0), equalTo(2));
|
||||
assertThat(longValues.nextValue(), equalTo(2l));
|
||||
assertThat(longValues.nextValue(), equalTo(4l));
|
||||
|
||||
longValuesIter = longValues.getIter(1);
|
||||
assertThat(longValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(longValuesIter.next(), equalTo(1l));
|
||||
assertThat(longValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(longValues.setDocument(1), equalTo(1));
|
||||
assertThat(longValues.nextValue(), equalTo(1l));
|
||||
|
||||
longValuesIter = longValues.getIter(2);
|
||||
assertThat(longValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(longValuesIter.next(), equalTo(3l));
|
||||
assertThat(longValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(longValues.setDocument(2), equalTo(1));
|
||||
assertThat(longValues.nextValue(), equalTo(3l));
|
||||
|
||||
DoubleValues doubleValues = fieldData.getDoubleValues();
|
||||
|
||||
|
@ -290,22 +260,15 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
|
|||
assertThat(doubleValues.getValueMissing(1, -1), equalTo(1d));
|
||||
assertThat(doubleValues.getValueMissing(2, -1), equalTo(3d));
|
||||
|
||||
DoubleValues.Iter doubleValuesIter = doubleValues.getIter(0);
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(doubleValuesIter.next(), equalTo(2d));
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(doubleValuesIter.next(), equalTo(4d));
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(2, equalTo(doubleValues.setDocument(0)));
|
||||
assertThat(doubleValues.nextValue(), equalTo(2d));
|
||||
assertThat(doubleValues.nextValue(), equalTo(4d));
|
||||
|
||||
doubleValuesIter = doubleValues.getIter(1);
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(doubleValuesIter.next(), equalTo(1d));
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(1, equalTo(doubleValues.setDocument(1)));
|
||||
assertThat(doubleValues.nextValue(), equalTo(1d));
|
||||
|
||||
doubleValuesIter = doubleValues.getIter(2);
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(doubleValuesIter.next(), equalTo(3d));
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(1, equalTo(doubleValues.setDocument(2)));
|
||||
assertThat(doubleValues.nextValue(), equalTo(3d));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -331,20 +294,14 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
|
|||
assertThat(longValues.getValueMissing(1, -1), equalTo(-1l));
|
||||
assertThat(longValues.getValueMissing(2, -1), equalTo(3l));
|
||||
|
||||
LongValues.Iter longValuesIter = longValues.getIter(0);
|
||||
assertThat(longValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(longValuesIter.next(), equalTo(2l));
|
||||
assertThat(longValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(longValuesIter.next(), equalTo(4l));
|
||||
assertThat(longValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(longValues.setDocument(0), equalTo(2));
|
||||
assertThat(longValues.nextValue(), equalTo(2l));
|
||||
assertThat(longValues.nextValue(), equalTo(4l));
|
||||
|
||||
longValuesIter = longValues.getIter(1);
|
||||
assertThat(longValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(longValues.setDocument(1), equalTo(0));
|
||||
|
||||
longValuesIter = longValues.getIter(2);
|
||||
assertThat(longValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(longValuesIter.next(), equalTo(3l));
|
||||
assertThat(longValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(longValues.setDocument(2), equalTo(1));
|
||||
assertThat(longValues.nextValue(), equalTo(3l));
|
||||
|
||||
DoubleValues doubleValues = fieldData.getDoubleValues();
|
||||
|
||||
|
@ -361,20 +318,15 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
|
|||
assertThat(doubleValues.getValueMissing(1, -1), equalTo(-1d));
|
||||
assertThat(doubleValues.getValueMissing(2, -1), equalTo(3d));
|
||||
|
||||
DoubleValues.Iter doubleValuesIter = doubleValues.getIter(0);
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(doubleValuesIter.next(), equalTo(2d));
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(doubleValuesIter.next(), equalTo(4d));
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(2, equalTo(doubleValues.setDocument(0)));
|
||||
assertThat(doubleValues.nextValue(), equalTo(2d));
|
||||
assertThat(doubleValues.nextValue(), equalTo(4d));
|
||||
|
||||
doubleValuesIter = doubleValues.getIter(1);
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(0, equalTo(doubleValues.setDocument(1)));
|
||||
|
||||
assertThat(1, equalTo(doubleValues.setDocument(2)));
|
||||
assertThat(doubleValues.nextValue(), equalTo(3d));
|
||||
|
||||
doubleValuesIter = doubleValues.getIter(2);
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(true));
|
||||
assertThat(doubleValuesIter.next(), equalTo(3d));
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(false));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -399,14 +351,10 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
|
|||
assertThat(longValues.getValueMissing(1, -1), equalTo(-1l));
|
||||
assertThat(longValues.getValueMissing(2, -1), equalTo(-1l));
|
||||
|
||||
LongValues.Iter longValuesIter = longValues.getIter(0);
|
||||
assertThat(longValuesIter.hasNext(), equalTo(false));
|
||||
|
||||
longValuesIter = longValues.getIter(1);
|
||||
assertThat(longValuesIter.hasNext(), equalTo(false));
|
||||
|
||||
longValuesIter = longValues.getIter(2);
|
||||
assertThat(longValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(longValues.setDocument(0), equalTo(0));
|
||||
assertThat(longValues.setDocument(1), equalTo(0));
|
||||
assertThat(longValues.setDocument(2), equalTo(0));
|
||||
|
||||
// double values
|
||||
|
||||
|
@ -422,16 +370,14 @@ public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImp
|
|||
assertThat(doubleValues.getValueMissing(1, -1), equalTo(-1d));
|
||||
assertThat(doubleValues.getValueMissing(2, -1), equalTo(-1d));
|
||||
|
||||
DoubleValues.Iter doubleValuesIter = doubleValues.getIter(0);
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(0, equalTo(doubleValues.setDocument(0)));
|
||||
|
||||
doubleValuesIter = doubleValues.getIter(1);
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(0, equalTo(doubleValues.setDocument(1)));
|
||||
|
||||
doubleValuesIter = doubleValues.getIter(2);
|
||||
assertThat(doubleValuesIter.hasNext(), equalTo(false));
|
||||
assertThat(0, equalTo(doubleValues.setDocument(2)));
|
||||
}
|
||||
|
||||
|
||||
protected void fillAllMissing() throws Exception {
|
||||
Document d = new Document();
|
||||
d.add(new StringField("_id", "1", Field.Store.NO));
|
||||
|
|
|
@ -27,15 +27,14 @@ import org.apache.lucene.util.English;
|
|||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util.NumericUtils;
|
||||
import org.elasticsearch.common.settings.ImmutableSettings;
|
||||
import org.elasticsearch.index.fielddata.BytesValues.Iter;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.hamcrest.Matchers;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
public class DuelFieldDataTests extends AbstractFieldDataTests {
|
||||
|
||||
|
@ -349,50 +348,55 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
|
|||
}
|
||||
|
||||
|
||||
private static void duelFieldDataBytes(Random random, AtomicReaderContext context, IndexFieldData left, IndexFieldData right, Preprocessor pre) throws Exception {
|
||||
AtomicFieldData leftData = random.nextBoolean() ? left.load(context) : left.loadDirect(context);
|
||||
AtomicFieldData rightData = random.nextBoolean() ? right.load(context) : right.loadDirect(context);
|
||||
private static void duelFieldDataBytes(Random random, AtomicReaderContext context, IndexFieldData<?> left, IndexFieldData<?> right, Preprocessor pre) throws Exception {
|
||||
AtomicFieldData<?> leftData = random.nextBoolean() ? left.load(context) : left.loadDirect(context);
|
||||
AtomicFieldData<?> rightData = random.nextBoolean() ? right.load(context) : right.loadDirect(context);
|
||||
assertThat(leftData.getNumDocs(), equalTo(rightData.getNumDocs()));
|
||||
|
||||
int numDocs = leftData.getNumDocs();
|
||||
BytesValues leftBytesValues = random.nextBoolean() ? leftData.getBytesValues() : leftData.getHashedBytesValues();
|
||||
BytesValues rightBytesValues = random.nextBoolean() ? rightData.getBytesValues() : rightData.getHashedBytesValues();
|
||||
BytesRef leftSpare = new BytesRef();
|
||||
BytesRef rightSpare = new BytesRef();
|
||||
for (int i = 0; i < numDocs; i++) {
|
||||
assertThat(leftBytesValues.hasValue(i), equalTo(rightBytesValues.hasValue(i)));
|
||||
if (leftBytesValues.hasValue(i)) {
|
||||
assertThat(pre.toString(leftBytesValues.getValue(i)), equalTo(pre.toString(rightBytesValues.getValue(i))));
|
||||
|
||||
} else {
|
||||
assertThat(leftBytesValues.getValue(i), nullValue());
|
||||
assertThat(rightBytesValues.getValue(i), nullValue());
|
||||
assertThat(leftBytesValues.getValue(i), equalTo(new BytesRef()));
|
||||
assertThat(rightBytesValues.getValue(i), equalTo(new BytesRef()));
|
||||
}
|
||||
|
||||
boolean hasValue = leftBytesValues.hasValue(i);
|
||||
Iter leftIter = leftBytesValues.getIter(i);
|
||||
Iter rightIter = rightBytesValues.getIter(i);
|
||||
assertThat(leftIter.hasNext(), equalTo(rightIter.hasNext()));
|
||||
assertThat(leftIter.hasNext(), equalTo(hasValue));
|
||||
|
||||
while (leftIter.hasNext()) {
|
||||
assertThat(hasValue, equalTo(true));
|
||||
assertThat(leftIter.hasNext(), equalTo(rightIter.hasNext()));
|
||||
BytesRef rightBytes = rightIter.next();
|
||||
BytesRef leftBytes = leftIter.next();
|
||||
int numValues = 0;
|
||||
if (leftBytesValues.hasValue(i)) {
|
||||
assertThat(rightBytesValues.hasValue(i), equalTo(true));
|
||||
assertThat(leftBytesValues.setDocument(i), Matchers.greaterThanOrEqualTo(1));
|
||||
assertThat(rightBytesValues.setDocument(i), Matchers.greaterThanOrEqualTo(1));
|
||||
} else {
|
||||
assertThat(rightBytesValues.hasValue(i), equalTo(false));
|
||||
assertThat(leftBytesValues.setDocument(i), equalTo(0));
|
||||
assertThat(rightBytesValues.setDocument(i), equalTo(0));
|
||||
}
|
||||
|
||||
assertThat(pre.toString(leftBytes), equalTo(pre.toString(rightBytes)));
|
||||
if (rightBytes.equals(leftBytes)) {
|
||||
assertThat(leftIter.hash(), equalTo(rightIter.hash()));// call twice
|
||||
assertThat(leftIter.hash(), equalTo(rightIter.hash()));
|
||||
assertThat(leftIter.hash(), equalTo(rightBytes.hashCode()));
|
||||
assertThat(rightIter.hash(), equalTo(leftBytes.hashCode()));
|
||||
assertThat((numValues = leftBytesValues.setDocument(i)), equalTo(rightBytesValues.setDocument(i)));
|
||||
for (int j = 0; j < numValues; j++) {
|
||||
rightSpare.copyBytes(rightBytesValues.nextValue());
|
||||
leftSpare.copyBytes(leftBytesValues.nextValue());
|
||||
assertThat(rightSpare.hashCode(), equalTo(rightBytesValues.currentValueHash()));
|
||||
assertThat(leftSpare.hashCode(), equalTo(leftBytesValues.currentValueHash()));
|
||||
pre.toString(rightSpare);
|
||||
pre.toString(leftSpare);
|
||||
assertThat(pre.toString(leftSpare), equalTo(pre.toString(rightSpare)));
|
||||
if (leftSpare.equals(rightSpare)) {
|
||||
assertThat(leftBytesValues.currentValueHash(), equalTo(rightBytesValues.currentValueHash()));
|
||||
}
|
||||
}
|
||||
assertThat(leftIter.hasNext(), equalTo(rightIter.hasNext()));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private static void duelFieldDataDouble(Random random, AtomicReaderContext context, IndexNumericFieldData left, IndexNumericFieldData right) throws Exception {
|
||||
private static void duelFieldDataDouble(Random random, AtomicReaderContext context, IndexNumericFieldData<?> left, IndexNumericFieldData<?> right) throws Exception {
|
||||
AtomicNumericFieldData leftData = random.nextBoolean() ? left.load(context) : left.loadDirect(context);
|
||||
AtomicNumericFieldData rightData = random.nextBoolean() ? right.load(context) : right.loadDirect(context);
|
||||
|
||||
|
@ -405,27 +409,16 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
|
|||
assertThat(leftDoubleValues.hasValue(i), equalTo(rightDoubleValues.hasValue(i)));
|
||||
if (leftDoubleValues.hasValue(i)) {
|
||||
assertThat(leftDoubleValues.getValue(i), equalTo(rightDoubleValues.getValue(i)));
|
||||
|
||||
} else {
|
||||
assertThat(leftDoubleValues.getValue(i), equalTo(0d));
|
||||
assertThat(rightDoubleValues.getValue(i), equalTo(0d));
|
||||
}
|
||||
|
||||
boolean hasValue = leftDoubleValues.hasValue(i);
|
||||
DoubleValues.Iter leftIter = leftDoubleValues.getIter(i);
|
||||
DoubleValues.Iter rightIter = rightDoubleValues.getIter(i);
|
||||
assertThat(leftIter.hasNext(), equalTo(rightIter.hasNext()));
|
||||
assertThat(leftIter.hasNext(), equalTo(hasValue));
|
||||
|
||||
while (leftIter.hasNext()) {
|
||||
assertThat(hasValue, equalTo(true));
|
||||
assertThat(leftIter.hasNext(), equalTo(rightIter.hasNext()));
|
||||
double rightValue = rightIter.next();
|
||||
double leftValue = leftIter.next();
|
||||
|
||||
assertThat(leftValue, equalTo(rightValue));
|
||||
int numValues = 0;
|
||||
assertThat((numValues = leftDoubleValues.setDocument(i)), equalTo(rightDoubleValues.setDocument(i)));
|
||||
for (int j = 0; j < numValues; j++) {
|
||||
assertThat(leftDoubleValues.nextValue(), equalTo(rightDoubleValues.nextValue()));
|
||||
}
|
||||
assertThat(leftIter.hasNext(), equalTo(rightIter.hasNext()));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -436,33 +429,23 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
|
|||
assertThat(leftData.getNumDocs(), equalTo(rightData.getNumDocs()));
|
||||
|
||||
int numDocs = leftData.getNumDocs();
|
||||
LongValues leftDoubleValues = leftData.getLongValues();
|
||||
LongValues rightDoubleValues = rightData.getLongValues();
|
||||
LongValues leftLongValues = leftData.getLongValues();
|
||||
LongValues rightLongValues = rightData.getLongValues();
|
||||
for (int i = 0; i < numDocs; i++) {
|
||||
assertThat(leftDoubleValues.hasValue(i), equalTo(rightDoubleValues.hasValue(i)));
|
||||
if (leftDoubleValues.hasValue(i)) {
|
||||
assertThat(leftDoubleValues.getValue(i), equalTo(rightDoubleValues.getValue(i)));
|
||||
assertThat(leftLongValues.hasValue(i), equalTo(rightLongValues.hasValue(i)));
|
||||
if (leftLongValues.hasValue(i)) {
|
||||
assertThat(leftLongValues.getValue(i), equalTo(rightLongValues.getValue(i)));
|
||||
|
||||
} else {
|
||||
assertThat(leftDoubleValues.getValue(i), equalTo(0l));
|
||||
assertThat(rightDoubleValues.getValue(i), equalTo(0l));
|
||||
assertThat(leftLongValues.getValue(i), equalTo(0l));
|
||||
assertThat(rightLongValues.getValue(i), equalTo(0l));
|
||||
}
|
||||
|
||||
boolean hasValue = leftDoubleValues.hasValue(i);
|
||||
LongValues.Iter leftIter = leftDoubleValues.getIter(i);
|
||||
LongValues.Iter rightIter = rightDoubleValues.getIter(i);
|
||||
assertThat(leftIter.hasNext(), equalTo(rightIter.hasNext()));
|
||||
assertThat(leftIter.hasNext(), equalTo(hasValue));
|
||||
|
||||
while (leftIter.hasNext()) {
|
||||
assertThat(hasValue, equalTo(true));
|
||||
assertThat(leftIter.hasNext(), equalTo(rightIter.hasNext()));
|
||||
long rightValue = rightIter.next();
|
||||
long leftValue = leftIter.next();
|
||||
|
||||
assertThat(leftValue, equalTo(rightValue));
|
||||
int numValues = 0;
|
||||
assertThat((numValues = leftLongValues.setDocument(i)), equalTo(rightLongValues.setDocument(i)));
|
||||
for (int j = 0; j < numValues; j++) {
|
||||
assertThat(leftLongValues.nextValue(), equalTo(rightLongValues.nextValue()));
|
||||
}
|
||||
assertThat(leftIter.hasNext(), equalTo(rightIter.hasNext()));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -476,6 +459,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
|
|||
|
||||
private static class ToDoublePreprocessor extends Preprocessor {
|
||||
public String toString(BytesRef ref) {
|
||||
assert ref.length > 0;
|
||||
return Double.toString(Double.parseDouble(super.toString(ref)));
|
||||
}
|
||||
}
|
||||
|
@ -486,3 +470,4 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
|
|||
}
|
||||
|
||||
}
|
||||
|
||||
|
|
|
@ -334,8 +334,9 @@ public class LongFieldDataTests extends AbstractNumericFieldDataTests {
|
|||
}
|
||||
|
||||
set.clear();
|
||||
for (LongValues.Iter iter = data.getIter(i); iter.hasNext(); ) {
|
||||
set.add(iter.next());
|
||||
int numValues = data.setDocument(i);
|
||||
for (int j = 0; j < numValues; j++) {
|
||||
set.add(data.nextValue());
|
||||
}
|
||||
assertThat(set, equalTo(v));
|
||||
|
||||
|
@ -348,8 +349,9 @@ public class LongFieldDataTests extends AbstractNumericFieldDataTests {
|
|||
}
|
||||
}
|
||||
doubleSet.clear();
|
||||
for (DoubleValues.Iter iter = doubleData.getIter(i); iter.hasNext(); ) {
|
||||
doubleSet.add(iter.next());
|
||||
numValues = doubleData.setDocument(i);
|
||||
for (int j = 0; j < numValues; j++) {
|
||||
doubleSet.add(doubleData.nextValue());
|
||||
}
|
||||
assertThat(doubleSet, equalTo(doubleV));
|
||||
}
|
||||
|
|
|
@ -124,7 +124,7 @@ public class MultiOrdinalsTests extends ElasticsearchTestCase {
|
|||
for (int i = 0; i < array.length; i++) {
|
||||
array[i] = docOrds.get(i);
|
||||
}
|
||||
assertIter(docs.getIter(docId), array);
|
||||
assertIter(docs, docId, array);
|
||||
}
|
||||
for (int i = docId + 1; i < ordAndId.id; i++) {
|
||||
assertThat(docs.getOrd(i), equalTo(0L));
|
||||
|
@ -215,12 +215,11 @@ public class MultiOrdinalsTests extends ElasticsearchTestCase {
|
|||
assertEquals(docs, ordinalPlan);
|
||||
}
|
||||
|
||||
protected static void assertIter(Ordinals.Docs.Iter iter, long... expectedOrdinals) {
|
||||
protected static void assertIter(Ordinals.Docs docs, int docId, long... expectedOrdinals) {
|
||||
assertThat(docs.setDocument(docId), equalTo(expectedOrdinals.length));
|
||||
for (long expectedOrdinal : expectedOrdinals) {
|
||||
assertThat(iter.next(), equalTo(expectedOrdinal));
|
||||
assertThat(docs.nextOrd(), equalTo(expectedOrdinal));
|
||||
}
|
||||
assertThat(iter.next(), equalTo(0L)); // Last one should always be 0
|
||||
assertThat(iter.next(), equalTo(0L)); // Just checking it stays 0
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -284,7 +283,7 @@ public class MultiOrdinalsTests extends ElasticsearchTestCase {
|
|||
assertThat(ref.offset, equalTo(0));
|
||||
long[] ords = ordinalPlan[doc];
|
||||
assertThat(ref, equalTo(new LongsRef(ords, 0, ords.length)));
|
||||
assertIter(docs.getIter(doc), ords);
|
||||
assertIter(docs, doc, ords);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
Loading…
Reference in New Issue