Fielddata: Remove BytesValues.WithOrdinals.currentOrd and copyShared.
These methods don't exist in Lucene's sorted set doc values. Relates to #6524
commit 8ccfca3a2f (parent 9e624942d8)
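Every hunk below applies the same two mechanical rewrites at the call sites: copyShared() becomes an explicit BytesRef.deepCopyOf(...) of the value that was just read, and code that relied on currentOrd() now keeps the ordinal returned by nextOrd() in a local variable. A minimal caller-side sketch, assuming only the BytesValues.WithOrdinals methods visible in the hunks (setDocument, nextOrd, getValueByOrd); the collecting helper itself is illustrative, not code from this commit:

    import org.apache.lucene.util.BytesRef;
    import org.elasticsearch.index.fielddata.BytesValues;

    import java.util.ArrayList;
    import java.util.List;

    class CopySharedMigration {
        static List<BytesRef> termsOf(BytesValues.WithOrdinals values, int docId) {
            List<BytesRef> terms = new ArrayList<>();
            final int count = values.setDocument(docId);
            for (int i = 0; i < count; i++) {
                // before: values.nextValue(); terms.add(values.copyShared());
                // after: the shared BytesRef is deep-copied explicitly before being stored
                long ord = values.nextOrd(); // keep the ord; there is no currentOrd() to re-read
                terms.add(BytesRef.deepCopyOf(values.getValueByOrd(ord)));
            }
            return terms;
        }
    }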
@@ -19,10 +19,8 @@
 
 package org.elasticsearch.index.fielddata;
 
-import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.index.fielddata.ScriptDocValues.Strings;
-import org.elasticsearch.index.fielddata.plain.AtomicFieldDataWithOrdinalsTermsEnum;
 
 /**
  * The thread safe {@link org.apache.lucene.index.AtomicReader} level cache of the data.
@@ -90,19 +88,9 @@ public interface AtomicFieldData<Script extends ScriptDocValues> extends RamUsag
                 public long getMaxOrd() {
                     return 0;
                 }
-
-                @Override
-                public long currentOrd() {
-                    return MISSING_ORDINAL;
-                }
             };
         }
 
-        @Override
-        public TermsEnum getTermsEnum() {
-            return new AtomicFieldDataWithOrdinalsTermsEnum(this);
-        }
-
     };
 
     /**
@@ -111,11 +99,6 @@ public interface AtomicFieldData<Script extends ScriptDocValues> extends RamUsag
         */
        BytesValues.WithOrdinals getBytesValues();
 
-       /**
-        * Returns a terms enum to iterate over all the underlying values.
-        */
-       TermsEnum getTermsEnum();
-
    }
 
    /**
@@ -20,8 +20,10 @@
 package org.elasticsearch.index.fielddata;
 
 import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchIllegalStateException;
+import org.elasticsearch.index.fielddata.plain.BytesValuesWithOrdinalsTermsEnum;
 
 /**
  * A state-full lightweight per document set of <code>byte[]</code> values.
@@ -60,15 +62,6 @@ public abstract class BytesValues {
         return multiValued;
     }
 
-    /**
-     * Converts the current shared {@link BytesRef} to a stable instance. Note,
-     * this calls makes the bytes safe for *reads*, not writes (into the same BytesRef). For example,
-     * it makes it safe to be placed in a map.
-     */
-    public BytesRef copyShared() {
-        return BytesRef.deepCopyOf(scratch);
-    }
-
     /**
      * Sets iteration to the specified docID and returns the number of
      * values for this document ID,
@@ -139,12 +132,6 @@ public abstract class BytesValues {
         */
        public abstract long nextOrd();
 
-       /**
-        * Returns the current ordinal in the iteration
-        * @return the current ordinal in the iteration
-        */
-       public abstract long currentOrd();
-
       /**
        * Returns the value for the given ordinal.
        * @param ord the ordinal to lookup.
@@ -157,6 +144,13 @@
        public BytesRef nextValue() {
            return getValueByOrd(nextOrd());
        }
+
+       /**
+        * Returns a terms enum to iterate over all the underlying values.
+        */
+       public TermsEnum getTermsEnum() {
+           return new BytesValuesWithOrdinalsTermsEnum(this);
+       }
    }
 
    /**
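The terms enum accessor is not removed, it moves: getTermsEnum() now lives on BytesValues.WithOrdinals instead of AtomicFieldData.WithOrdinals, so callers chain through getBytesValues() first (see the InternalGlobalOrdinalsBuilder and test hunks further down). Consuming the enum is plain Lucene and unchanged; a small illustrative helper:

    import org.apache.lucene.index.TermsEnum;
    import org.apache.lucene.util.BytesRef;

    import java.io.IOException;

    class TermsEnumWalk {
        // termsEnum would come from atomicFieldData.getBytesValues().getTermsEnum()
        static long countTerms(TermsEnum termsEnum) throws IOException {
            long size = 0;
            for (BytesRef term = termsEnum.next(); term != null; term = termsEnum.next()) {
                size++; // term is only valid until the next call to next()
            }
            return size;
        }
    }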
@@ -43,11 +43,6 @@ public class GlobalOrdinalMapping extends BytesValues.WithOrdinals {
 
     int readerIndex;
 
-    @Override
-    public BytesRef copyShared() {
-        return bytesValues[readerIndex].copyShared();
-    }
-
     @Override
     public long getMaxOrd() {
         return ordinalMap.getValueCount();
@@ -67,11 +62,6 @@ public class GlobalOrdinalMapping extends BytesValues.WithOrdinals {
         return getGlobalOrd(values.nextOrd());
     }
 
-    @Override
-    public long currentOrd() {
-        return getGlobalOrd(values.currentOrd());
-    }
-
     @Override
     public int setDocument(int docId) {
         return values.setDocument(docId);
@@ -49,7 +49,7 @@ public class InternalGlobalOrdinalsBuilder extends AbstractIndexComponent implem
         final TermsEnum[] subs = new TermsEnum[indexReader.leaves().size()];
         for (int i = 0; i < indexReader.leaves().size(); ++i) {
             atomicFD[i] = indexFieldData.load(indexReader.leaves().get(i));
-            subs[i] = atomicFD[i].getTermsEnum();
+            subs[i] = atomicFD[i].getBytesValues().getTermsEnum();
         }
         final OrdinalMap ordinalMap = new OrdinalMap(null, subs);
         final long memorySizeInBytes = ordinalMap.ramBytesUsed();
@@ -20,14 +20,12 @@ package org.elasticsearch.index.fielddata.ordinals;
 
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.MultiDocValues.OrdinalMap;
-import org.apache.lucene.index.TermsEnum;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.fielddata.AtomicFieldData;
 import org.elasticsearch.index.fielddata.BytesValues;
 import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.fielddata.ScriptDocValues;
-import org.elasticsearch.index.fielddata.plain.AtomicFieldDataWithOrdinalsTermsEnum;
 import org.elasticsearch.index.mapper.FieldMapper;
 
 /**
@@ -86,11 +84,6 @@ final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFiel
            throw new UnsupportedOperationException("Script values not supported on global ordinals");
        }
 
-       @Override
-       public TermsEnum getTermsEnum() {
-           return new AtomicFieldDataWithOrdinalsTermsEnum(this);
-       }
-
        @Override
        public void close() {
        }
@@ -93,7 +93,6 @@ public class MultiOrdinals extends Ordinals {
         private final AppendingPackedLongBuffer ords;
         private long offset;
         private long limit;
-        private long currentOrd;
         private final ValuesHolder values;
 
         MultiDocs(MultiOrdinals ordinals, ValuesHolder values) {
@@ -114,16 +113,16 @@ public class MultiOrdinals extends Ordinals {
             final long startOffset = docId > 0 ? endOffsets.get(docId - 1) : 0;
             final long endOffset = endOffsets.get(docId);
             if (startOffset == endOffset) {
-                return currentOrd = MISSING_ORDINAL; // ord for missing values
+                return MISSING_ORDINAL; // ord for missing values
             } else {
-                return currentOrd = ords.get(startOffset);
+                return ords.get(startOffset);
             }
         }
 
         @Override
         public long nextOrd() {
             assert offset < limit;
-            return currentOrd = ords.get(offset++);
+            return ords.get(offset++);
         }
 
         @Override
@@ -135,19 +134,9 @@ public class MultiOrdinals extends Ordinals {
             return (int) (endOffset - startOffset);
         }
 
-        @Override
-        public long currentOrd() {
-            return currentOrd;
-        }
-
         @Override
         public BytesRef getValueByOrd(long ord) {
             return values.getValueByOrd(ord);
         }
-
-        @Override
-        public BytesRef copyShared() {
-            return values.copy(scratch);
-        }
     }
 }
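Note that MultiOrdinals.MultiDocs loses its currentOrd field entirely: getOrd and nextOrd now just return the ordinal instead of also caching it. Any consumer that used to call currentOrd() afterwards must hold on to the return value itself; a hypothetical consumer, using only methods shown in these hunks:

    import org.apache.lucene.util.BytesRef;
    import org.elasticsearch.index.fielddata.BytesValues;

    class OrdinalConsumer {
        static BytesRef firstValue(BytesValues.WithOrdinals values, int docId) {
            if (values.setDocument(docId) == 0) {
                return null; // no value for this document
            }
            long ord = values.nextOrd(); // before: nextOrd(); ... currentOrd()
            return values.getValueByOrd(ord); // still a shared ref; deep-copy before retaining
        }
    }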
@@ -95,19 +95,9 @@ public class SinglePackedOrdinals extends Ordinals {
             return 1 + (int) Math.min(currentOrdinal, 0);
         }
 
-        @Override
-        public long currentOrd() {
-            return currentOrdinal;
-        }
-
         @Override
         public BytesRef getValueByOrd(long ord) {
             return values.getValueByOrd(ord);
         }
-
-        @Override
-        public BytesRef copyShared() {
-            return values.copy(scratch);
-        }
     }
 }
@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.index.fielddata.plain;
 
-import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IntsRef;
 import org.apache.lucene.util.fst.FST;
@@ -21,7 +21,6 @@ package org.elasticsearch.index.fielddata.plain;
 
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.Index;
@@ -75,11 +74,6 @@ public class IndexIndexFieldData implements IndexFieldData.WithOrdinals<AtomicFi
             return 1;
         }
 
-        @Override
-        public long currentOrd() {
-            return BytesValues.WithOrdinals.MIN_ORDINAL;
-        }
-
         @Override
         public BytesRef getValueByOrd(long ord) {
             return scratch;
@@ -114,11 +108,6 @@ public class IndexIndexFieldData implements IndexFieldData.WithOrdinals<AtomicFi
         public void close() {
         }
 
-        @Override
-        public TermsEnum getTermsEnum() {
-            return new AtomicFieldDataWithOrdinalsTermsEnum(this);
-        }
-
     }
 
     private final FieldMapper.Names names;
@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.index.fielddata.plain;
 
-import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.PagedBytes;
 import org.apache.lucene.util.packed.MonotonicAppendingLongBuffer;
@@ -68,8 +68,7 @@ public class ParentChildAtomicFieldData implements AtomicFieldData {
                 int numValues = values.setDocument(docId);
                 assert numValues <= 1 : "Per doc/type combination only a single value is allowed";
                 if (numValues == 1) {
-                    values.nextValue();
-                    terms[counter++] = values.copyShared();
+                    terms[counter++] = BytesRef.deepCopyOf(values.nextValue());
                 }
             }
             assert counter <= 2 : "A single doc can potentially be both parent and child, so the maximum allowed values is 2";
@@ -96,7 +96,6 @@ abstract class SortedSetDVAtomicFieldData {
         private final SortedSetDocValues values;
         private long[] ords;
         private int ordIndex = Integer.MAX_VALUE;
-        private long currentOrdinal = -1;
 
         SortedSetValues(SortedSetDocValues values) {
             super(DocValues.unwrapSingleton(values) == null);
@@ -112,13 +111,13 @@ abstract class SortedSetDVAtomicFieldData {
         @Override
         public long getOrd(int docId) {
             values.setDocument(docId);
-            return currentOrdinal = values.nextOrd();
+            return values.nextOrd();
         }
 
         @Override
         public long nextOrd() {
             assert ordIndex < ords.length;
-            return currentOrdinal = ords[ordIndex++];
+            return ords[ordIndex++];
         }
 
         @Override
@@ -135,11 +134,6 @@ abstract class SortedSetDVAtomicFieldData {
             return i;
         }
 
-        @Override
-        public long currentOrd() {
-            return currentOrdinal;
-        }
-
         @Override
         public BytesRef getValueByOrd(long ord) {
             values.lookupOrd(ord, scratch);
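SortedSetDVAtomicFieldData wraps Lucene's SortedSetDocValues directly, which is the reason the two methods had to go: that API only offers nextOrd() plus lookupOrd(ord, result) into a caller-supplied, reused BytesRef, with no copy or current-ord helpers. A sketch against the raw Lucene API of that era (signatures assumed from Lucene 4.x, not taken from this commit):

    import org.apache.lucene.index.SortedSetDocValues;
    import org.apache.lucene.util.BytesRef;

    import java.util.ArrayList;
    import java.util.List;

    class SortedSetWalk {
        // Collects stable copies of one document's values; lookupOrd fills the
        // shared scratch, so each value is deep-copied before being stored.
        static List<BytesRef> valuesFor(SortedSetDocValues dv, int docId) {
            List<BytesRef> out = new ArrayList<>();
            BytesRef scratch = new BytesRef();
            dv.setDocument(docId);
            for (long ord = dv.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = dv.nextOrd()) {
                dv.lookupOrd(ord, scratch);
                out.add(BytesRef.deepCopyOf(scratch));
            }
            return out;
        }
    }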
@@ -74,7 +74,7 @@ final class QueriesLoaderCollector extends Collector {
                 // id is only used for logging, if we fail we log the id in the catch statement
                 final Query parseQuery = percolator.parsePercolatorDocument(null, fieldsVisitor.source());
                 if (parseQuery != null) {
-                    queries.put(idValues.copyShared(), parseQuery);
+                    queries.put(BytesRef.deepCopyOf(id), parseQuery);
                 } else {
                     logger.warn("failed to add query [{}] - parser returned null", id);
                 }
@@ -23,14 +23,17 @@ import org.apache.lucene.queries.TermFilter;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Filter;
-import org.apache.lucene.util.*;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.FixedBitSet;
+import org.apache.lucene.util.LongBitSet;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.lucene.search.AndFilter;
+import org.elasticsearch.common.util.BytesRefHash;
 import org.elasticsearch.common.util.LongHash;
 import org.elasticsearch.index.fielddata.BytesValues;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
-import org.elasticsearch.common.util.BytesRefHash;
 import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
@@ -49,8 +52,7 @@ final class ParentIdsFilter extends Filter {
                                       String parentType, BytesValues.WithOrdinals globalValues,
                                       LongBitSet parentOrds, long numFoundParents) {
         if (numFoundParents == 1) {
-            globalValues.getValueByOrd(parentOrds.nextSetBit(0));
-            BytesRef id = globalValues.copyShared();
+            BytesRef id = globalValues.getValueByOrd(parentOrds.nextSetBit(0));
             if (nonNestedDocsFilter != null) {
                 List<Filter> filters = Arrays.asList(
                         new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))),
@@ -83,8 +85,7 @@ final class ParentIdsFilter extends Filter {
                                       String parentType, BytesValues.WithOrdinals globalValues,
                                       LongHash parentIdxs, long numFoundParents) {
         if (numFoundParents == 1) {
-            globalValues.getValueByOrd(parentIdxs.get(0));
-            BytesRef id = globalValues.copyShared();
+            BytesRef id = globalValues.getValueByOrd(parentIdxs.get(0));
             if (nonNestedDocsFilter != null) {
                 List<Filter> filters = Arrays.asList(
                         new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))),
@@ -765,7 +765,7 @@ public class PercolatorService extends AbstractComponent {
                 final int numValues = values.setDocument(localDocId);
                 assert numValues == 1;
                 BytesRef bytes = values.nextValue();
-                matches.add(values.copyShared());
+                matches.add(BytesRef.deepCopyOf(bytes));
                 if (hls != null) {
                     Query query = context.percolateQueries().get(bytes);
                     context.parsedQuery(new ParsedQuery(query, ImmutableMap.<String, Filter>of()));
@@ -212,7 +212,7 @@
                 }
                 if (collector.exists()) {
                     if (!limit || counter < size) {
-                        matches.add(values.copyShared());
+                        matches.add(BytesRef.deepCopyOf(current));
                         if (context.highlight() != null) {
                             highlightPhase.hitExecute(context, context.hitContext());
                             hls.add(context.hitContext().hit().getHighlightFields());
@@ -334,7 +334,7 @@
                 }
                 if (collector.exists()) {
                     if (!limit || counter < size) {
-                        matches.add(values.copyShared());
+                        matches.add(BytesRef.deepCopyOf(current));
                         scores.add(scorer.score());
                         if (context.highlight() != null) {
                             highlightPhase.hitExecute(context, context.hitContext());
@@ -376,19 +376,9 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr
             return numAcceptedOrds;
         }
 
-        @Override
-        public long currentOrd() {
-            return currentOrd;
-        }
-
         @Override
         public BytesRef getValueByOrd(long ord) {
             return inner.getValueByOrd(ord);
         }
-
-        @Override
-        public BytesRef copyShared() {
-            return inner.copyShared();
-        }
     }
 }
@@ -89,6 +89,7 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator {
         }
     }
 
+    // TODO: use terms enum
     /** Returns an iterator over the field data terms. */
     private static Iterator<BytesRef> terms(final BytesValues.WithOrdinals bytesValues, boolean reverse) {
         if (reverse) {
@@ -103,8 +104,7 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator {
 
                 @Override
                 public BytesRef next() {
-                    bytesValues.getValueByOrd(i--);
-                    return bytesValues.copyShared();
+                    return BytesRef.deepCopyOf(bytesValues.getValueByOrd(i--));
                 }
 
             };
@@ -120,8 +120,7 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator {
 
                 @Override
                 public BytesRef next() {
-                    bytesValues.getValueByOrd(i++);
-                    return bytesValues.copyShared();
+                    return BytesRef.deepCopyOf(bytesValues.getValueByOrd(i++));
                 }
 
             };
@@ -79,7 +79,7 @@ public class IncludeExclude {
      * Computes which global ordinals are accepted by this IncludeExclude instance.
      */
     public LongBitSet acceptedGlobalOrdinals(BytesValues.WithOrdinals globalOrdinals, ValuesSource.Bytes.WithOrdinals valueSource) {
-        TermsEnum globalTermsEnum = valueSource.getGlobalTermsEnum();
+        TermsEnum globalTermsEnum = valueSource.globalBytesValues().getTermsEnum();
         LongBitSet acceptedGlobalOrdinals = new LongBitSet(globalOrdinals.getMaxOrd());
         try {
             for (BytesRef term = globalTermsEnum.next(); term != null; term = globalTermsEnum.next()) {
@@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.support;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexReaderContext;
-import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.BytesRef;
@@ -176,8 +175,6 @@ public abstract class ValuesSource {
 
         public abstract long globalMaxOrd(IndexSearcher indexSearcher);
 
-        public abstract TermsEnum getGlobalTermsEnum();
-
         public static class FieldData extends WithOrdinals implements ReaderContextAware {
 
             protected final IndexFieldData.WithOrdinals<?> indexFieldData;
@@ -262,11 +259,6 @@ public abstract class ValuesSource {
                     return maxOrd = values.getMaxOrd();
                 }
             }
-
-            @Override
-            public TermsEnum getGlobalTermsEnum() {
-                return globalAtomicFieldData.getTermsEnum();
-            }
         }
 
     }
@@ -280,7 +280,7 @@ public class TermsStringOrdinalsFacetExecutor extends FacetExecutor {
         }
 
         public BytesRef copyCurrent() {
-            return values.copyShared();
+            return BytesRef.deepCopyOf(current);
         }
 
         @Override
@@ -170,7 +170,7 @@ public class TermsStatsStringFacetExecutor extends FacetExecutor {
             spare.reset(value, hashCode);
             InternalTermsStatsStringFacet.StringEntry stringEntry = entries.get(spare);
             if (stringEntry == null) {
-                HashedBytesRef theValue = new HashedBytesRef(values.copyShared(), hashCode);
+                HashedBytesRef theValue = new HashedBytesRef(BytesRef.deepCopyOf(value), hashCode);
                 stringEntry = new InternalTermsStatsStringFacet.StringEntry(theValue, 0, 0, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
                 entries.put(theValue, stringEntry);
             }
@@ -210,7 +210,7 @@ public class TermsStatsStringFacetExecutor extends FacetExecutor {
             spare.reset(value, hashCode);
             InternalTermsStatsStringFacet.StringEntry stringEntry = entries.get(spare);
             if (stringEntry == null) {
-                HashedBytesRef theValue = new HashedBytesRef(values.copyShared(), hashCode);
+                HashedBytesRef theValue = new HashedBytesRef(BytesRef.deepCopyOf(value), hashCode);
                 stringEntry = new InternalTermsStatsStringFacet.StringEntry(theValue, 1, 0, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
                 entries.put(theValue, stringEntry);
             } else {
@@ -506,7 +506,7 @@ public abstract class AbstractStringFieldDataTests extends AbstractFieldDataImpl
         IndexFieldData.WithOrdinals ifd = getForField("value");
         AtomicFieldData.WithOrdinals afd = ifd.load(atomicReaderContext);
 
-        TermsEnum termsEnum = afd.getTermsEnum();
+        TermsEnum termsEnum = afd.getBytesValues().getTermsEnum();
         int size = 0;
         while (termsEnum.next() != null) {
             size++;
@@ -112,40 +112,8 @@ public class FieldDataSourceTests extends ElasticsearchTestCase {
         };
     }
 
-    private static void assertConsistent(BytesValues values) {
-        final int numDocs = scaledRandomIntBetween(10, 100);
-        for (int i = 0; i < numDocs; ++i) {
-            final int valueCount = values.setDocument(i);
-            for (int j = 0; j < valueCount; ++j) {
-                final BytesRef term = values.nextValue();
-                assertTrue(term.bytesEquals(values.copyShared()));
-            }
-        }
-    }
-
-    @Test
-    public void bytesValuesWithScript() {
-        final BytesValues values = randomBytesValues();
-        ValuesSource source = new ValuesSource.Bytes() {
-
-            @Override
-            public BytesValues bytesValues() {
-                return values;
-            }
-
-            @Override
-            public MetaData metaData() {
-                throw new UnsupportedOperationException();
-            }
-
-        };
-        SearchScript script = randomScript();
-        assertConsistent(new ValuesSource.WithScript.BytesValues(source, script));
-    }
-
     @Test
     public void sortedUniqueBytesValues() {
-        assertConsistent(new ValuesSource.Bytes.SortedAndUnique.SortedUniqueBytesValues(randomBytesValues()));
         assertSortedAndUnique(new ValuesSource.Bytes.SortedAndUnique.SortedUniqueBytesValues(randomBytesValues()));
     }
 
@@ -160,7 +128,7 @@ public class FieldDataSourceTests extends ElasticsearchTestCase {
             if (j > 0) {
                 assertThat(BytesRef.getUTF8SortedAsUnicodeComparator().compare(ref.get(ref.size() - 1), term), lessThan(0));
             }
-            ref.add(values.copyShared());
+            ref.add(BytesRef.deepCopyOf(term));
         }
     }
 }