Upgrade to a Lucene 7 snapshot (#24089)

We want to upgrade to Lucene 7 ahead of its release in order to check whether it causes any trouble for Elasticsearch before Lucene 7.0 goes final. From a user perspective, the main benefit of this upgrade is the enhanced support for sparse fields, whose resource consumption is now a function of the number of docs that have a value rather than the total number of docs in the index.
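For context, the sparse-field improvement comes from Lucene 7 moving doc values from a random-access API to an iterator API, so consumers only pay for documents that actually have a value. A minimal sketch of the new access pattern (the `price` field and the surrounding class are hypothetical, not part of this change):

import java.io.IOException;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.DocIdSetIterator;

class SparseDocValuesSketch {
    /** Sums the "price" values of one segment, visiting only docs that have one. */
    static long sumPrices(LeafReader reader) throws IOException {
        NumericDocValues values = reader.getNumericDocValues("price");
        if (values == null) {
            return 0; // no document in this segment has the field
        }
        long sum = 0;
        // Lucene 6 exposed values.get(docID) for every doc in the segment; Lucene 7
        // doc values are DocIdSetIterators that skip documents without a value.
        for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) {
            sum += values.longValue();
        }
        return sum;
    }
}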

Some notes about the change:
 - it includes the deprecation of the `disable_coord` parameter of the `bool` and `common_terms` queries: Lucene has removed support for coord factors
 - it includes the deprecation of the `index.similarity.base` expert setting, since it was only useful to configure coords and query norms, which have both been removed
 - two tests have been marked with `@AwaitsFix` because of #23966, which we intend to address after the merge
Adrien Grand authored on 2017-04-18 15:17:21 +02:00; committed by GitHub
parent f217eb8ad8
commit 4632661bc7
269 changed files with 3993 additions and 3868 deletions

@ -26,12 +26,6 @@ java.util.concurrent.ThreadLocalRandom
java.security.MessageDigest#clone() @ use org.elasticsearch.common.hash.MessageDigests
@defaultMessage this should not have been added to lucene in the first place
org.apache.lucene.index.IndexReader#getCombinedCoreAndDeletesKey()
@defaultMessage Soon to be removed
org.apache.lucene.document.FieldType#numericType()
@defaultMessage Don't use MethodHandles in slow ways, don't be lenient in tests.
java.lang.invoke.MethodHandle#invoke(java.lang.Object[])
java.lang.invoke.MethodHandle#invokeWithArguments(java.lang.Object[])

@ -36,16 +36,6 @@ org.apache.lucene.index.IndexReader#decRef()
org.apache.lucene.index.IndexReader#incRef()
org.apache.lucene.index.IndexReader#tryIncRef()
@defaultMessage Close listeners can only installed via ElasticsearchDirectoryReader#addReaderCloseListener
org.apache.lucene.index.IndexReader#addReaderClosedListener(org.apache.lucene.index.IndexReader$ReaderClosedListener)
org.apache.lucene.index.IndexReader#removeReaderClosedListener(org.apache.lucene.index.IndexReader$ReaderClosedListener)
@defaultMessage Pass the precision step from the mappings explicitly instead
org.apache.lucene.search.LegacyNumericRangeQuery#newDoubleRange(java.lang.String,java.lang.Double,java.lang.Double,boolean,boolean)
org.apache.lucene.search.LegacyNumericRangeQuery#newFloatRange(java.lang.String,java.lang.Float,java.lang.Float,boolean,boolean)
org.apache.lucene.search.LegacyNumericRangeQuery#newIntRange(java.lang.String,java.lang.Integer,java.lang.Integer,boolean,boolean)
org.apache.lucene.search.LegacyNumericRangeQuery#newLongRange(java.lang.String,java.lang.Long,java.lang.Long,boolean,boolean)
@defaultMessage Only use wait / notify when really needed try to use concurrency primitives, latches or callbacks instead.
java.lang.Object#wait()
java.lang.Object#wait(long)

@ -1,6 +1,6 @@
# When updating elasticsearch, please update 'rest' version in core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
elasticsearch = 6.0.0-alpha1
lucene = 6.5.0
lucene = 7.0.0-snapshot-89f6d17
# optional dependencies
spatial4j = 0.6

@ -1 +0,0 @@
3989779b05ecd0ace6affe19223b1c27156604f1

@ -0,0 +1 @@
e69234c2e898d86a53edbe8d22e33bebc45286cd

@ -1 +0,0 @@
6a8660e7133f357ef40d9cac26316ccd9937a2eb

@ -0,0 +1 @@
48172a8e1fe6562f55ab671d42af53652794d5df

@ -1 +0,0 @@
ff176c9bde4228b43827849f5d2ff2e2717e3297

@ -0,0 +1 @@
3dab251d4c7ab4ff5095e5f1d1e127ec2cf3c07d

@ -1 +0,0 @@
10d2e5b36f460527ac9b948be0ec3077bde5b0ca

@ -0,0 +1 @@
c01ae8a23b733d75d058a76bd85fcb49b9fd06fd

@ -1 +0,0 @@
0019bb6a631ea0123e8e553b0510fa81c9d3c3eb

@ -0,0 +1 @@
c53df048b97946fe66035505306b5651b702adb1

@ -1 +0,0 @@
dad85baba266793b9ceb80a9b08c4ee9838e09df

@ -0,0 +1 @@
1ecb349ba29abab75359e5125ac8a94fc81441d5

@ -1 +0,0 @@
938f9f7efe8a403fd57c99aedd75d040d9caa896

@ -0,0 +1 @@
e5f53b38652b1284ff254fba39e624ec117aef7d

@ -1 +0,0 @@
afdff39ecb30f6e2c6f056a5bdfcb13d928a25af

@ -0,0 +1 @@
2f340ed3f46d6b4c89fa31975b675c19028c15eb

@ -1 +0,0 @@
8e3971a008070712d57b59cf1f7b44c0d9d3df25

@ -0,0 +1 @@
a13862fb62cc1e516d16d6b6bb3cdb906c4925f6

@ -1 +0,0 @@
225b904edf91ccdffffa398e1924ebadd5677c09

@ -0,0 +1 @@
4e014f72a588453bae7dd1a555d741cf3bf39032

@ -1 +0,0 @@
5c994fc5dc4f37133a861571211303d81c5d51ff

@ -0,0 +1 @@
5e87d61c604d6b1c0ee5c38f09441d1b8b9c8c2b

@ -1 +0,0 @@
553b7b13bef994f14076a85557df03cad67322e9

@ -0,0 +1 @@
be14aa163b339403d8ec904493c1be5dfa9baeaf

@ -1 +0,0 @@
73deae791d861820974600705ba06e9f801cbe56

@ -0,0 +1 @@
a2c13be0fe4c5a98a30ec6ae673be1442409817c

@ -1 +0,0 @@
c2aad69500dac79338ef45f570cab47bec3d2724

@ -0,0 +1 @@
92b8282e474845fdae31f9f239f953bc7164401f

@ -1 +0,0 @@
acf211f2bf901dfc8155a46c5a42c5650edf74ef

@ -0,0 +1 @@
1c4aaea267ed41657ebf01769bfddbcab5b27414

@ -296,16 +296,15 @@ public abstract class BlendedTermQuery extends Query {
return Objects.hash(classHash(), Arrays.hashCode(equalsTerms()));
}
public static BlendedTermQuery booleanBlendedQuery(Term[] terms, final boolean disableCoord) {
return booleanBlendedQuery(terms, null, disableCoord);
public static BlendedTermQuery booleanBlendedQuery(Term[] terms) {
return booleanBlendedQuery(terms, null);
}
public static BlendedTermQuery booleanBlendedQuery(Term[] terms, final float[] boosts, final boolean disableCoord) {
public static BlendedTermQuery booleanBlendedQuery(Term[] terms, final float[] boosts) {
return new BlendedTermQuery(terms, boosts) {
@Override
protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) {
BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder();
booleanQueryBuilder.setDisableCoord(disableCoord);
for (int i = 0; i < terms.length; i++) {
Query query = new TermQuery(terms[i], ctx[i]);
if (boosts != null && boosts[i] != 1f) {
@ -318,14 +317,12 @@ public abstract class BlendedTermQuery extends Query {
};
}
public static BlendedTermQuery commonTermsBlendedQuery(Term[] terms, final float[] boosts, final boolean disableCoord, final float maxTermFrequency) {
public static BlendedTermQuery commonTermsBlendedQuery(Term[] terms, final float[] boosts, final float maxTermFrequency) {
return new BlendedTermQuery(terms, boosts) {
@Override
protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) {
BooleanQuery.Builder highBuilder = new BooleanQuery.Builder();
highBuilder.setDisableCoord(disableCoord);
BooleanQuery.Builder lowBuilder = new BooleanQuery.Builder();
lowBuilder.setDisableCoord(disableCoord);
for (int i = 0; i < terms.length; i++) {
Query query = new TermQuery(terms[i], ctx[i]);
if (boosts != null && boosts[i] != 1f) {
@ -343,7 +340,6 @@ public abstract class BlendedTermQuery extends Query {
BooleanQuery low = lowBuilder.build();
if (low.clauses().isEmpty()) {
BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder();
queryBuilder.setDisableCoord(disableCoord);
for (BooleanClause booleanClause : high) {
queryBuilder.add(booleanClause.getQuery(), Occur.MUST);
}
@ -352,7 +348,6 @@ public abstract class BlendedTermQuery extends Query {
return low;
} else {
return new BooleanQuery.Builder()
.setDisableCoord(true)
.add(high, BooleanClause.Occur.SHOULD)
.add(low, BooleanClause.Occur.MUST)
.build();

@ -35,8 +35,8 @@ public class ExtendedCommonTermsQuery extends CommonTermsQuery {
private final MappedFieldType fieldType;
public ExtendedCommonTermsQuery(Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency, boolean disableCoord, MappedFieldType fieldType) {
super(highFreqOccur, lowFreqOccur, maxTermFrequency, disableCoord);
public ExtendedCommonTermsQuery(Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency, MappedFieldType fieldType) {
super(highFreqOccur, lowFreqOccur, maxTermFrequency);
this.fieldType = fieldType;
}

@ -57,8 +57,8 @@ public final class MinDocQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new ConstantScoreWeight(this) {
public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
return new ConstantScoreWeight(this, boost) {
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
final int maxDoc = context.reader().maxDoc();
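This hunk shows a Lucene 7 API change that recurs throughout the diff: the already-combined boost is now handed to `createWeight`, and the `Weight#getValueForNormalization`/`Weight#normalize` pass is gone along with query norms. A self-contained sketch of a custom query under the new contract (the `MatchAllConstantQuery` class is illustrative only, not part of this change):

import java.io.IOException;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;

/** Matches every document; exists only to show the Lucene 7 Weight contract. */
final class MatchAllConstantQuery extends Query {
    @Override
    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
        // The boost arrives as an argument instead of being applied in a
        // separate normalization phase after weight creation.
        return new ConstantScoreWeight(this, boost) {
            @Override
            public Scorer scorer(LeafReaderContext context) throws IOException {
                return new ConstantScoreScorer(this, score(),
                        DocIdSetIterator.all(context.reader().maxDoc()));
            }
        };
    }

    @Override
    public String toString(String field) {
        return "MatchAllConstantQuery";
    }

    @Override
    public boolean equals(Object other) {
        return sameClassAs(other);
    }

    @Override
    public int hashCode() {
        return classHash();
    }
}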

@ -25,9 +25,7 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.analyzing.AnalyzingQueryParser;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.FuzzyQuery;
@ -70,7 +68,7 @@ import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfN
* Also breaks fields with [type].[name] into a boolean query that must include the type
* as well as the query on the name.
*/
public class MapperQueryParser extends AnalyzingQueryParser {
public class MapperQueryParser extends QueryParser {
public static final Map<String, FieldQueryExtension> FIELD_QUERY_EXTENSIONS;
@ -103,14 +101,13 @@ public class MapperQueryParser extends AnalyzingQueryParser {
setAnalyzer(settings.analyzer());
setMultiTermRewriteMethod(settings.rewriteMethod());
setEnablePositionIncrements(settings.enablePositionIncrements());
setSplitOnWhitespace(settings.splitOnWhitespace());
setAutoGeneratePhraseQueries(settings.autoGeneratePhraseQueries());
setMaxDeterminizedStates(settings.maxDeterminizedStates());
setAllowLeadingWildcard(settings.allowLeadingWildcard());
setLowercaseExpandedTerms(false);
setPhraseSlop(settings.phraseSlop());
setDefaultOperator(settings.defaultOperator());
setFuzzyPrefixLength(settings.fuzzyPrefixLength());
setSplitOnWhitespace(settings.splitOnWhitespace());
}
/**
@ -175,7 +172,7 @@ public class MapperQueryParser extends AnalyzingQueryParser {
}
}
if (clauses.isEmpty()) return null; // happens for stopwords
return getBooleanQueryCoordDisabled(clauses);
return getBooleanQuery(clauses);
}
} else {
return getFieldQuerySingle(field, queryText, quoted);
@ -277,7 +274,7 @@ public class MapperQueryParser extends AnalyzingQueryParser {
}
}
if (clauses.isEmpty()) return null; // happens for stopwords
return getBooleanQueryCoordDisabled(clauses);
return getBooleanQuery(clauses);
}
} else {
return super.getFieldQuery(field, queryText, slop);
@ -328,7 +325,7 @@ public class MapperQueryParser extends AnalyzingQueryParser {
}
}
if (clauses.isEmpty()) return null; // happens for stopwords
return getBooleanQueryCoordDisabled(clauses);
return getBooleanQuery(clauses);
}
}
@ -386,7 +383,7 @@ public class MapperQueryParser extends AnalyzingQueryParser {
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
}
}
return getBooleanQueryCoordDisabled(clauses);
return getBooleanQuery(clauses);
}
} else {
return getFuzzyQuerySingle(field, termStr, minSimilarity);
@ -450,7 +447,7 @@ public class MapperQueryParser extends AnalyzingQueryParser {
}
}
if (clauses.isEmpty()) return null; // happens for stopwords
return getBooleanQueryCoordDisabled(clauses);
return getBooleanQuery(clauses);
}
} else {
return getPrefixQuerySingle(field, termStr);
@ -559,7 +556,7 @@ public class MapperQueryParser extends AnalyzingQueryParser {
innerClauses.add(new BooleanClause(super.getPrefixQuery(field, token),
BooleanClause.Occur.SHOULD));
}
posQuery = getBooleanQueryCoordDisabled(innerClauses);
posQuery = getBooleanQuery(innerClauses);
}
clauses.add(new BooleanClause(posQuery,
getDefaultOperator() == Operator.AND ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD));
@ -612,7 +609,7 @@ public class MapperQueryParser extends AnalyzingQueryParser {
}
}
if (clauses.isEmpty()) return null; // happens for stopwords
return getBooleanQueryCoordDisabled(clauses);
return getBooleanQuery(clauses);
}
} else {
return getWildcardQuerySingle(field, termStr);
@ -676,7 +673,7 @@ public class MapperQueryParser extends AnalyzingQueryParser {
}
}
if (clauses.isEmpty()) return null; // happens for stopwords
return getBooleanQueryCoordDisabled(clauses);
return getBooleanQuery(clauses);
}
} else {
return getRegexpQuerySingle(field, termStr);
@ -713,19 +710,6 @@ public class MapperQueryParser extends AnalyzingQueryParser {
}
}
/**
* @deprecated review all use of this, don't rely on coord
*/
@Deprecated
protected Query getBooleanQueryCoordDisabled(List<BooleanClause> clauses) throws ParseException {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.setDisableCoord(true);
for (BooleanClause clause : clauses) {
builder.add(clause);
}
return fixNegativeQueryIfNeeded(builder.build());
}
@Override
protected Query getBooleanQuery(List<BooleanClause> clauses) throws ParseException {

@ -22,31 +22,32 @@ import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.fielddata.AbstractNumericDocValues;
import org.elasticsearch.index.fielddata.AbstractSortedDocValues;
import java.io.IOException;
import java.util.Collection;
/**
* Utility class that ensures that a single collapse key is extracted per document.
*/
abstract class CollapsingDocValuesSource<T> {
abstract class CollapsingDocValuesSource<T> extends GroupSelector<T> {
protected final String field;
CollapsingDocValuesSource(String field) throws IOException {
this.field = field;
}
abstract T get(int doc);
abstract T copy(T value, T reuse);
abstract void setNextReader(LeafReader reader) throws IOException;
@Override
public void setGroups(Collection<SearchGroup<T>> groups) {
throw new UnsupportedOperationException();
}
/**
* Implementation for {@link NumericDocValues} and {@link SortedNumericDocValues}.
@ -54,35 +55,43 @@ abstract class CollapsingDocValuesSource<T> {
*/
static class Numeric extends CollapsingDocValuesSource<Long> {
private NumericDocValues values;
private Bits docsWithField;
private long value;
private boolean hasValue;
Numeric(String field) throws IOException {
super(field);
}
@Override
public Long get(int doc) {
if (docsWithField.get(doc)) {
return values.get(doc);
public State advanceTo(int doc) throws IOException {
if (values.advanceExact(doc)) {
hasValue = true;
value = values.longValue();
return State.ACCEPT;
} else {
return null;
hasValue = false;
return State.SKIP;
}
}
@Override
public Long copy(Long value, Long reuse) {
return value;
public Long currentValue() {
return hasValue ? value : null;
}
@Override
public void setNextReader(LeafReader reader) throws IOException {
public Long copyValue() {
return currentValue();
}
@Override
public void setNextReader(LeafReaderContext readerContext) throws IOException {
LeafReader reader = readerContext.reader();
DocValuesType type = getDocValuesType(reader, field);
if (type == null || type == DocValuesType.NONE) {
values = DocValues.emptyNumeric();
docsWithField = new Bits.MatchNoBits(reader.maxDoc());
return;
}
docsWithField = DocValues.getDocsWithField(reader, field);
switch (type) {
case NUMERIC:
values = DocValues.getNumeric(reader, field);
@ -92,17 +101,34 @@ abstract class CollapsingDocValuesSource<T> {
final SortedNumericDocValues sorted = DocValues.getSortedNumeric(reader, field);
values = DocValues.unwrapSingleton(sorted);
if (values == null) {
values = new NumericDocValues() {
values = new AbstractNumericDocValues() {
private long value;
@Override
public long get(int docID) {
sorted.setDocument(docID);
assert sorted.count() > 0;
if (sorted.count() > 1) {
throw new IllegalStateException("failed to collapse " + docID +
", the collapse field must be single valued");
public boolean advanceExact(int target) throws IOException {
if (sorted.advanceExact(target)) {
if (sorted.docValueCount() > 1) {
throw new IllegalStateException("failed to collapse " + target +
", the collapse field must be single valued");
}
value = sorted.nextValue();
return true;
} else {
return false;
}
return sorted.valueAt(0);
}
@Override
public int docID() {
return sorted.docID();
}
@Override
public long longValue() throws IOException {
return value;
}
};
}
break;
@ -119,47 +145,56 @@ abstract class CollapsingDocValuesSource<T> {
* Fails with an {@link IllegalStateException} if a document contains multiple values for the specified field.
*/
static class Keyword extends CollapsingDocValuesSource<BytesRef> {
private Bits docsWithField;
private SortedDocValues values;
private int ord;
Keyword(String field) throws IOException {
super(field);
}
@Override
public BytesRef get(int doc) {
if (docsWithField.get(doc)) {
return values.get(doc);
public org.apache.lucene.search.grouping.GroupSelector.State advanceTo(int doc)
throws IOException {
if (values.advanceExact(doc)) {
ord = values.ordValue();
return State.ACCEPT;
} else {
return null;
ord = -1;
return State.SKIP;
}
}
@Override
public BytesRef copy(BytesRef value, BytesRef reuse) {
public BytesRef currentValue() {
if (ord == -1) {
return null;
} else {
try {
return values.lookupOrd(ord);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
@Override
public BytesRef copyValue() {
BytesRef value = currentValue();
if (value == null) {
return null;
}
if (reuse != null) {
reuse.bytes = ArrayUtil.grow(reuse.bytes, value.length);
reuse.offset = 0;
reuse.length = value.length;
System.arraycopy(value.bytes, value.offset, reuse.bytes, 0, value.length);
return reuse;
} else {
return BytesRef.deepCopyOf(value);
}
}
@Override
public void setNextReader(LeafReader reader) throws IOException {
public void setNextReader(LeafReaderContext readerContext) throws IOException {
LeafReader reader = readerContext.reader();
DocValuesType type = getDocValuesType(reader, field);
if (type == null || type == DocValuesType.NONE) {
values = DocValues.emptySorted();
docsWithField = new Bits.MatchNoBits(reader.maxDoc());
return;
}
docsWithField = DocValues.getDocsWithField(reader, field);
switch (type) {
case SORTED:
values = DocValues.getSorted(reader, field);
@ -169,20 +204,36 @@ abstract class CollapsingDocValuesSource<T> {
final SortedSetDocValues sorted = DocValues.getSortedSet(reader, field);
values = DocValues.unwrapSingleton(sorted);
if (values == null) {
values = new SortedDocValues() {
values = new AbstractSortedDocValues() {
private int ord;
@Override
public int getOrd(int docID) {
sorted.setDocument(docID);
int ord = (int) sorted.nextOrd();
if (sorted.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) {
throw new IllegalStateException("failed to collapse " + docID +
", the collapse field must be single valued");
public boolean advanceExact(int target) throws IOException {
if (sorted.advanceExact(target)) {
ord = (int) sorted.nextOrd();
if (sorted.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) {
throw new IllegalStateException("failed to collapse " + target +
", the collapse field must be single valued");
}
return true;
} else {
return false;
}
}
@Override
public int docID() {
return sorted.docID();
}
@Override
public int ordValue() {
return ord;
}
@Override
public BytesRef lookupOrd(int ord) {
public BytesRef lookupOrd(int ord) throws IOException {
return sorted.lookupOrd(ord);
}

@ -18,13 +18,11 @@
*/
package org.apache.lucene.search.grouping;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BytesRef;
import java.io.IOException;
import java.util.Collection;
@ -37,7 +35,7 @@ import static org.apache.lucene.search.SortField.Type.SCORE;
* output. The collapsing is done in a single pass by selecting only the top sorted document per collapse key.
* The value used for the collapse key of each group can be found in {@link CollapseTopFieldDocs#collapseValues}.
*/
public abstract class CollapsingTopDocsCollector<T> extends FirstPassGroupingCollector<T> {
public final class CollapsingTopDocsCollector<T> extends FirstPassGroupingCollector<T> {
protected final String collapseField;
protected final Sort sort;
@ -47,9 +45,9 @@ public abstract class CollapsingTopDocsCollector<T> extends FirstPassGroupingCol
private float maxScore;
private final boolean trackMaxScore;
private CollapsingTopDocsCollector(String collapseField, Sort sort,
CollapsingTopDocsCollector(GroupSelector<T> groupSelector, String collapseField, Sort sort,
int topN, boolean trackMaxScore) throws IOException {
super(sort, topN);
super(groupSelector, sort, topN);
this.collapseField = collapseField;
this.trackMaxScore = trackMaxScore;
if (trackMaxScore) {
@ -65,7 +63,7 @@ public abstract class CollapsingTopDocsCollector<T> extends FirstPassGroupingCol
* {@link CollapseTopFieldDocs}. The collapsing needs only one pass so we can create the final top docs at the end
* of the first pass.
*/
public CollapseTopFieldDocs getTopDocs() {
public CollapseTopFieldDocs getTopDocs() throws IOException {
Collection<SearchGroup<T>> groups = super.getTopGroups(0, true);
if (groups == null) {
return new CollapseTopFieldDocs(collapseField, totalHitCount, new ScoreDoc[0],
@ -121,57 +119,6 @@ public abstract class CollapsingTopDocsCollector<T> extends FirstPassGroupingCol
totalHitCount++;
}
private static class Numeric extends CollapsingTopDocsCollector<Long> {
private final CollapsingDocValuesSource.Numeric source;
private Numeric(String collapseField, Sort sort, int topN, boolean trackMaxScore) throws IOException {
super(collapseField, sort, topN, trackMaxScore);
source = new CollapsingDocValuesSource.Numeric(collapseField);
}
@Override
protected void doSetNextReader(LeafReaderContext readerContext) throws IOException {
super.doSetNextReader(readerContext);
source.setNextReader(readerContext.reader());
}
@Override
protected Long getDocGroupValue(int doc) {
return source.get(doc);
}
@Override
protected Long copyDocGroupValue(Long groupValue, Long reuse) {
return source.copy(groupValue, reuse);
}
}
private static class Keyword extends CollapsingTopDocsCollector<BytesRef> {
private final CollapsingDocValuesSource.Keyword source;
private Keyword(String collapseField, Sort sort, int topN, boolean trackMaxScore) throws IOException {
super(collapseField, sort, topN, trackMaxScore);
source = new CollapsingDocValuesSource.Keyword(collapseField);
}
@Override
protected void doSetNextReader(LeafReaderContext readerContext) throws IOException {
super.doSetNextReader(readerContext);
source.setNextReader(readerContext.reader());
}
@Override
protected BytesRef getDocGroupValue(int doc) {
return source.get(doc);
}
@Override
protected BytesRef copyDocGroupValue(BytesRef groupValue, BytesRef reuse) {
return source.copy(groupValue, reuse);
}
}
/**
* Create a collapsing top docs collector on a {@link org.apache.lucene.index.NumericDocValues} field.
* It accepts also {@link org.apache.lucene.index.SortedNumericDocValues} field but
@ -189,7 +136,8 @@ public abstract class CollapsingTopDocsCollector<T> extends FirstPassGroupingCol
*/
public static CollapsingTopDocsCollector<?> createNumeric(String collapseField, Sort sort,
int topN, boolean trackMaxScore) throws IOException {
return new Numeric(collapseField, sort, topN, trackMaxScore);
return new CollapsingTopDocsCollector<>(new CollapsingDocValuesSource.Numeric(collapseField),
collapseField, sort, topN, trackMaxScore);
}
/**
@ -208,7 +156,8 @@ public abstract class CollapsingTopDocsCollector<T> extends FirstPassGroupingCol
*/
public static CollapsingTopDocsCollector<?> createKeyword(String collapseField, Sort sort,
int topN, boolean trackMaxScore) throws IOException {
return new Keyword(collapseField, sort, topN, trackMaxScore);
return new CollapsingTopDocsCollector<>(new CollapsingDocValuesSource.Keyword(collapseField),
collapseField, sort, topN, trackMaxScore);
}
}
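The collector is now a concrete class parameterized by Lucene's new `GroupSelector` abstraction, but the factory entry points keep their signatures. A usage sketch (the `user_id` field and `CollapseSketch` wrapper are hypothetical):

package org.apache.lucene.search.grouping;

import java.io.IOException;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;

class CollapseSketch {
    /** Returns the top 10 hits, keeping only the best document per distinct "user_id" value. */
    static CollapseTopFieldDocs topPerUser(IndexSearcher searcher, Query query) throws IOException {
        CollapsingTopDocsCollector<?> collector =
                CollapsingTopDocsCollector.createKeyword("user_id", Sort.RELEVANCE, 10, true);
        searcher.search(query, collector);
        // Collapsing needs only one pass, so the top docs are ready right after collection.
        return collector.getTopDocs();
    }
}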

@ -82,7 +82,7 @@ public class Version implements Comparable<Version> {
public static final Version V_5_5_0_UNRELEASED = new Version(V_5_5_0_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_5_0);
public static final int V_6_0_0_alpha1_ID_UNRELEASED = 6000001;
public static final Version V_6_0_0_alpha1_UNRELEASED =
new Version(V_6_0_0_alpha1_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_5_0);
new Version(V_6_0_0_alpha1_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_7_0_0);
public static final Version CURRENT = V_6_0_0_alpha1_UNRELEASED;
// unreleased versions must be added to the above list with the suffix _UNRELEASED (with the exception of CURRENT)

@ -18,13 +18,13 @@
*/
package org.elasticsearch.common.geo;
import org.apache.lucene.geo.Rectangle;
import org.apache.lucene.spatial.util.MortonEncoder;
import org.apache.lucene.util.BitUtil;
import java.util.ArrayList;
import java.util.Collection;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.geo.Rectangle;
import org.apache.lucene.util.BitUtil;
/**
* Utilities for converting to/from the GeoHash standard
*
@ -42,19 +42,35 @@ public class GeoHashUtils {
/** maximum precision for geohash strings */
public static final int PRECISION = 12;
private static final short MORTON_OFFSET = (GeoPointField.BITS<<1) - (PRECISION*5);
/** number of bits used for quantizing latitude and longitude values */
public static final short BITS = 31;
/** scaling factors to convert lat/lon into unsigned space */
private static final double LAT_SCALE = (0x1L<<BITS)/180.0D;
private static final double LON_SCALE = (0x1L<<BITS)/360.0D;
private static final short MORTON_OFFSET = (BITS<<1) - (PRECISION*5);
// No instance:
private GeoHashUtils() {
}
/*************************
* 31 bit encoding utils *
*************************/
public static long encodeLatLon(final double lat, final double lon) {
long result = MortonEncoder.encode(lat, lon);
if (result == 0xFFFFFFFFFFFFFFFFL) {
return result & 0xC000000000000000L;
}
return result >>> 2;
}
/**
* Encode lon/lat to the geohash based long format (lon/lat interleaved, 4 least significant bits = level)
*/
public static final long longEncode(final double lon, final double lat, final int level) {
// shift to appropriate level
final short msf = (short)(((12 - level) * 5) + MORTON_OFFSET);
return ((BitUtil.flipFlop(GeoPointField.encodeLatLon(lat, lon)) >>> msf) << 4) | level;
return ((BitUtil.flipFlop(encodeLatLon(lat, lon)) >>> msf) << 4) | level;
}
/**
@ -120,7 +136,7 @@ public class GeoHashUtils {
*/
public static final String stringEncode(final double lon, final double lat, final int level) {
// convert to geohashlong
final long ghLong = fromMorton(GeoPointField.encodeLatLon(lat, lon), level);
final long ghLong = fromMorton(encodeLatLon(lat, lon), level);
return stringEncode(ghLong);
}
@ -141,7 +157,7 @@ public class GeoHashUtils {
StringBuilder geoHash = new StringBuilder();
short precision = 0;
final short msf = (GeoPointField.BITS<<1)-5;
final short msf = (BITS<<1)-5;
long mask = 31L<<msf;
do {
geoHash.append(BASE_32[(int)((mask & hashedVal)>>>(msf-(precision*5)))]);
@ -303,13 +319,31 @@ public class GeoHashUtils {
return neighbors;
}
/** decode longitude value from morton encoded geo point */
public static final double decodeLongitude(final long hash) {
return unscaleLon(BitUtil.deinterleave(hash));
}
/** decode latitude value from morton encoded geo point */
public static final double decodeLatitude(final long hash) {
return unscaleLat(BitUtil.deinterleave(hash >>> 1));
}
private static double unscaleLon(final long val) {
return (val / LON_SCALE) - 180;
}
private static double unscaleLat(final long val) {
return (val / LAT_SCALE) - 90;
}
/** returns the latitude value from the string based geohash */
public static final double decodeLatitude(final String geohash) {
return GeoPointField.decodeLatitude(mortonEncode(geohash));
return decodeLatitude(mortonEncode(geohash));
}
/** returns the latitude value from the string based geohash */
public static final double decodeLongitude(final String geohash) {
return GeoPointField.decodeLongitude(mortonEncode(geohash));
return decodeLongitude(mortonEncode(geohash));
}
}
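With `GeoPointField` gone, the 31-bit scaling and morton helpers now live in `GeoHashUtils` itself. A quick round-trip sketch using the public methods above (the coordinates are arbitrary; values are quantized to 31 bits, so the decode is close rather than exact):

import org.elasticsearch.common.geo.GeoHashUtils;

class GeoHashRoundTrip {
    public static void main(String[] args) {
        double lat = 48.8566, lon = 2.3522;
        String hash = GeoHashUtils.stringEncode(lon, lat, GeoHashUtils.PRECISION);
        double decodedLat = GeoHashUtils.decodeLatitude(hash);
        double decodedLon = GeoHashUtils.decodeLongitude(hash);
        System.out.printf("%s -> %.6f, %.6f%n", hash, decodedLat, decodedLon);
    }
}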

@ -23,7 +23,6 @@ import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.document.LatLonPoint;
import org.apache.lucene.geo.GeoEncodingUtils;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.util.BitUtil;
import org.apache.lucene.util.BytesRef;
@ -87,8 +86,8 @@ public final class GeoPoint {
}
public GeoPoint resetFromIndexHash(long hash) {
lon = GeoPointField.decodeLongitude(hash);
lat = GeoPointField.decodeLatitude(hash);
lon = GeoHashUtils.decodeLongitude(hash);
lat = GeoHashUtils.decodeLatitude(hash);
return this;
}
@ -112,7 +111,7 @@ public final class GeoPoint {
public GeoPoint resetFromGeoHash(String geohash) {
final long hash = mortonEncode(geohash);
return this.reset(GeoPointField.decodeLatitude(hash), GeoPointField.decodeLongitude(hash));
return this.reset(GeoHashUtils.decodeLatitude(hash), GeoHashUtils.decodeLongitude(hash));
}
public GeoPoint resetFromGeoHash(long geohashLong) {

@ -22,7 +22,6 @@ package org.elasticsearch.common.geo;
import org.apache.lucene.geo.Rectangle;
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.SloppyMath;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.unit.DistanceUnit;
@ -511,35 +510,40 @@ public class GeoUtils {
final GeoPoint... fromPoints) {
final GeoPointValues singleValues = FieldData.unwrapSingleton(geoPointValues);
if (singleValues != null && fromPoints.length == 1) {
final Bits docsWithField = FieldData.unwrapSingletonBits(geoPointValues);
return FieldData.singleton(new NumericDoubleValues() {
@Override
public double get(int docID) {
if (docsWithField != null && !docsWithField.get(docID)) {
return 0d;
}
final GeoPoint to = singleValues.get(docID);
public boolean advanceExact(int doc) throws IOException {
return singleValues.advanceExact(doc);
}
@Override
public double doubleValue() throws IOException {
final GeoPoint from = fromPoints[0];
final GeoPoint to = singleValues.geoPointValue();
return distance.calculate(from.lat(), from.lon(), to.lat(), to.lon(), unit);
}
}, docsWithField);
});
} else {
return new SortingNumericDoubleValues() {
@Override
public void setDocument(int doc) {
geoPointValues.setDocument(doc);
resize(geoPointValues.count() * fromPoints.length);
int v = 0;
for (GeoPoint from : fromPoints) {
for (int i = 0; i < geoPointValues.count(); ++i) {
final GeoPoint point = geoPointValues.valueAt(i);
values[v] = distance.calculate(from.lat(), from.lon(), point.lat(), point.lon(), unit);
v++;
public boolean advanceExact(int target) throws IOException {
if (geoPointValues.advanceExact(target)) {
resize(geoPointValues.docValueCount() * fromPoints.length);
int v = 0;
for (int i = 0; i < geoPointValues.docValueCount(); ++i) {
final GeoPoint point = geoPointValues.nextValue();
for (GeoPoint from : fromPoints) {
values[v] = distance.calculate(from.lat(), from.lon(), point.lat(), point.lon(), unit);
v++;
}
}
sort();
return true;
} else {
return false;
}
sort();
}
};
}

@ -51,14 +51,14 @@ import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.SortedNumericSortField;
import org.apache.lucene.search.SortedSetSortField;
import org.apache.lucene.search.TimeLimitingCollector;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.grouping.CollapseTopFieldDocs;
import org.apache.lucene.search.SortedNumericSortField;
import org.apache.lucene.search.SortedSetSortField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
@ -89,9 +89,9 @@ import java.util.Map;
import java.util.Objects;
public class Lucene {
public static final String LATEST_DOC_VALUES_FORMAT = "Lucene54";
public static final String LATEST_DOC_VALUES_FORMAT = "Lucene70";
public static final String LATEST_POSTINGS_FORMAT = "Lucene50";
public static final String LATEST_CODEC = "Lucene62";
public static final String LATEST_CODEC = "Lucene70";
static {
Deprecated annotation = PostingsFormat.forName(LATEST_POSTINGS_FORMAT).getClass().getAnnotation(Deprecated.class);

@ -19,8 +19,8 @@
package org.elasticsearch.common.lucene;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReader.CoreClosedListener;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardUtils;
@ -46,8 +46,8 @@ import java.util.concurrent.ConcurrentHashMap;
*/
public final class ShardCoreKeyMap {
private final Map<Object, ShardId> coreKeyToShard;
private final Map<String, Set<Object>> indexToCoreKey;
private final Map<IndexReader.CacheKey, ShardId> coreKeyToShard;
private final Map<String, Set<IndexReader.CacheKey>> indexToCoreKey;
public ShardCoreKeyMap() {
coreKeyToShard = new ConcurrentHashMap<>();
@ -63,7 +63,11 @@ public final class ShardCoreKeyMap {
if (shardId == null) {
throw new IllegalArgumentException("Could not extract shard id from " + reader);
}
final Object coreKey = reader.getCoreCacheKey();
final IndexReader.CacheHelper cacheHelper = reader.getCoreCacheHelper();
if (cacheHelper == null) {
throw new IllegalArgumentException("Reader " + reader + " does not support caching");
}
final IndexReader.CacheKey coreKey = cacheHelper.getKey();
if (coreKeyToShard.containsKey(coreKey)) {
// Do this check before entering the synchronized block in order to
@ -75,18 +79,18 @@ public final class ShardCoreKeyMap {
final String index = shardId.getIndexName();
synchronized (this) {
if (coreKeyToShard.containsKey(coreKey) == false) {
Set<Object> objects = indexToCoreKey.get(index);
Set<IndexReader.CacheKey> objects = indexToCoreKey.get(index);
if (objects == null) {
objects = new HashSet<>();
indexToCoreKey.put(index, objects);
}
final boolean added = objects.add(coreKey);
assert added;
CoreClosedListener listener = ownerCoreCacheKey -> {
IndexReader.ClosedListener listener = ownerCoreCacheKey -> {
assert coreKey == ownerCoreCacheKey;
synchronized (ShardCoreKeyMap.this) {
coreKeyToShard.remove(ownerCoreCacheKey);
final Set<Object> coreKeys = indexToCoreKey.get(index);
final Set<IndexReader.CacheKey> coreKeys = indexToCoreKey.get(index);
final boolean removed = coreKeys.remove(coreKey);
assert removed;
if (coreKeys.isEmpty()) {
@ -96,7 +100,7 @@ public final class ShardCoreKeyMap {
};
boolean addedListener = false;
try {
reader.addCoreClosedListener(listener);
cacheHelper.addClosedListener(listener);
addedListener = true;
// Only add the core key to the map as a last operation so that
@ -131,7 +135,7 @@ public final class ShardCoreKeyMap {
* Get the set of core cache keys associated with the given index.
*/
public synchronized Set<Object> getCoreKeysForIndex(String index) {
final Set<Object> objects = indexToCoreKey.get(index);
final Set<IndexReader.CacheKey> objects = indexToCoreKey.get(index);
if (objects == null) {
return Collections.emptySet();
}
@ -154,9 +158,9 @@ public final class ShardCoreKeyMap {
if (assertionsEnabled == false) {
throw new AssertionError("only run this if assertions are enabled");
}
Collection<Set<Object>> values = indexToCoreKey.values();
Collection<Set<IndexReader.CacheKey>> values = indexToCoreKey.values();
int size = 0;
for (Set<Object> value : values) {
for (Set<IndexReader.CacheKey> value : values) {
size += value.size();
}
return size == coreKeyToShard.size();
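The `getCoreCacheHelper()`/`CacheKey` pattern in this hunk replaces `getCoreCacheKey()`/`addCoreClosedListener()` and repeats in several files below (`ElasticsearchDirectoryReader`, `VersionsAndSeqNoResolver`, `BitsetFilterCache`). A condensed sketch of the new contract (the `CoreCacheSketch` class is illustrative):

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReader;

class CoreCacheSketch {
    /** Registers a callback that fires when the reader's underlying core is closed. */
    static IndexReader.CacheKey registerCoreListener(LeafReader reader) {
        IndexReader.CacheHelper helper = reader.getCoreCacheHelper();
        if (helper == null) {
            // Unlike getCoreCacheKey(), the new API lets a reader opt out of caching.
            throw new IllegalArgumentException("Reader " + reader + " does not support caching");
        }
        IndexReader.CacheKey key = helper.getKey();
        helper.addClosedListener(closedKey -> {
            assert closedKey == key;
            // evict anything cached under this key here
        });
        return key;
    }
}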

@ -105,27 +105,17 @@ public final class AllTermQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
if (needsScores == false) {
return new TermQuery(term).createWeight(searcher, needsScores);
return new TermQuery(term).createWeight(searcher, needsScores, boost);
}
final TermContext termStates = TermContext.build(searcher.getTopReaderContext(), term);
final CollectionStatistics collectionStats = searcher.collectionStatistics(term.field());
final TermStatistics termStats = searcher.termStatistics(term, termStates);
final Similarity similarity = searcher.getSimilarity(needsScores);
final SimWeight stats = similarity.computeWeight(collectionStats, termStats);
final SimWeight stats = similarity.computeWeight(boost, collectionStats, termStats);
return new Weight(this) {
@Override
public float getValueForNormalization() throws IOException {
return stats.getValueForNormalization();
}
@Override
public void normalize(float norm, float topLevelBoost) {
stats.normalize(norm, topLevelBoost);
}
@Override
public void extractTerms(Set<Term> terms) {
terms.add(term);

@ -49,6 +49,12 @@ public final class ElasticsearchDirectoryReader extends FilterDirectoryReader {
return this.shardId;
}
@Override
public CacheHelper getReaderCacheHelper() {
// safe to delegate since this reader does not alter the index
return in.getReaderCacheHelper();
}
@Override
protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
return new ElasticsearchDirectoryReader(in, wrapper, shardId);
@ -84,14 +90,17 @@ public final class ElasticsearchDirectoryReader extends FilterDirectoryReader {
* @throws IllegalArgumentException if the reader doesn't contain an {@link ElasticsearchDirectoryReader} in its hierarchy
*/
@SuppressForbidden(reason = "This is the only sane way to add a ReaderClosedListener")
public static void addReaderCloseListener(DirectoryReader reader, IndexReader.ReaderClosedListener listener) {
public static void addReaderCloseListener(DirectoryReader reader, IndexReader.ClosedListener listener) {
ElasticsearchDirectoryReader elasticsearchDirectoryReader = getElasticsearchDirectoryReader(reader);
if (elasticsearchDirectoryReader != null) {
assert reader.getCoreCacheKey() == elasticsearchDirectoryReader.getCoreCacheKey();
elasticsearchDirectoryReader.addReaderClosedListener(listener);
return;
if (elasticsearchDirectoryReader == null) {
throw new IllegalArgumentException("Can't install close listener reader is not an ElasticsearchDirectoryReader/ElasticsearchLeafReader");
}
throw new IllegalArgumentException("Can't install close listener reader is not an ElasticsearchDirectoryReader/ElasticsearchLeafReader");
IndexReader.CacheHelper cacheHelper = elasticsearchDirectoryReader.getReaderCacheHelper();
if (cacheHelper == null) {
throw new IllegalArgumentException("Reader " + elasticsearchDirectoryReader + " does not support caching");
}
assert cacheHelper.getKey() == reader.getReaderCacheHelper().getKey();
cacheHelper.addClosedListener(listener);
}
/**

@ -49,8 +49,13 @@ public final class ElasticsearchLeafReader extends FilterLeafReader {
}
@Override
public Object getCoreCacheKey() {
return in.getCoreCacheKey();
public CacheHelper getCoreCacheHelper() {
return in.getCoreCacheHelper();
}
@Override
public CacheHelper getReaderCacheHelper() {
return in.getReaderCacheHelper();
}
public static ElasticsearchLeafReader getElasticsearchLeafReader(LeafReader reader) {

@ -121,7 +121,6 @@ public class Queries {
if (isNegativeQuery(q)) {
BooleanQuery bq = (BooleanQuery) q;
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.setDisableCoord(bq.isCoordDisabled());
for (BooleanClause clause : bq) {
builder.add(clause);
}
@ -154,7 +153,6 @@ public class Queries {
int msm = calculateMinShouldMatch(optionalClauses, minimumShouldMatch);
if (0 < msm) {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.setDisableCoord(query.isCoordDisabled());
for (BooleanClause clause : query) {
builder.add(clause);
}
@ -170,10 +168,7 @@ public class Queries {
* otherwise return the original query.
*/
public static Query maybeApplyMinimumShouldMatch(Query query, @Nullable String minimumShouldMatch) {
// If the coordination factor is disabled on a boolean query we don't apply the minimum should match.
// This is done to make sure that the minimum_should_match doesn't get applied when there is only one word
// and multiple variations of the same word in the query (synonyms for instance).
if (query instanceof BooleanQuery && !((BooleanQuery) query).isCoordDisabled()) {
if (query instanceof BooleanQuery) {
return applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);
} else if (query instanceof ExtendedCommonTermsQuery) {
((ExtendedCommonTermsQuery)query).setLowFreqMinimumNumberShouldMatch(minimumShouldMatch);

@ -62,7 +62,7 @@ public class FieldValueFactorFunction extends ScoreFunction {
public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) {
final SortedNumericDoubleValues values;
if(indexFieldData == null) {
values = FieldData.emptySortedNumericDoubles(ctx.reader().maxDoc());
values = FieldData.emptySortedNumericDoubles();
} else {
values = this.indexFieldData.load(ctx).getDoubleValues();
}
@ -70,16 +70,16 @@ public class FieldValueFactorFunction extends ScoreFunction {
return new LeafScoreFunction() {
@Override
public double score(int docId, float subQueryScore) {
values.setDocument(docId);
final int numValues = values.count();
public double score(int docId, float subQueryScore) throws IOException {
double value;
if (numValues > 0) {
value = values.valueAt(0);
} else if (missing != null) {
value = missing;
if (values.advanceExact(docId)) {
value = values.nextValue();
} else {
throw new ElasticsearchException("Missing value for field [" + field + "]");
if (missing != null) {
value = missing;
} else {
throw new ElasticsearchException("Missing value for field [" + field + "]");
}
}
double val = value * boostFactor;
double result = modifier.apply(val);
@ -91,7 +91,7 @@ public class FieldValueFactorFunction extends ScoreFunction {
}
@Override
public Explanation explainScore(int docId, Explanation subQueryScore) {
public Explanation explainScore(int docId, Explanation subQueryScore) throws IOException {
String modifierStr = modifier != null ? modifier.toString() : "";
String defaultStr = missing != null ? "?:" + missing : "";
double score = score(docId, subQueryScore.getValue());

@ -135,9 +135,9 @@ public class FiltersFunctionScoreQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
if (needsScores == false && minScore == null) {
return subQuery.createWeight(searcher, needsScores);
return subQuery.createWeight(searcher, needsScores, boost);
}
boolean subQueryNeedsScores = combineFunction != CombineFunction.REPLACE;
@ -146,7 +146,7 @@ public class FiltersFunctionScoreQuery extends Query {
subQueryNeedsScores |= filterFunctions[i].function.needsScores();
filterWeights[i] = searcher.createNormalizedWeight(filterFunctions[i].filter, false);
}
Weight subQueryWeight = subQuery.createWeight(searcher, subQueryNeedsScores);
Weight subQueryWeight = subQuery.createWeight(searcher, subQueryNeedsScores, boost);
return new CustomBoostFactorWeight(this, subQueryWeight, filterWeights, subQueryNeedsScores);
}
@ -168,16 +168,6 @@ public class FiltersFunctionScoreQuery extends Query {
subQueryWeight.extractTerms(terms);
}
@Override
public float getValueForNormalization() throws IOException {
return subQueryWeight.getValueForNormalization();
}
@Override
public void normalize(float norm, float boost) {
subQueryWeight.normalize(norm, boost);
}
private FiltersFunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException {
Scorer subQueryScorer = subQueryWeight.scorer(context);
if (subQueryScorer == null) {
@ -281,7 +271,7 @@ public class FiltersFunctionScoreQuery extends Query {
return scoreCombiner.combine(subQueryScore, factor, maxBoost);
}
protected double computeScore(int docId, float subQueryScore) {
protected double computeScore(int docId, float subQueryScore) throws IOException {
double factor = 1d;
switch(scoreMode) {
case FIRST:

@ -91,16 +91,16 @@ public class FunctionScoreQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
if (needsScores == false && minScore == null) {
return subQuery.createWeight(searcher, needsScores);
return subQuery.createWeight(searcher, needsScores, boost);
}
boolean subQueryNeedsScores =
combineFunction != CombineFunction.REPLACE // if we don't replace we need the original score
|| function == null // when the function is null, we just multiply the score, so we need it
|| function.needsScores(); // some scripts can replace with a script that returns eg. 1/_score
Weight subQueryWeight = subQuery.createWeight(searcher, subQueryNeedsScores);
Weight subQueryWeight = subQuery.createWeight(searcher, subQueryNeedsScores, boost);
return new CustomBoostFactorWeight(this, subQueryWeight, subQueryNeedsScores);
}
@ -120,16 +120,6 @@ public class FunctionScoreQuery extends Query {
subQueryWeight.extractTerms(terms);
}
@Override
public float getValueForNormalization() throws IOException {
return subQueryWeight.getValueForNormalization();
}
@Override
public void normalize(float norm, float boost) {
subQueryWeight.normalize(norm, boost);
}
private FunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException {
Scorer subQueryScorer = subQueryWeight.scorer(context);
if (subQueryScorer == null) {

@ -26,7 +26,7 @@ import java.io.IOException;
/** Per-leaf {@link ScoreFunction}. */
public abstract class LeafScoreFunction {
public abstract double score(int docId, float subQueryScore);
public abstract double score(int docId, float subQueryScore) throws IOException;
public abstract Explanation explainScore(int docId, Explanation subQueryScore) throws IOException;

@ -25,6 +25,7 @@ import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import java.io.IOException;
import java.util.Objects;
/**
@ -68,14 +69,16 @@ public class RandomScoreFunction extends ScoreFunction {
return new LeafScoreFunction() {
@Override
public double score(int docId, float subQueryScore) {
uidByteData.setDocument(docId);
int hash = StringHelper.murmurhash3_x86_32(uidByteData.valueAt(0), saltedSeed);
public double score(int docId, float subQueryScore) throws IOException {
if (uidByteData.advanceExact(docId) == false) {
throw new AssertionError("Document without a _uid");
}
int hash = StringHelper.murmurhash3_x86_32(uidByteData.nextValue(), saltedSeed);
return (hash & 0x00FFFFFF) / (float)(1 << 24); // only use the lower 24 bits to construct a float from 0.0-1.0
}
@Override
public Explanation explainScore(int docId, Explanation subQueryScore) {
public Explanation explainScore(int docId, Explanation subQueryScore) throws IOException {
return Explanation.match(
CombineFunction.toFloat(score(docId, subQueryScore.getValue())),
"random score function (seed: " + originalSeed + ")");

@ -52,7 +52,7 @@ public class WeightFactorFunction extends ScoreFunction {
final LeafScoreFunction leafFunction = scoreFunction.getLeafScoreFunction(ctx);
return new LeafScoreFunction() {
@Override
public double score(int docId, float subQueryScore) {
public double score(int docId, float subQueryScore) throws IOException {
return leafFunction.score(docId, subQueryScore) * getWeight();
}

@ -52,12 +52,7 @@ final class PerThreadIDVersionAndSeqNoLookup {
/** terms enum for uid field */
private final TermsEnum termsEnum;
/** _version data */
private final NumericDocValues versions;
/** _seq_no data */
private final NumericDocValues seqNos;
/** _primary_term data */
private final NumericDocValues primaryTerms;
/** Reused for iteration (when the term exists) */
private PostingsEnum docsEnum;
@ -72,30 +67,33 @@ final class PerThreadIDVersionAndSeqNoLookup {
Terms terms = fields.terms(UidFieldMapper.NAME);
termsEnum = terms.iterator();
if (termsEnum == null) {
throw new IllegalArgumentException("reader misses the [" + UidFieldMapper.NAME +
"] field");
throw new IllegalArgumentException("reader misses the [" + UidFieldMapper.NAME + "] field");
}
versions = reader.getNumericDocValues(VersionFieldMapper.NAME);
if (versions == null) {
throw new IllegalArgumentException("reader misses the [" + VersionFieldMapper.NAME +
"] field");
if (reader.getNumericDocValues(VersionFieldMapper.NAME) == null) {
throw new IllegalArgumentException("reader misses the [" + VersionFieldMapper.NAME + "] field");
}
seqNos = reader.getNumericDocValues(SeqNoFieldMapper.NAME);
primaryTerms = reader.getNumericDocValues(SeqNoFieldMapper.PRIMARY_TERM_NAME);
Object readerKey = null;
assert (readerKey = reader.getCoreCacheKey()) != null;
assert (readerKey = reader.getCoreCacheHelper().getKey()) != null;
this.readerKey = readerKey;
}
/** Return null if id is not found. */
public DocIdAndVersion lookupVersion(BytesRef id, Bits liveDocs, LeafReaderContext context)
throws IOException {
assert context.reader().getCoreCacheKey().equals(readerKey) :
assert context.reader().getCoreCacheHelper().getKey().equals(readerKey) :
"context's reader is not the same as the reader class was initialized on.";
int docID = getDocID(id, liveDocs);
if (docID != DocIdSetIterator.NO_MORE_DOCS) {
return new DocIdAndVersion(docID, versions.get(docID), context);
final NumericDocValues versions = context.reader().getNumericDocValues(VersionFieldMapper.NAME);
if (versions == null) {
throw new IllegalArgumentException("reader misses the [" + VersionFieldMapper.NAME + "] field");
}
if (versions.advanceExact(docID) == false) {
throw new IllegalArgumentException("Document [" + docID + "] misses the [" + VersionFieldMapper.NAME + "] field");
}
return new DocIdAndVersion(docID, versions.longValue(), context);
} else {
return null;
}
@ -124,11 +122,18 @@ final class PerThreadIDVersionAndSeqNoLookup {
/** Return null if id is not found. */
DocIdAndSeqNo lookupSeqNo(BytesRef id, Bits liveDocs, LeafReaderContext context) throws IOException {
assert context.reader().getCoreCacheKey().equals(readerKey) :
assert context.reader().getCoreCacheHelper().getKey().equals(readerKey) :
"context's reader is not the same as the reader class was initialized on.";
int docID = getDocID(id, liveDocs);
if (docID != DocIdSetIterator.NO_MORE_DOCS) {
return new DocIdAndSeqNo(docID, seqNos == null ? SequenceNumbersService.UNASSIGNED_SEQ_NO : seqNos.get(docID), context);
NumericDocValues seqNos = context.reader().getNumericDocValues(SeqNoFieldMapper.NAME);
long seqNo;
if (seqNos != null && seqNos.advanceExact(docID)) {
seqNo = seqNos.longValue();
} else {
seqNo = SequenceNumbersService.UNASSIGNED_SEQ_NO;
}
return new DocIdAndSeqNo(docID, seqNo, context);
} else {
return null;
}
@ -139,7 +144,12 @@ final class PerThreadIDVersionAndSeqNoLookup {
*
* Note that 0 is an illegal primary term. See {@link org.elasticsearch.cluster.metadata.IndexMetaData#primaryTerm(int)}
**/
long lookUpPrimaryTerm(int docID) throws IOException {
return primaryTerms == null ? 0 : primaryTerms.get(docID);
long lookUpPrimaryTerm(int docID, LeafReader reader) throws IOException {
NumericDocValues primaryTerms = reader.getNumericDocValues(SeqNoFieldMapper.PRIMARY_TERM_NAME);
if (primaryTerms != null && primaryTerms.advanceExact(docID)) {
return primaryTerms.longValue();
} else {
return 0;
}
}
}
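The cached per-reader doc values fields are gone because Lucene 7 doc values are forward-only iterators: `advanceExact` may only move to non-decreasing doc IDs, while version/seq# lookups arrive in arbitrary order, so the lookup methods above now pull a fresh iterator per call. A sketch of the constraint (the `ForwardOnlyLookup` class is illustrative):

import java.io.IOException;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.NumericDocValues;

class ForwardOnlyLookup {
    /** Reads one doc's _version; a fresh iterator per lookup keeps out-of-order calls legal. */
    static long version(LeafReader reader, int docID) throws IOException {
        // Reusing one cached iterator for docID 7 and then docID 3 would violate
        // the forward-only contract, so fetch the iterator per lookup instead.
        NumericDocValues versions = reader.getNumericDocValues("_version");
        if (versions == null || versions.advanceExact(docID) == false) {
            throw new IllegalArgumentException("doc [" + docID + "] has no _version");
        }
        return versions.longValue();
    }
}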

@ -21,7 +21,6 @@ package org.elasticsearch.common.lucene.uid;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReader.CoreClosedListener;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.util.CloseableThreadLocal;
@ -41,7 +40,7 @@ public final class VersionsAndSeqNoResolver {
ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency();
// Evict this reader from lookupStates once it's closed:
private static final CoreClosedListener removeLookupState = key -> {
private static final IndexReader.ClosedListener removeLookupState = key -> {
CloseableThreadLocal<PerThreadIDVersionAndSeqNoLookup> ctl = lookupStates.remove(key);
if (ctl != null) {
ctl.close();
@ -49,15 +48,15 @@ public final class VersionsAndSeqNoResolver {
};
private static PerThreadIDVersionAndSeqNoLookup getLookupState(LeafReader reader) throws IOException {
Object key = reader.getCoreCacheKey();
CloseableThreadLocal<PerThreadIDVersionAndSeqNoLookup> ctl = lookupStates.get(key);
IndexReader.CacheHelper cacheHelper = reader.getCoreCacheHelper();
CloseableThreadLocal<PerThreadIDVersionAndSeqNoLookup> ctl = lookupStates.get(cacheHelper.getKey());
if (ctl == null) {
// First time we are seeing this reader's core; make a new CTL:
ctl = new CloseableThreadLocal<>();
CloseableThreadLocal<PerThreadIDVersionAndSeqNoLookup> other = lookupStates.putIfAbsent(key, ctl);
CloseableThreadLocal<PerThreadIDVersionAndSeqNoLookup> other = lookupStates.putIfAbsent(cacheHelper.getKey(), ctl);
if (other == null) {
// Our CTL won, we must remove it when the core is closed:
reader.addCoreClosedListener(removeLookupState);
cacheHelper.addClosedListener(removeLookupState);
} else {
// Another thread beat us to it: just use their CTL:
ctl = other;
@ -161,7 +160,7 @@ public final class VersionsAndSeqNoResolver {
public static long loadPrimaryTerm(DocIdAndSeqNo docIdAndSeqNo) throws IOException {
LeafReader leaf = docIdAndSeqNo.context.reader();
PerThreadIDVersionAndSeqNoLookup lookup = getLookupState(leaf);
long result = lookup.lookUpPrimaryTerm(docIdAndSeqNo.docId);
long result = lookup.lookUpPrimaryTerm(docIdAndSeqNo.docId, leaf);
assert result > 0 : "should always resolve a primary term for a resolved sequence number. primary_term [" + result + "]"
+ " docId [" + docIdAndSeqNo.docId + "] seqNo [" + docIdAndSeqNo.seqNo + "]";
return result;

@ -21,8 +21,8 @@ package org.elasticsearch.index.cache.bitset;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.IndexSearcher;
@ -71,13 +71,13 @@ import java.util.concurrent.Executor;
* and require that it should always be around should use this cache, otherwise the
* {@link org.elasticsearch.index.cache.query.QueryCache} should be used instead.
*/
public final class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener<Object, Cache<Query, BitsetFilterCache.Value>>, Closeable {
public final class BitsetFilterCache extends AbstractIndexComponent implements IndexReader.ClosedListener, RemovalListener<IndexReader.CacheKey, Cache<Query, BitsetFilterCache.Value>>, Closeable {
public static final Setting<Boolean> INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING =
Setting.boolSetting("index.load_fixed_bitset_filters_eagerly", true, Property.IndexScope);
private final boolean loadRandomAccessFiltersEagerly;
private final Cache<Object, Cache<Query, Value>> loadedFilters;
private final Cache<IndexReader.CacheKey, Cache<Query, Value>> loadedFilters;
private final Listener listener;
public BitsetFilterCache(IndexSettings indexSettings, Listener listener) {
@ -86,7 +86,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L
throw new IllegalArgumentException("listener must not be null");
}
this.loadRandomAccessFiltersEagerly = this.indexSettings.getValue(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING);
this.loadedFilters = CacheBuilder.<IndexReader.CacheKey, Cache<Query, Value>>builder().removalListener(this).build();
this.listener = listener;
}
@ -100,7 +100,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L
}
@Override
public void onClose(IndexReader.CacheKey ownerCoreCacheKey) {
loadedFilters.invalidate(ownerCoreCacheKey);
}
@ -115,7 +115,11 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L
}
private BitSet getAndLoadIfNotPresent(final Query query, final LeafReaderContext context) throws IOException, ExecutionException {
final IndexReader.CacheHelper cacheHelper = context.reader().getCoreCacheHelper();
if (cacheHelper == null) {
throw new IllegalArgumentException("Reader " + context.reader() + " does not support caching");
}
final IndexReader.CacheKey coreCacheReader = cacheHelper.getKey();
final ShardId shardId = ShardUtils.extractShardId(context.reader());
if (shardId != null // can't require it because of the percolator
&& indexSettings.getIndex().equals(shardId.getIndex()) == false) {
@ -124,7 +128,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L
+ " with cache of index " + indexSettings.getIndex());
}
Cache<Query, Value> filterToFbs = loadedFilters.computeIfAbsent(coreCacheReader, key -> {
cacheHelper.addClosedListener(BitsetFilterCache.this);
return CacheBuilder.<Query, Value>builder().build();
});
@ -148,7 +152,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L
}
@Override
public void onRemoval(RemovalNotification<IndexReader.CacheKey, Cache<Query, Value>> notification) {
if (notification.getKey() == null) {
return;
}
@ -272,7 +276,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L
}
Cache<IndexReader.CacheKey, Cache<Query, Value>> getLoadedFilters() {
return loadedFilters;
}
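One consequence of the new API, visible in getAndLoadIfNotPresent above, is that getCoreCacheHelper() may return null for readers that do not support caching, so callers must check before keying anything on it. A hedged helper mirroring that check (the class and method names are illustrative):

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReader;

final class CacheKeys {
    static IndexReader.CacheKey coreKeyOrThrow(LeafReader reader) {
        IndexReader.CacheHelper helper = reader.getCoreCacheHelper();
        if (helper == null) {
            throw new IllegalArgumentException("Reader " + reader + " does not support caching");
        }
        return helper.getKey();
    }
}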


@ -22,7 +22,7 @@ package org.elasticsearch.index.codec;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
import org.apache.lucene.codecs.lucene70.Lucene70Codec;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.index.mapper.MapperService;
@ -47,8 +47,8 @@ public class CodecService {
public CodecService(@Nullable MapperService mapperService, Logger logger) {
final MapBuilder<String, Codec> codecs = MapBuilder.<String, Codec>newMapBuilder();
if (mapperService == null) {
codecs.put(DEFAULT_CODEC, new Lucene70Codec());
codecs.put(BEST_COMPRESSION_CODEC, new Lucene70Codec(Mode.BEST_COMPRESSION));
} else {
codecs.put(DEFAULT_CODEC,
new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger));
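For context, a small sketch of what the two registered flavours amount to; the stored-fields mode is the only knob that differs, assuming the Lucene 7 snapshot keeps the Lucene50StoredFieldsFormat.Mode constructor shown above:

import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
import org.apache.lucene.codecs.lucene70.Lucene70Codec;

final class Codecs {
    static Codec pick(boolean bestCompression) {
        // BEST_SPEED is the default stored-fields mode of the no-arg constructor
        return bestCompression ? new Lucene70Codec(Mode.BEST_COMPRESSION) : new Lucene70Codec();
    }
}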


@ -23,7 +23,7 @@ import org.apache.logging.log4j.Logger;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
import org.apache.lucene.codecs.lucene70.Lucene70Codec;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.index.mapper.CompletionFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
@ -38,7 +38,7 @@ import org.elasticsearch.index.mapper.MapperService;
* configured for a specific field the default postings format is used.
*/
// LUCENE UPGRADE: make sure to move to a new codec depending on the lucene version
public class PerFieldMappingPostingFormatCodec extends Lucene70Codec {
private final Logger logger;
private final MapperService mapperService;


@ -0,0 +1,55 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import java.io.IOException;
/**
* Base implementation that throws an {@link UnsupportedOperationException} for the
* {@link DocIdSetIterator} APIs. This impl is safe to use for sorting and
* aggregations, which only use {@link #advanceExact(int)} and
* {@link #binaryValue()}.
*/
public abstract class AbstractBinaryDocValues extends BinaryDocValues {
@Override
public int docID() {
throw new UnsupportedOperationException();
}
@Override
public int nextDoc() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int advance(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
throw new UnsupportedOperationException();
}
}
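A hedged example of the intended usage: a subclass only needs advanceExact and binaryValue, exactly the pair that sorting and aggregations rely on. The constant-value class below is illustrative and assumes it lives in the same package as AbstractBinaryDocValues:

import org.apache.lucene.util.BytesRef;
import java.io.IOException;

final class ConstantBinaryDocValues extends AbstractBinaryDocValues {
    private final BytesRef value = new BytesRef("constant");

    @Override
    public boolean advanceExact(int doc) throws IOException {
        return true; // pretend every document has a value
    }

    @Override
    public BytesRef binaryValue() throws IOException {
        return value;
    }
}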


@ -0,0 +1,50 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import java.io.IOException;
/**
* Base implementation that throws an {@link UnsupportedOperationException} for the
* {@link DocIdSetIterator} APIs. This impl is safe to use for sorting and
* aggregations, which only use {@link #advanceExact(int)} and
* {@link #longValue()}.
*/
public abstract class AbstractNumericDocValues extends NumericDocValues {
@Override
public int nextDoc() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int advance(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
throw new UnsupportedOperationException();
}
}


@ -19,30 +19,32 @@
package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import java.io.IOException;
/**
* Base implementation that throws an {@link UnsupportedOperationException} for the
* {@link DocIdSetIterator} APIs. This impl is safe to use for sorting and
* aggregations, which only use {@link #advanceExact(int)} and
* {@link #ordValue()}.
*/
public abstract class AbstractSortedDocValues extends SortedDocValues {
@Override
public int nextDoc() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int advance(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
throw new UnsupportedOperationException();
}
}


@ -0,0 +1,55 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import java.io.IOException;
/**
* Base implementation that throws an {@link UnsupportedOperationException} for the
* {@link DocIdSetIterator} APIs. This impl is safe to use for sorting and
* aggregations, which only use {@link #advanceExact(int)} and
* {@link #docValueCount()} and {@link #nextValue()}.
*/
public abstract class AbstractSortedNumericDocValues extends SortedNumericDocValues {
@Override
public int docID() {
throw new UnsupportedOperationException();
}
@Override
public int nextDoc() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int advance(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
throw new UnsupportedOperationException();
}
}
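Consumers of the new iterator-style doc values follow a fixed shape: advanceExact positions on a document, then nextValue may be called up to docValueCount() times. A sketch (the class and method names are illustrative):

import org.apache.lucene.index.SortedNumericDocValues;
import java.io.IOException;

final class DocValuesSums {
    static long sum(SortedNumericDocValues values, int doc) throws IOException {
        long sum = 0;
        if (values.advanceExact(doc)) {
            // nextValue may only be called docValueCount() times per document
            for (int i = 0, count = values.docValueCount(); i < count; ++i) {
                sum += values.nextValue();
            }
        }
        return sum;
    }
}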


@ -0,0 +1,56 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import java.io.IOException;
/**
* Base implementation that throws an {@link UnsupportedOperationException} for the
* {@link DocIdSetIterator} APIs. This impl is safe to use for sorting and
* aggregations, which only use {@link #advanceExact(int)} and
* {@link #getValueCount()} and {@link #nextOrd()} and {@link #lookupOrd(long)}.
*/
public abstract class AbstractSortedSetDocValues extends SortedSetDocValues {
@Override
public int docID() {
throw new UnsupportedOperationException();
}
@Override
public int nextDoc() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int advance(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
throw new UnsupportedOperationException();
}
}
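For the sorted-set flavour the per-document values are ordinals terminated by NO_MORE_ORDS, and lookupOrd resolves each one to a term. A hedged iteration sketch:

import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.BytesRef;
import java.io.IOException;

final class OrdsPrinter {
    static void printTerms(SortedSetDocValues values, int doc) throws IOException {
        if (values.advanceExact(doc) == false) {
            return; // no values for this document
        }
        for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) {
            BytesRef term = values.lookupOrd(ord);
            System.out.println(term.utf8ToString());
        }
    }
}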


@ -0,0 +1,54 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata;
import org.apache.lucene.search.DocIdSetIterator;
import java.io.IOException;
/**
* Base implementation that throws an {@link UnsupportedOperationException} for the
* {@link DocIdSetIterator} APIs. This impl is safe to use for sorting and
* aggregations, which only use {@link #advanceExact(int)} and
* {@link #docValueCount()} and {@link #nextValue()}.
*/
public abstract class AbstractSortingNumericDocValues extends SortingNumericDocValues {
@Override
public int docID() {
throw new UnsupportedOperationException();
}
@Override
public int nextDoc() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int advance(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
throw new UnsupportedOperationException();
}
}


@ -19,7 +19,7 @@
package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.SortedSetDocValues;
/**
* Specialization of {@link AtomicFieldData} for data that is indexed with
@ -30,6 +30,6 @@ public interface AtomicOrdinalsFieldData extends AtomicFieldData {
/**
* Return the ordinals values for the current atomic reader.
*/
SortedSetDocValues getOrdinalsValues();
}


@ -22,15 +22,13 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.geo.GeoPoint;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@ -43,8 +41,8 @@ public enum FieldData {
/**
* Return a {@link SortedBinaryDocValues} that doesn't contain any value.
*/
public static SortedBinaryDocValues emptySortedBinary() {
return singleton(DocValues.emptyBinary());
}
/**
@ -53,8 +51,13 @@ public enum FieldData {
public static NumericDoubleValues emptyNumericDouble() {
return new NumericDoubleValues() {
@Override
public boolean advanceExact(int doc) throws IOException {
return false;
}
@Override
public double doubleValue() throws IOException {
throw new UnsupportedOperationException();
}
};
@ -63,16 +66,20 @@ public enum FieldData {
/**
* Return a {@link SortedNumericDoubleValues} that doesn't contain any value.
*/
public static SortedNumericDoubleValues emptySortedNumericDoubles() {
return singleton(emptyNumericDouble());
}
public static GeoPointValues emptyGeoPoint() {
return new GeoPointValues() {
@Override
public boolean advanceExact(int doc) throws IOException {
return false;
}
@Override
public GeoPoint geoPointValue() {
throw new UnsupportedOperationException();
}
};
}
@ -80,68 +87,123 @@ public enum FieldData {
/**
* Return a {@link SortedNumericDoubleValues} that doesn't contain any value.
*/
public static MultiGeoPointValues emptyMultiGeoPoints() {
return singleton(emptyGeoPoint());
}
/**
* Returns a {@link Bits} representing all documents from <code>dv</code> that have a value.
*/
public static Bits docsWithValue(final SortedBinaryDocValues dv, final int maxDoc) {
return new Bits() {
@Override
public boolean get(int index) {
try {
return dv.advanceExact(index);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public int length() {
return maxDoc;
}
};
}
/**
* Returns a {@link Bits} representing all documents from <code>dv</code>
* that have a value.
*/
public static Bits docsWithValue(final SortedSetDocValues dv, final int maxDoc) {
return new Bits() {
@Override
public boolean get(int index) {
try {
return dv.advanceExact(index);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public int length() {
return maxDoc;
}
};
}
/**
* Returns a Bits representing all documents from <code>dv</code> that have
* a value.
*/
public static Bits docsWithValue(final MultiGeoPointValues dv, final int maxDoc) {
return new Bits() {
@Override
public boolean get(int index) {
try {
return dv.advanceExact(index);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public int length() {
return maxDoc;
}
};
}
/**
* Returns a Bits representing all documents from <code>dv</code> that have a value.
*/
public static Bits docsWithValue(final SortedNumericDoubleValues dv, final int maxDoc) {
return new Bits() {
@Override
public boolean get(int index) {
try {
return dv.advanceExact(index);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public int length() {
return maxDoc;
}
};
}
/**
* Returns a Bits representing all documents from <code>dv</code> that have
* a value.
*/
public static Bits docsWithValue(final SortedNumericDocValues dv, final int maxDoc) {
return new Bits() {
@Override
public boolean get(int index) {
try {
return dv.advanceExact(index);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public int length() {
return maxDoc;
}
};
}
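All four docsWithValue overloads share the same shape: the returned Bits forwards get(int) to advanceExact and wraps the checked IOException. A short usage sketch of such a Bits view (the helper class is illustrative):

import org.apache.lucene.util.Bits;

final class BitsUsage {
    static int countDocsWithValue(Bits docsWithValue) {
        int count = 0;
        for (int doc = 0; doc < docsWithValue.length(); ++doc) {
            if (docsWithValue.get(doc)) {
                ++count;
            }
        }
        return count;
    }
}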
/**
* Given a {@link SortedNumericDoubleValues}, return a
* {@link SortedNumericDocValues} instance that will translate double values
* to sortable long bits using
* {@link org.apache.lucene.util.NumericUtils#doubleToSortableLong(double)}.
*/
public static SortedNumericDocValues toSortableLongBits(SortedNumericDoubleValues values) {
final NumericDoubleValues singleton = unwrapSingleton(values);
@ -152,8 +214,7 @@ public enum FieldData {
} else {
longBits = new SortableLongBitsNumericDocValues(singleton);
}
return DocValues.singleton(longBits);
} else {
if (values instanceof SortableLongBitsToSortedNumericDoubleValues) {
return ((SortableLongBitsToSortedNumericDoubleValues) values).getLongValues();
@ -166,7 +227,7 @@ public enum FieldData {
/**
* Given a {@link SortedNumericDocValues}, return a {@link SortedNumericDoubleValues}
* instance that will translate long values to doubles using
* {@link org.apache.lucene.util.NumericUtils#sortableLongToDouble(long)}.
*/
public static SortedNumericDoubleValues sortableLongBitsToDoubles(SortedNumericDocValues values) {
final NumericDocValues singleton = DocValues.unwrapSingleton(values);
@ -177,8 +238,7 @@ public enum FieldData {
} else {
doubles = new SortableLongBitsToNumericDoubleValues(singleton);
}
return singleton(doubles);
} else {
if (values instanceof SortableLongBitsSortedNumericDocValues) {
return ((SortableLongBitsSortedNumericDocValues) values).getDoubleValues();
@ -194,8 +254,7 @@ public enum FieldData {
public static SortedNumericDoubleValues castToDouble(final SortedNumericDocValues values) {
final NumericDocValues singleton = DocValues.unwrapSingleton(values);
if (singleton != null) {
return singleton(new DoubleCastedValues(singleton));
} else {
return new SortedDoubleCastedValues(values);
}
@ -207,8 +266,7 @@ public enum FieldData {
public static SortedNumericDocValues castToLong(final SortedNumericDoubleValues values) {
final NumericDoubleValues singleton = unwrapSingleton(values);
if (singleton != null) {
return DocValues.singleton(new LongCastedValues(singleton));
} else {
return new SortedLongCastedValues(values);
}
@ -217,15 +275,14 @@ public enum FieldData {
/**
* Returns a multi-valued view over the provided {@link NumericDoubleValues}.
*/
public static SortedNumericDoubleValues singleton(NumericDoubleValues values) {
return new SingletonSortedNumericDoubleValues(values);
}
/**
* Returns a single-valued view of the {@link SortedNumericDoubleValues},
* if it was previously wrapped with {@link DocValues#singleton(NumericDocValues)},
* or null.
*/
public static NumericDoubleValues unwrapSingleton(SortedNumericDoubleValues values) {
if (values instanceof SingletonSortedNumericDoubleValues) {
@ -234,31 +291,17 @@ public enum FieldData {
return null;
}
/**
* Returns a multi-valued view over the provided {@link GeoPointValues}.
*/
public static MultiGeoPointValues singleton(GeoPointValues values) {
return new SingletonMultiGeoPointValues(values);
}
/**
* Returns a single-valued view of the {@link MultiGeoPointValues},
* if it was previously wrapped with {@link #singleton(GeoPointValues)},
* or null.
*/
public static GeoPointValues unwrapSingleton(MultiGeoPointValues values) {
if (values instanceof SingletonMultiGeoPointValues) {
@ -267,30 +310,17 @@ public enum FieldData {
return null;
}
/**
* Returns a multi-valued view over the provided {@link BinaryDocValues}.
*/
public static SortedBinaryDocValues singleton(BinaryDocValues values) {
return new SingletonSortedBinaryDocValues(values);
}
/**
* Returns a single-valued view of the {@link SortedBinaryDocValues},
* if it was previously wrapped with {@link #singleton(BinaryDocValues)},
* or null.
*/
public static BinaryDocValues unwrapSingleton(SortedBinaryDocValues values) {
if (values instanceof SingletonSortedBinaryDocValues) {
@ -299,18 +329,6 @@ public enum FieldData {
return null;
}
/**
* Returns whether the provided values *might* be multi-valued. There is no
* guarantee that this method will return <tt>false</tt> in the single-valued case.
@ -359,10 +377,13 @@ public enum FieldData {
public static SortedBinaryDocValues toString(final SortedNumericDocValues values) {
return toString(new ToStringValues() {
@Override
public boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
@Override
public void get(List<CharSequence> list) throws IOException {
for (int i = 0, count = values.docValueCount(); i < count; ++i) {
list.add(Long.toString(values.nextValue()));
}
}
});
@ -376,10 +397,13 @@ public enum FieldData {
public static SortedBinaryDocValues toString(final SortedNumericDoubleValues values) {
return toString(new ToStringValues() {
@Override
public boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
@Override
public void get(List<CharSequence> list) throws IOException {
for (int i = 0, count = values.docValueCount(); i < count; ++i) {
list.add(Double.toString(values.nextValue()));
}
}
});
@ -390,23 +414,37 @@ public enum FieldData {
* typically used for scripts or for the `map` execution mode of terms aggs.
* NOTE: this is slow!
*/
public static SortedBinaryDocValues toString(final SortedSetDocValues values) {
return new SortedBinaryDocValues() {
private int count = 0;
@Override
public boolean advanceExact(int doc) throws IOException {
if (values.advanceExact(doc) == false) {
return false;
}
for (int i = 0; ; ++i) {
if (values.nextOrd() == SortedSetDocValues.NO_MORE_ORDS) {
count = i;
break;
}
}
// reset the iterator on the current doc
boolean advanced = values.advanceExact(doc);
assert advanced;
return true;
}
@Override
public int docValueCount() {
return count;
}
@Override
public BytesRef nextValue() throws IOException {
return values.lookupOrd(values.nextOrd());
}
};
}
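Note the two calls to advanceExact in the wrapper above: SortedSetDocValues exposes its ordinals as a one-shot iterator, so the wrapper first drains nextOrd() to learn the count, then advances to the same document again so that nextValue() can replay the ordinals from the start.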
@ -418,78 +456,30 @@ public enum FieldData {
public static SortedBinaryDocValues toString(final MultiGeoPointValues values) {
return toString(new ToStringValues() {
@Override
public boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
@Override
public void get(List<CharSequence> list) throws IOException {
for (int i = 0, count = values.docValueCount(); i < count; ++i) {
list.add(values.nextValue().toString());
}
}
});
}
private static SortedBinaryDocValues toString(final ToStringValues toStringValues) {
return new SortingBinaryDocValues() {
final List<CharSequence> list = new ArrayList<>();
@Override
public boolean advanceExact(int docID) throws IOException {
if (toStringValues.advanceExact(docID) == false) {
return false;
}
list.clear();
toStringValues.get(list);
count = list.size();
grow();
for (int i = 0; i < count; ++i) {
@ -497,6 +487,7 @@ public enum FieldData {
values[i].copyChars(s);
}
sort();
return true;
}
};
@ -504,7 +495,14 @@ public enum FieldData {
private interface ToStringValues {
/**
* Advance this instance to the given document id
* @return true if there is a value for this document
*/
boolean advanceExact(int doc) throws IOException;
/** Fill the list of CharSequences with the values for the current document. */
void get(List<CharSequence> values) throws IOException;
}
@ -517,8 +515,13 @@ public enum FieldData {
}
@Override
public double doubleValue() throws IOException {
return values.longValue();
}
@Override
public boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
}
@ -532,38 +535,49 @@ public enum FieldData {
}
@Override
public boolean advanceExact(int target) throws IOException {
return values.advanceExact(target);
}
@Override
public double nextValue() throws IOException {
return values.nextValue();
}
@Override
public int docValueCount() {
return values.docValueCount();
}
}
private static class LongCastedValues extends AbstractNumericDocValues {
private final NumericDoubleValues values;
private int docID = -1;
LongCastedValues(NumericDoubleValues values) {
this.values = values;
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
return values.advanceExact(target);
}
@Override
public long longValue() throws IOException {
return (long) values.doubleValue();
}
@Override
public int docID() {
return docID;
}
}
private static class SortedLongCastedValues extends AbstractSortedNumericDocValues {
private final SortedNumericDoubleValues values;
@ -572,18 +586,18 @@ public enum FieldData {
}
@Override
public boolean advanceExact(int target) throws IOException {
return values.advanceExact(target);
}
@Override
public int docValueCount() {
return values.docValueCount();
}
@Override
public long nextValue() throws IOException {
return (long) values.nextValue();
}
}


@ -21,17 +21,23 @@ package org.elasticsearch.index.fielddata;
import org.elasticsearch.common.geo.GeoPoint;
import java.io.IOException;
/**
* Per-document geo-point values.
*/
public abstract class GeoPointValues {
/**
* Advance this instance to the given document id
* @return true if there is a value for this document
*/
public abstract boolean advanceExact(int doc) throws IOException;
/**
* Get the {@link GeoPoint} associated with the current document.
* The returned {@link GeoPoint} might be reused across calls.
*/
public abstract GeoPoint geoPointValue();
}


@ -20,6 +20,8 @@ package org.elasticsearch.index.fielddata;
import org.elasticsearch.common.geo.GeoPoint;
import java.io.IOException;
/**
* A stateful lightweight per document set of {@link GeoPoint} values.
* To iterate over values in a document use the following pattern:
@ -44,28 +46,24 @@ public abstract class MultiGeoPointValues {
}
/**
* Advance this instance to the given document id
* @return true if there is a value for this document
*/
public abstract boolean advanceExact(int doc) throws IOException;
/**
* Return the number of geo points the current document has.
*/
public abstract int docValueCount();
/**
* Return the next value associated with the current document. This must not be
* called more than {@link #docValueCount()} times.
*
* Note: the returned {@link GeoPoint} might be shared across invocations.
*
* @return the next value for the current docID set to {@link #advanceExact(int)}.
*/
public abstract GeoPoint nextValue() throws IOException;
}
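The iteration pattern referenced in the class javadoc now looks like the following. A hedged sketch that assumes it runs in the same package as MultiGeoPointValues; the reused GeoPoint must be copied if it is kept beyond one call:

import org.elasticsearch.common.geo.GeoPoint;
import java.io.IOException;

final class GeoPointLoop {
    static void visit(MultiGeoPointValues values, int doc) throws IOException {
        if (values.advanceExact(doc)) {
            for (int i = 0, count = values.docValueCount(); i < count; ++i) {
                GeoPoint point = values.nextValue(); // may be reused across calls
                System.out.println(point.lat() + "," + point.lon());
            }
        }
    }
}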


@ -20,43 +20,58 @@
package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.DoubleValues;
import java.io.IOException;
/**
* A per-document numeric value.
*/
public abstract class NumericDoubleValues extends DoubleValues {
/** Sole constructor. (For invocation by subclass
* constructors, typically implicit.) */
protected NumericDoubleValues() {}
// TODO: this interaction with sort comparators is really ugly...
/** Returns numeric docvalues view of raw double bits */
public NumericDocValues getRawDoubleValues() {
return new AbstractNumericDocValues() {
private int docID = -1;
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
return NumericDoubleValues.this.advanceExact(target);
}
@Override
public long longValue() throws IOException {
return Double.doubleToRawLongBits(NumericDoubleValues.this.doubleValue());
}
@Override
public int docID() {
return docID;
}
};
}
// yes... this is doing what the previous code was doing...
/** Returns numeric docvalues view of raw float bits */
public NumericDocValues getRawFloatValues() {
return new AbstractNumericDocValues() {
private int docID = -1;
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
return NumericDoubleValues.this.advanceExact(target);
}
@Override
public long longValue() throws IOException {
return Float.floatToRawIntBits((float)NumericDoubleValues.this.doubleValue());
}
@Override
public int docID() {
return docID;
}
};
}
}
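The raw-bits views above exist so double-valued sources can participate in long-based comparator plumbing without losing precision; raw bits round-trip exactly, as this small self-contained check illustrates:

final class RawBitsRoundTrip {
    public static void main(String[] args) {
        double value = 3.14;
        long rawBits = Double.doubleToRawLongBits(value);
        // longBitsToDouble(doubleToRawLongBits(v)) == v, bit for bit
        assert Double.longBitsToDouble(rawBits) == value;
        System.out.println(Double.longBitsToDouble(rawBits));
    }
}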


@ -21,7 +21,9 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
@ -32,7 +34,9 @@ import org.joda.time.DateTimeZone;
import org.joda.time.MutableDateTime;
import org.joda.time.ReadableDateTime;
import java.io.IOException;
import java.util.AbstractList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.function.UnaryOperator;
@ -46,7 +50,7 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
/**
* Set the current doc ID.
*/
public abstract void setNextDocId(int docId);
public abstract void setNextDocId(int docId) throws IOException;
/**
* Return a copy of the list of the values for the current document.
@ -83,24 +87,48 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
public static final class Strings extends ScriptDocValues<String> {
private final SortedBinaryDocValues in;
private BytesRefBuilder[] values = new BytesRefBuilder[0];
private int count;
public Strings(SortedBinaryDocValues in) {
this.in = in;
}
@Override
public void setNextDocId(int docId) throws IOException {
if (in.advanceExact(docId)) {
resize(in.docValueCount());
for (int i = 0; i < count; i++) {
values[i].copyBytes(in.nextValue());
}
} else {
resize(0);
}
}
/**
* Set the {@link #size()} and ensure that the {@link #values} array can
* store at least that many entries.
*/
protected void resize(int newSize) {
count = newSize;
if (newSize > values.length) {
final int oldLength = values.length;
values = ArrayUtil.grow(values, count);
for (int i = oldLength; i < values.length; ++i) {
values[i] = new BytesRefBuilder();
}
}
}
public SortedBinaryDocValues getInternalValues() {
return this.in;
}
public BytesRef getBytesValue() {
if (size() > 0) {
return values[0].get();
} else {
return null;
}
@ -117,12 +145,12 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
@Override
public String get(int index) {
return values[index].get().utf8ToString();
}
@Override
public int size() {
return count;
}
}
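The eager copy in setNextDocId above is deliberate: the underlying SortedBinaryDocValues can only be iterated once per document, while scripts may call get(int) and size() repeatedly and in any order, so the values are buffered into reusable BytesRefBuilder slots up front.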
@ -130,61 +158,81 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
public static final class Longs extends ScriptDocValues<Long> {
protected static final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger(Longs.class));
private final SortedNumericDocValues in;
private long[] values = new long[0];
private int count;
private Dates dates;
private int docId = -1;
public Longs(SortedNumericDocValues in) {
this.in = in;
}
@Override
public void setNextDocId(int docId) throws IOException {
this.docId = docId;
if (in.advanceExact(docId)) {
resize(in.docValueCount());
for (int i = 0; i < count; i++) {
values[i] = in.nextValue();
}
} else {
resize(0);
}
if (dates != null) {
dates.setNextDocId(docId);
}
}
/**
* Set the {@link #size()} and ensure that the {@link #values} array can
* store at least that many entries.
*/
protected void resize(int newSize) {
count = newSize;
values = ArrayUtil.grow(values, count);
}
public SortedNumericDocValues getInternalValues() {
return this.in;
}
public long getValue() {
if (count == 0) {
return 0L;
}
return values[0];
}
@Deprecated
public ReadableDateTime getDate() throws IOException {
deprecationLogger.deprecated("getDate on numeric fields is deprecated. Use a date field to get dates.");
if (dates == null) {
dates = new Dates(in);
dates.setNextDocId(docId);
}
return dates.getValue();
}
@Deprecated
public List<ReadableDateTime> getDates() throws IOException {
deprecationLogger.deprecated("getDates on numeric fields is deprecated. Use a date field to get dates.");
if (dates == null) {
dates = new Dates(in);
dates.setNextDocId(docId);
}
return dates;
}
@Override
public Long get(int index) {
return values[index];
}
@Override
public int size() {
return count;
}
}
@ -193,22 +241,24 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
private static final ReadableDateTime EPOCH = new DateTime(0, DateTimeZone.UTC);
private final SortedNumericDocValues in;
/**
* Values wrapped in {@link MutableDateTime}. Null by default an allocated on first usage so we allocate a reasonably size. We keep
* this array so we don't have allocate new {@link MutableDateTime}s on every usage. Instead we reuse them for every document.
*/
private MutableDateTime[] dates;
private int count;
public Dates(SortedNumericDocValues in) {
this.in = in;
}
/**
* Fetch the first field value or 0 millis after epoch if there are no
* values.
*/
public ReadableDateTime getValue() {
if (count == 0) {
return EPOCH;
}
return get(0);
@ -234,113 +284,159 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
@Override
public ReadableDateTime get(int index) {
if (index >= count) {
throw new IndexOutOfBoundsException(
"attempted to fetch the [" + index + "] date when there are only [" + values.count() + "] dates.");
"attempted to fetch the [" + index + "] date when there are only ["
+ count + "] dates.");
}
return dates[index];
}
@Override
public int size() {
return count;
}
@Override
public void setNextDocId(int docId) throws IOException {
if (in.advanceExact(docId)) {
count = in.docValueCount();
} else {
count = 0;
}
refreshArray();
}
/**
* Refresh the backing array. Package private so it can be called when {@link Longs} loads dates.
*/
void refreshArray() throws IOException {
if (count == 0) {
return;
}
if (dates == null) {
// Happens for the first document. We delay allocating dates so we can allocate it with a reasonable size.
dates = new MutableDateTime[count];
for (int i = 0; i < dates.length; i++) {
dates[i] = new MutableDateTime(in.nextValue(), DateTimeZone.UTC);
}
return;
}
if (count > dates.length) {
// Happens when we move to a new document and it has more dates than any documents before it.
MutableDateTime[] backup = dates;
dates = new MutableDateTime[count];
System.arraycopy(backup, 0, dates, 0, backup.length);
for (int i = 0; i < backup.length; i++) {
dates[i].setMillis(in.nextValue());
}
for (int i = backup.length; i < dates.length; i++) {
dates[i] = new MutableDateTime(in.nextValue(), DateTimeZone.UTC);
}
return;
}
for (int i = 0; i < count; i++) {
dates[i].setMillis(in.nextValue());
}
}
}
public static final class Doubles extends ScriptDocValues<Double> {
private final SortedNumericDoubleValues in;
private double[] values = new double[0];
private int count;
public Doubles(SortedNumericDoubleValues in) {
this.in = in;
}
@Override
public void setNextDocId(int docId) throws IOException {
if (in.advanceExact(docId)) {
resize(in.docValueCount());
for (int i = 0; i < count; i++) {
values[i] = in.nextValue();
}
} else {
resize(0);
}
}
/**
* Set the {@link #size()} and ensure that the {@link #values} array can
* store at least that many entries.
*/
protected void resize(int newSize) {
count = newSize;
values = ArrayUtil.grow(values, count);
}
public SortedNumericDoubleValues getInternalValues() {
return this.in;
}
public double getValue() {
if (count == 0) {
return 0d;
}
return values[0];
}
@Override
public Double get(int index) {
return values[index];
}
@Override
public int size() {
return count;
}
}
public static final class GeoPoints extends ScriptDocValues<GeoPoint> {
private final MultiGeoPointValues in;
private GeoPoint[] values = new GeoPoint[0];
private int count;
public GeoPoints(MultiGeoPointValues in) {
this.in = in;
}
@Override
public void setNextDocId(int docId) throws IOException {
if (in.advanceExact(docId)) {
resize(in.docValueCount());
for (int i = 0; i < count; i++) {
GeoPoint point = in.nextValue();
values[i].reset(point.lat(), point.lon());
}
} else {
resize(0);
}
}
/**
* Set the {@link #size()} and ensure that the {@link #values} array can
* store at least that many entries.
*/
protected void resize(int newSize) {
count = newSize;
if (newSize > values.length) {
int oldLength = values.length;
values = ArrayUtil.grow(values, count);
for (int i = oldLength; i < values.length; ++i) {
values[i] = new GeoPoint();
}
}
}
public GeoPoint getValue() {
if (count == 0) {
return null;
}
return values[0];
}
public double getLat() {
@ -371,13 +467,13 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
@Override
public GeoPoint get(int index) {
final GeoPoint point = values[index];
return new GeoPoint(point.lat(), point.lon());
}
@Override
public int size() {
return count;
}
public double arcDistance(double lat, double lon) {
@ -420,66 +516,114 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
public static final class Booleans extends ScriptDocValues<Boolean> {
private final SortedNumericDocValues in;
private boolean[] values = new boolean[0];
private int count;
public Booleans(SortedNumericDocValues in) {
this.in = in;
}
@Override
public void setNextDocId(int docId) throws IOException {
if (in.advanceExact(docId)) {
resize(in.docValueCount());
for (int i = 0; i < count; i++) {
values[i] = in.nextValue() == 1;
}
} else {
resize(0);
}
}
/**
* Set the {@link #size()} and ensure that the {@link #values} array can
* store at least that many entries.
*/
protected void resize(int newSize) {
count = newSize;
values = grow(values, count);
}
public boolean getValue() {
return count != 0 && values[0];
}
@Override
public Boolean get(int index) {
return values[index];
}
@Override
public int size() {
return count;
}
private static boolean[] grow(boolean[] array, int minSize) {
assert minSize >= 0 : "size must be positive (got " + minSize
+ "): likely integer overflow?";
if (array.length < minSize) {
return Arrays.copyOf(array, ArrayUtil.oversize(minSize, 1));
} else
return array;
}
}
public static final class BytesRefs extends ScriptDocValues<BytesRef> {
private final SortedBinaryDocValues in;
private BytesRef[] values;
private int count;
public BytesRefs(SortedBinaryDocValues in) {
this.in = in;
}
@Override
public void setNextDocId(int docId) throws IOException {
if (in.advanceExact(docId)) {
resize(in.docValueCount());
for (int i = 0; i < count; i++) {
values[i] = in.nextValue();
}
} else {
resize(0);
}
}
/**
* Set the {@link #size()} and ensure that the {@link #values} array can
* store at least that many entries.
*/
protected void resize(int newSize) {
count = newSize;
if (values == null) {
values = new BytesRef[newSize];
} else {
values = ArrayUtil.grow(values, count);
}
}
public SortedBinaryDocValues getInternalValues() {
return this.in;
}
public BytesRef getValue() {
if (count == 0) {
return new BytesRef();
}
return values[0];
}
@Override
public BytesRef get(int index) {
return values[index];
}
@Override
public int size() {
return count;
}
}
}


@ -19,48 +19,34 @@
package org.elasticsearch.index.fielddata;
import org.elasticsearch.common.geo.GeoPoint;
import java.io.IOException;
final class SingletonMultiGeoPointValues extends MultiGeoPointValues {
private final GeoPointValues in;
SingletonMultiGeoPointValues(GeoPointValues in) {
this.in = in;
}
@Override
public boolean advanceExact(int doc) throws IOException {
return in.advanceExact(doc);
}
@Override
public int docValueCount() {
return 1;
}
@Override
public GeoPoint nextValue() {
return in.geoPointValue();
}
GeoPointValues getGeoPointValues() {
return in;
}
}


@ -20,49 +20,35 @@
package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.util.BytesRef;
import java.io.IOException;
final class SingletonSortedBinaryDocValues extends SortedBinaryDocValues {
private final BinaryDocValues in;
SingletonSortedBinaryDocValues(BinaryDocValues in) {
this.in = in;
}
@Override
public boolean advanceExact(int doc) throws IOException {
return in.advanceExact(doc);
}
@Override
public int docValueCount() {
return 1;
}
@Override
public BytesRef nextValue() throws IOException {
return in.binaryValue();
}
public BinaryDocValues getBinaryDocValues() {
return in;
}
}


@ -19,8 +19,7 @@
package org.elasticsearch.index.fielddata;
import java.io.IOException;
/**
* Exposes multi-valued view over a single-valued instance.
@ -29,43 +28,30 @@ import org.apache.lucene.util.Bits.MatchAllBits;
* that works for single or multi-valued types.
*/
final class SingletonSortedNumericDoubleValues extends SortedNumericDoubleValues {
private final NumericDoubleValues in;
SingletonSortedNumericDoubleValues(NumericDoubleValues in) {
this.in = in;
}
/** Return the wrapped {@link NumericDoubleValues} */
public NumericDoubleValues getNumericDoubleValues() {
return in;
}
@Override
public boolean advanceExact(int target) throws IOException {
return in.advanceExact(target);
}
@Override
public int docValueCount() {
return 1;
}
@Override
public double nextValue() throws IOException {
return in.doubleValue();
}
}
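The singleton wrappers pair with FieldData.unwrapSingleton, which recovers the original single-valued instance; a hedged round-trip sketch using the public helpers shown earlier in this change:

import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;

final class SingletonRoundTrip {
    static NumericDoubleValues roundTrip(NumericDoubleValues single) {
        SortedNumericDoubleValues multi = FieldData.singleton(single);
        // unwrapSingleton returns the wrapped instance, or null for genuinely multi-valued sources
        return FieldData.unwrapSingleton(multi);
    }
}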


@ -22,13 +22,16 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.util.NumericUtils;
import java.io.IOException;
/**
* {@link NumericDocValues} instance that wraps a {@link NumericDoubleValues}
* and converts the doubles to sortable long bits using
* {@link NumericUtils#doubleToSortableLong(double)}.
*/
final class SortableLongBitsNumericDocValues extends AbstractNumericDocValues {
private int docID = -1;
private final NumericDoubleValues values;
SortableLongBitsNumericDocValues(NumericDoubleValues values) {
@ -36,8 +39,19 @@ final class SortableLongBitsNumericDocValues extends NumericDocValues {
}
@Override
public long longValue() throws IOException {
return NumericUtils.doubleToSortableLong(values.doubleValue());
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
return values.advanceExact(target);
}
@Override
public int docID() {
return docID;
}
/** Return the wrapped values. */
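NumericUtils.doubleToSortableLong and sortableLongToDouble are existing Lucene utilities; the encoding is a lossless, order-preserving bijection, which is what lets a NumericDocValues view sort doubles. A quick sketch:

long bits = NumericUtils.doubleToSortableLong(2.5);
assert NumericUtils.sortableLongToDouble(bits) == 2.5;  // lossless roundtrip
// The mapping is monotonic, so comparing the long bits compares the doubles:
assert NumericUtils.doubleToSortableLong(-1.0) < NumericUtils.doubleToSortableLong(2.5);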

View File

@ -22,12 +22,14 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.util.NumericUtils;
import java.io.IOException;
/**
* {@link SortedNumericDocValues} instance that wraps a {@link SortedNumericDoubleValues}
* and converts the doubles to sortable long bits using
* {@link NumericUtils#doubleToSortableLong(double)}.
*/
final class SortableLongBitsSortedNumericDocValues extends SortedNumericDocValues {
final class SortableLongBitsSortedNumericDocValues extends AbstractSortedNumericDocValues {
private final SortedNumericDoubleValues values;
@ -36,18 +38,18 @@ final class SortableLongBitsSortedNumericDocValues extends SortedNumericDocValue
}
@Override
public void setDocument(int doc) {
values.setDocument(doc);
public boolean advanceExact(int target) throws IOException {
return values.advanceExact(target);
}
@Override
public long valueAt(int index) {
return NumericUtils.doubleToSortableLong(values.valueAt(index));
public long nextValue() throws IOException {
return NumericUtils.doubleToSortableLong(values.nextValue());
}
@Override
public int count() {
return values.count();
public int docValueCount() {
return values.docValueCount();
}
/** Return the wrapped values. */

View File

@ -22,6 +22,8 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.util.NumericUtils;
import java.io.IOException;
/**
* {@link NumericDoubleValues} instance that wraps a {@link NumericDocValues}
* and converts the doubles to sortable long bits using
@ -36,8 +38,13 @@ final class SortableLongBitsToNumericDoubleValues extends NumericDoubleValues {
}
@Override
public double get(int docID) {
return NumericUtils.sortableLongToDouble(values.get(docID));
public double doubleValue() throws IOException {
return NumericUtils.sortableLongToDouble(values.longValue());
}
@Override
public boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
/** Return the wrapped values. */

View File

@ -22,6 +22,8 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.util.NumericUtils;
import java.io.IOException;
/**
* {@link SortedNumericDoubleValues} instance that wraps a {@link SortedNumericDocValues}
* and converts the doubles to sortable long bits using
@ -36,18 +38,18 @@ final class SortableLongBitsToSortedNumericDoubleValues extends SortedNumericDou
}
@Override
public void setDocument(int doc) {
values.setDocument(doc);
public boolean advanceExact(int target) throws IOException {
return values.advanceExact(target);
}
@Override
public double valueAt(int index) {
return NumericUtils.sortableLongToDouble(values.valueAt(index));
public double nextValue() throws IOException {
return NumericUtils.sortableLongToDouble(values.nextValue());
}
@Override
public int count() {
return values.count();
public int docValueCount() {
return values.docValueCount();
}
/** Return the wrapped values. */

View File

@ -21,28 +21,35 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.util.BytesRef;
import java.io.IOException;
/**
* A list of per-document binary values, sorted
* according to {@link BytesRef#compareTo(BytesRef)}.
* There might be duplicates, however.
*/
// TODO: Should it expose a count (current approach) or return null when there are no more values?
public abstract class SortedBinaryDocValues {
/**
* Positions to the specified document
* Advance this instance to the given document id
* @return true if there is a value for this document
*/
public abstract void setDocument(int docId);
public abstract boolean advanceExact(int doc) throws IOException;
/**
* Return the number of values of the current document.
/**
* Retrieves the number of values for the current document. This must always
* be greater than zero.
* It is illegal to call this method after {@link #advanceExact(int)}
* returned {@code false}.
*/
public abstract int count();
public abstract int docValueCount();
/**
* Retrieve the value for the current document at the specified index.
* An index ranges from {@code 0} to {@code count()-1}.
/**
* Iterates to the next value in the current document. Do not call this more than
* {@link #docValueCount} times for the document.
* Note that the returned {@link BytesRef} might be reused across invocations.
*/
public abstract BytesRef valueAt(int index);
public abstract BytesRef nextValue() throws IOException;
}
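Since nextValue() may reuse the returned BytesRef, callers that retain values across invocations must copy them. A short sketch (values and doc are hypothetical; java.util imports assumed):

List<BytesRef> kept = new ArrayList<>();
if (values.advanceExact(doc)) {
    for (int i = 0; i < values.docValueCount(); i++) {
        kept.add(BytesRef.deepCopyOf(values.nextValue())); // copy: the returned instance is reused
    }
}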

View File

@ -21,6 +21,8 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.SortedNumericDocValues;
import java.io.IOException;
/**
* Clone of {@link SortedNumericDocValues} for double values.
*/
@ -30,21 +32,25 @@ public abstract class SortedNumericDoubleValues {
* constructors, typically implicit.) */
protected SortedNumericDoubleValues() {}
/**
* Positions to the specified document
*/
public abstract void setDocument(int doc);
/** Advance the iterator to exactly {@code target} and return whether
* {@code target} has a value.
* {@code target} must be greater than or equal to the current
* doc ID and must be a valid doc ID, i.e. &ge; 0 and
* &lt; {@code maxDoc}.*/
public abstract boolean advanceExact(int target) throws IOException;
/**
* Retrieve the value for the current document at the specified index.
* An index ranges from {@code 0} to {@code count()-1}.
/**
* Iterates to the next value in the current document. Do not call this more than
* {@link #docValueCount} times for the document.
*/
public abstract double valueAt(int index);
/**
* Retrieves the count of values for the current document.
* This may be zero if a document has no values.
public abstract double nextValue() throws IOException;
/**
* Retrieves the number of values for the current document. This must always
* be greater than zero.
* It is illegal to call this method after {@link #advanceExact(int)}
* returned {@code false}.
*/
public abstract int count();
public abstract int docValueCount();
}
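A typical consumer under the new contract, for example averaging one document's values (doubleValues and doc are hypothetical; IOException handling elided):

double sum = 0;
int count = 0;
if (doubleValues.advanceExact(doc)) {
    count = doubleValues.docValueCount();
    for (int i = 0; i < count; i++) {
        sum += doubleValues.nextValue();
    }
}
double avg = count == 0 ? 0 : sum / count; // count stays 0 only when advanceExact returned false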

View File

@ -33,6 +33,7 @@ import java.util.Arrays;
*/
public abstract class SortingBinaryDocValues extends SortedBinaryDocValues {
private int index;
protected int count;
protected BytesRefBuilder[] values;
private final Sorter sorter;
@ -73,15 +74,17 @@ public abstract class SortingBinaryDocValues extends SortedBinaryDocValues {
*/
protected final void sort() {
sorter.sort(0, count);
index = 0;
}
@Override
public final int count() {
public int docValueCount() {
return count;
}
@Override
public final BytesRef valueAt(int index) {
return values[index].get();
public final BytesRef nextValue() {
assert index < count;
return values[index++].get();
}
}

View File

@ -31,10 +31,12 @@ public abstract class SortingNumericDocValues extends SortedNumericDocValues {
private int count;
protected long[] values;
protected int valuesCursor;
private final Sorter sorter;
protected SortingNumericDocValues() {
values = new long[1];
valuesCursor = 0;
sorter = new InPlaceMergeSorter() {
@Override
@ -52,12 +54,13 @@ public abstract class SortingNumericDocValues extends SortedNumericDocValues {
}
/**
* Set the {@link #count()} and ensure that the {@link #values} array can
* Set the {@link #docValueCount()} and ensure that the {@link #values} array can
* store at least that many entries.
*/
protected final void resize(int newSize) {
count = newSize;
values = ArrayUtil.grow(values, count);
valuesCursor = 0;
}
/**
@ -69,12 +72,12 @@ public abstract class SortingNumericDocValues extends SortedNumericDocValues {
}
@Override
public final int count() {
public final int docValueCount() {
return count;
}
@Override
public final long valueAt(int index) {
return values[index];
public final long nextValue() {
return values[valuesCursor++];
}
}
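The new valuesCursor makes nextValue() stateful: resize() rewinds the cursor and each call consumes one slot. Because the array is sorted before iteration, min and max fall out of read order (sorted and doc are hypothetical):

if (sorted.advanceExact(doc)) {
    int n = sorted.docValueCount();
    long min = sorted.nextValue(); // first value read is the smallest
    long max = min;
    for (int i = 1; i < n; i++) {
        max = sorted.nextValue();  // last value read is the largest
    }
}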

View File

@ -29,11 +29,13 @@ import org.apache.lucene.util.Sorter;
public abstract class SortingNumericDoubleValues extends SortedNumericDoubleValues {
private int count;
private int valuesCursor;
protected double[] values;
private final Sorter sorter;
protected SortingNumericDoubleValues() {
values = new double[1];
valuesCursor = 0;
sorter = new InPlaceMergeSorter() {
@Override
@ -51,29 +53,30 @@ public abstract class SortingNumericDoubleValues extends SortedNumericDoubleValu
}
/**
* Set the {@link #count()} and ensure that the {@link #values} array can
* Set the {@link #docValueCount()} and ensure that the {@link #values} array can
* store at least that many entries.
*/
protected final void resize(int newSize) {
count = newSize;
values = ArrayUtil.grow(values, count);
valuesCursor = 0;
}
/**
* Sort values that are stored between offsets <code>0</code> and
* {@link #count} of {@link #values}.
* {@link #docValueCount} of {@link #values}.
*/
protected final void sort() {
sorter.sort(0, count);
}
@Override
public final int count() {
public final int docValueCount() {
return count;
}
@Override
public final double valueAt(int index) {
return values[index];
public final double nextValue() {
return values[valuesCursor++];
}
}

View File

@ -21,15 +21,15 @@ package org.elasticsearch.index.fielddata.fieldcomparator;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.fielddata.AbstractSortedDocValues;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
@ -89,7 +89,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
@Override
protected SortedDocValues getSortedDocValues(LeafReaderContext context, String field) throws IOException {
final RandomAccessOrds values = ((IndexOrdinalsFieldData) indexFieldData).load(context).getOrdinalsValues();
final SortedSetDocValues values = ((IndexOrdinalsFieldData) indexFieldData).load(context).getOrdinalsValues();
final SortedDocValues selectedValues;
if (nested == null) {
selectedValues = sortMode.select(values);
@ -113,8 +113,6 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
};
}
final BytesRef nullPlaceHolder = new BytesRef();
final BytesRef nonNullMissingBytes = missingBytes == null ? nullPlaceHolder : missingBytes;
return new FieldComparator.TermValComparator(numHits, null, sortMissingLast) {
@Override
@ -122,25 +120,15 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
final SortedBinaryDocValues values = getValues(context);
final BinaryDocValues selectedValues;
if (nested == null) {
selectedValues = sortMode.select(values, nonNullMissingBytes);
selectedValues = sortMode.select(values, missingBytes);
} else {
final BitSet rootDocs = nested.rootDocs(context);
final DocIdSetIterator innerDocs = nested.innerDocs(context);
selectedValues = sortMode.select(values, nonNullMissingBytes, rootDocs, innerDocs, context.reader().maxDoc());
selectedValues = sortMode.select(values, missingBytes, rootDocs, innerDocs, context.reader().maxDoc());
}
return selectedValues;
}
@Override
protected Bits getDocsWithField(LeafReaderContext context, String field) throws IOException {
return new Bits.MatchAllBits(context.reader().maxDoc());
}
@Override
protected boolean isNull(int doc, BytesRef term) {
return term == nullPlaceHolder;
}
@Override
public void setScorer(Scorer scorer) {
BytesRefFieldComparatorSource.this.setScorer(scorer);
@ -154,13 +142,14 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
* are replaced with the specified term
*/
// TODO: move this out if we need it for other reasons
static class ReplaceMissing extends SortedDocValues {
static class ReplaceMissing extends AbstractSortedDocValues {
final SortedDocValues in;
final int substituteOrd;
final BytesRef substituteTerm;
final boolean exists;
boolean hasValue;
ReplaceMissing(SortedDocValues in, BytesRef term) {
ReplaceMissing(SortedDocValues in, BytesRef term) throws IOException {
this.in = in;
this.substituteTerm = term;
int sub = in.lookupTerm(term);
@ -174,17 +163,29 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
}
@Override
public int getOrd(int docID) {
int ord = in.getOrd(docID);
if (ord < 0) {
public int ordValue() throws IOException {
if (hasValue == false) {
return substituteOrd;
} else if (exists == false && ord >= substituteOrd) {
}
int ord = in.ordValue();
if (exists == false && ord >= substituteOrd) {
return ord + 1;
} else {
return ord;
}
}
@Override
public boolean advanceExact(int target) throws IOException {
hasValue = in.advanceExact(target);
return true;
}
@Override
public int docID() {
return in.docID();
}
@Override
public int getValueCount() {
if (exists) {
@ -195,7 +196,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
}
@Override
public BytesRef lookupOrd(int ord) {
public BytesRef lookupOrd(int ord) throws IOException {
if (ord == substituteOrd) {
return substituteTerm;
} else if (exists == false && ord > substituteOrd) {
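To make the ord shifting concrete, a hypothetical illustration (segmentValues and docWithoutValue are made up; IOException handling elided): wrap segment values whose dictionary is {"a", "c"} with the missing term "b", which is absent from that dictionary:

SortedDocValues wrapped = new ReplaceMissing(segmentValues, new BytesRef("b"));
assert wrapped.getValueCount() == 3;                    // "b" is spliced in between "a" and "c"
wrapped.advanceExact(docWithoutValue);                  // always true: every doc now has a value
assert wrapped.ordValue() == 1;                         // missing docs map to the substitute ord
assert "c".equals(wrapped.lookupOrd(2).utf8ToString()); // original ords at/after 1 shift up by one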

View File

@ -20,22 +20,23 @@
package org.elasticsearch.index.fielddata.ordinals;
import org.apache.lucene.index.MultiDocValues.OrdinalMap;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongValues;
import org.elasticsearch.index.fielddata.AbstractRandomAccessOrds;
import java.io.IOException;
/**
* A {@link RandomAccessOrds} implementation that returns ordinals that are global.
* A {@link SortedSetDocValues} implementation that returns ordinals that are global.
*/
public class GlobalOrdinalMapping extends AbstractRandomAccessOrds {
public class GlobalOrdinalMapping extends SortedSetDocValues {
private final RandomAccessOrds values;
private final SortedSetDocValues values;
private final OrdinalMap ordinalMap;
private final LongValues mapping;
private final RandomAccessOrds[] bytesValues;
private final SortedSetDocValues[] bytesValues;
GlobalOrdinalMapping(OrdinalMap ordinalMap, RandomAccessOrds[] bytesValues, int segmentIndex) {
GlobalOrdinalMapping(OrdinalMap ordinalMap, SortedSetDocValues[] bytesValues, int segmentIndex) {
super();
this.values = bytesValues[segmentIndex];
this.bytesValues = bytesValues;
@ -53,25 +54,45 @@ public class GlobalOrdinalMapping extends AbstractRandomAccessOrds {
}
@Override
public long ordAt(int index) {
return getGlobalOrd(values.ordAt(index));
public boolean advanceExact(int target) throws IOException {
return values.advanceExact(target);
}
@Override
public void doSetDocument(int docId) {
values.setDocument(docId);
public long nextOrd() throws IOException {
long segmentOrd = values.nextOrd();
if (segmentOrd == SortedSetDocValues.NO_MORE_ORDS) {
return SortedSetDocValues.NO_MORE_ORDS;
} else {
return getGlobalOrd(segmentOrd);
}
}
@Override
public int cardinality() {
return values.cardinality();
}
@Override
public BytesRef lookupOrd(long globalOrd) {
public BytesRef lookupOrd(long globalOrd) throws IOException {
final long segmentOrd = ordinalMap.getFirstSegmentOrd(globalOrd);
int readerIndex = ordinalMap.getFirstSegmentNumber(globalOrd);
return bytesValues[readerIndex].lookupOrd(segmentOrd);
}
@Override
public int docID() {
return values.docID();
}
@Override
public int nextDoc() throws IOException {
return values.nextDoc();
}
@Override
public int advance(int target) throws IOException {
return values.advance(target);
}
@Override
public long cost() {
return values.cost();
}
}
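The canonical SortedSetDocValues loop works unchanged on the global view; nextOrd() streams segment ordinals through the mapping until NO_MORE_ORDS (global and doc are hypothetical):

if (global.advanceExact(doc)) {
    for (long ord = global.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = global.nextOrd()) {
        BytesRef term = global.lookupOrd(ord); // resolved via the first segment that has this ord
    }
}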

View File

@ -23,7 +23,7 @@ import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiDocValues.OrdinalMap;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.packed.PackedInts;
import org.elasticsearch.common.breaker.CircuitBreaker;
@ -52,12 +52,12 @@ public enum GlobalOrdinalsBuilder {
*/
public static IndexOrdinalsFieldData build(final IndexReader indexReader, IndexOrdinalsFieldData indexFieldData,
IndexSettings indexSettings, CircuitBreakerService breakerService, Logger logger,
Function<RandomAccessOrds, ScriptDocValues<?>> scriptFunction) throws IOException {
Function<SortedSetDocValues, ScriptDocValues<?>> scriptFunction) throws IOException {
assert indexReader.leaves().size() > 1;
long startTimeNS = System.nanoTime();
final AtomicOrdinalsFieldData[] atomicFD = new AtomicOrdinalsFieldData[indexReader.leaves().size()];
final RandomAccessOrds[] subs = new RandomAccessOrds[indexReader.leaves().size()];
final SortedSetDocValues[] subs = new SortedSetDocValues[indexReader.leaves().size()];
for (int i = 0; i < indexReader.leaves().size(); ++i) {
atomicFD[i] = indexFieldData.load(indexReader.leaves().get(i));
subs[i] = atomicFD[i].getOrdinalsValues();
@ -83,11 +83,11 @@ public enum GlobalOrdinalsBuilder {
assert indexReader.leaves().size() > 1;
final AtomicOrdinalsFieldData[] atomicFD = new AtomicOrdinalsFieldData[indexReader.leaves().size()];
final RandomAccessOrds[] subs = new RandomAccessOrds[indexReader.leaves().size()];
final SortedSetDocValues[] subs = new SortedSetDocValues[indexReader.leaves().size()];
for (int i = 0; i < indexReader.leaves().size(); ++i) {
atomicFD[i] = new AbstractAtomicOrdinalsFieldData(AbstractAtomicOrdinalsFieldData.DEFAULT_SCRIPT_FUNCTION) {
@Override
public RandomAccessOrds getOrdinalsValues() {
public SortedSetDocValues getOrdinalsValues() {
return DocValues.emptySortedSet();
}

View File

@ -20,7 +20,7 @@ package org.elasticsearch.index.fielddata.ordinals;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiDocValues.OrdinalMap;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.Accountable;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
@ -36,10 +36,10 @@ import java.util.function.Function;
final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFieldData {
private final Atomic[] atomicReaders;
private final Function<RandomAccessOrds, ScriptDocValues<?>> scriptFunction;
private final Function<SortedSetDocValues, ScriptDocValues<?>> scriptFunction;
InternalGlobalOrdinalsIndexFieldData(IndexSettings indexSettings, String fieldName, AtomicOrdinalsFieldData[] segmentAfd,
OrdinalMap ordinalMap, long memorySizeInBytes, Function<RandomAccessOrds, ScriptDocValues<?>> scriptFunction) {
OrdinalMap ordinalMap, long memorySizeInBytes, Function<SortedSetDocValues, ScriptDocValues<?>> scriptFunction) {
super(indexSettings, fieldName, memorySizeInBytes);
this.atomicReaders = new Atomic[segmentAfd.length];
for (int i = 0; i < segmentAfd.length; i++) {
@ -67,13 +67,13 @@ final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFiel
}
@Override
public RandomAccessOrds getOrdinalsValues() {
final RandomAccessOrds values = afd.getOrdinalsValues();
public SortedSetDocValues getOrdinalsValues() {
final SortedSetDocValues values = afd.getOrdinalsValues();
if (values.getValueCount() == ordinalMap.getValueCount()) {
// segment ordinals match global ordinals
return values;
}
final RandomAccessOrds[] bytesValues = new RandomAccessOrds[atomicReaders.length];
final SortedSetDocValues[] bytesValues = new SortedSetDocValues[atomicReaders.length];
for (int i = 0; i < bytesValues.length; i++) {
bytesValues[i] = atomicReaders[i].afd.getOrdinalsValues();
}

View File

@ -20,16 +20,17 @@
package org.elasticsearch.index.fielddata.ordinals;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongsRef;
import org.apache.lucene.util.packed.PackedInts;
import org.apache.lucene.util.packed.PackedLongValues;
import org.elasticsearch.index.fielddata.AbstractRandomAccessOrds;
import org.elasticsearch.index.fielddata.AbstractSortedDocValues;
import org.elasticsearch.index.fielddata.AbstractSortedSetDocValues;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@ -100,21 +101,25 @@ public class MultiOrdinals extends Ordinals {
}
@Override
public RandomAccessOrds ordinals(ValuesHolder values) {
public SortedSetDocValues ordinals(ValuesHolder values) {
if (multiValued) {
return new MultiDocs(this, values);
} else {
return (RandomAccessOrds) DocValues.singleton(new SingleDocs(this, values));
return (SortedSetDocValues) DocValues.singleton(new SingleDocs(this, values));
}
}
private static class SingleDocs extends SortedDocValues {
private static class SingleDocs extends AbstractSortedDocValues {
private final int valueCount;
private final PackedLongValues endOffsets;
private final PackedLongValues ords;
private final ValuesHolder values;
private int currentDoc = -1;
private long currentStartOffset;
private long currentEndOffset;
SingleDocs(MultiOrdinals ordinals, ValuesHolder values) {
this.valueCount = (int) ordinals.valueCount;
this.endOffsets = ordinals.endOffsets;
@ -123,10 +128,21 @@ public class MultiOrdinals extends Ordinals {
}
@Override
public int getOrd(int docId) {
final long startOffset = docId != 0 ? endOffsets.get(docId - 1) : 0;
final long endOffset = endOffsets.get(docId);
return startOffset == endOffset ? -1 : (int) ords.get(startOffset);
public int ordValue() {
return (int) ords.get(currentStartOffset);
}
@Override
public boolean advanceExact(int docId) throws IOException {
currentDoc = docId;
currentStartOffset = docId != 0 ? endOffsets.get(docId - 1) : 0;
currentEndOffset = endOffsets.get(docId);
return currentStartOffset != currentEndOffset;
}
@Override
public int docID() {
return currentDoc;
}
@Override
@ -141,15 +157,16 @@ public class MultiOrdinals extends Ordinals {
}
private static class MultiDocs extends AbstractRandomAccessOrds {
private static class MultiDocs extends AbstractSortedSetDocValues {
private final long valueCount;
private final PackedLongValues endOffsets;
private final PackedLongValues ords;
private long offset;
private int cardinality;
private final ValuesHolder values;
private long currentOffset;
private long currentEndOffset;
MultiDocs(MultiOrdinals ordinals, ValuesHolder values) {
this.valueCount = ordinals.valueCount;
this.endOffsets = ordinals.endOffsets;
@ -163,21 +180,19 @@ public class MultiOrdinals extends Ordinals {
}
@Override
public void doSetDocument(int docId) {
final long startOffset = docId != 0 ? endOffsets.get(docId - 1) : 0;
final long endOffset = endOffsets.get(docId);
offset = startOffset;
cardinality = (int) (endOffset - startOffset);
public boolean advanceExact(int docId) throws IOException {
currentOffset = docId != 0 ? endOffsets.get(docId - 1) : 0;
currentEndOffset = endOffsets.get(docId);
return currentOffset != currentEndOffset;
}
@Override
public int cardinality() {
return cardinality;
}
@Override
public long ordAt(int index) {
return ords.get(offset + index);
public long nextOrd() throws IOException {
if (currentOffset == currentEndOffset) {
return SortedSetDocValues.NO_MORE_ORDS;
} else {
return ords.get(currentOffset++);
}
}
@Override

View File

@ -19,7 +19,7 @@
package org.elasticsearch.index.fielddata.ordinals;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
@ -41,9 +41,9 @@ public abstract class Ordinals implements Accountable {
@Override
public abstract long ramBytesUsed();
public abstract RandomAccessOrds ordinals(ValuesHolder values);
public abstract SortedSetDocValues ordinals(ValuesHolder values);
public final RandomAccessOrds ordinals() {
public final SortedSetDocValues ordinals() {
return ordinals(NO_VALUES);
}

View File

@ -19,17 +19,14 @@
package org.elasticsearch.index.fielddata.ordinals;
import org.apache.lucene.index.FilteredTermsEnum;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.util.LongsRef;
import org.apache.lucene.util.packed.GrowableWriter;
import org.apache.lucene.util.packed.PackedInts;
@ -414,23 +411,6 @@ public final class OrdinalsBuilder implements Closeable {
}
}
/**
* A {@link TermsEnum} that iterates only highest resolution geo prefix coded terms.
*
* @see #buildFromTerms(TermsEnum)
*/
public static TermsEnum wrapGeoPointTerms(TermsEnum termsEnum) {
return new FilteredTermsEnum(termsEnum, false) {
@Override
protected AcceptStatus accept(BytesRef term) throws IOException {
// accept only the max resolution terms
// todo is this necessary?
return GeoPointField.getPrefixCodedShift(term) == GeoPointField.PRECISION_STEP * 4 ?
AcceptStatus.YES : AcceptStatus.END;
}
};
}
/**
* Returns the maximum document ID this builder can associate with an ordinal
@ -439,51 +419,12 @@ public final class OrdinalsBuilder implements Closeable {
return maxDoc;
}
/**
* A {@link TermsEnum} that iterates only full precision prefix coded 64 bit values.
*
* @see #buildFromTerms(TermsEnum)
*/
public static TermsEnum wrapNumeric64Bit(TermsEnum termsEnum) {
return new FilteredTermsEnum(termsEnum, false) {
@Override
protected AcceptStatus accept(BytesRef term) throws IOException {
// we stop accepting terms once we moved across the prefix codec terms - redundant values!
return LegacyNumericUtils.getPrefixCodedLongShift(term) == 0 ? AcceptStatus.YES : AcceptStatus.END;
}
};
}
/**
* A {@link TermsEnum} that iterates only full precision prefix coded 32 bit values.
*
* @see #buildFromTerms(TermsEnum)
*/
public static TermsEnum wrapNumeric32Bit(TermsEnum termsEnum) {
return new FilteredTermsEnum(termsEnum, false) {
@Override
protected AcceptStatus accept(BytesRef term) throws IOException {
// we stop accepting terms once we moved across the prefix codec terms - redundant values!
return LegacyNumericUtils.getPrefixCodedIntShift(term) == 0 ? AcceptStatus.YES : AcceptStatus.END;
}
};
}
/**
* This method iterates all terms in the given {@link TermsEnum} and
* associates each term's ordinal with the term's documents. The caller must
* exhaust the returned {@link BytesRefIterator} which returns all values
* where the first returned value is associated with the ordinal <tt>1</tt>
* etc.
* <p>
* If the {@link TermsEnum} contains prefix coded numerical values the terms
* enum should be wrapped with either {@link #wrapNumeric32Bit(TermsEnum)}
* or {@link #wrapNumeric64Bit(TermsEnum)} depending on its precision. If
* the {@link TermsEnum} is not wrapped the returned
* {@link BytesRefIterator} will contain partial precision terms rather than
* only full-precision terms.
* </p>
*/
public BytesRefIterator buildFromTerms(final TermsEnum termsEnum) throws IOException {
return new BytesRefIterator() {

View File

@ -20,14 +20,15 @@
package org.elasticsearch.index.fielddata.ordinals;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.PackedInts;
import org.elasticsearch.index.fielddata.AbstractSortedDocValues;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
@ -57,16 +58,19 @@ public class SinglePackedOrdinals extends Ordinals {
}
@Override
public RandomAccessOrds ordinals(ValuesHolder values) {
return (RandomAccessOrds) DocValues.singleton(new Docs(this, values));
public SortedSetDocValues ordinals(ValuesHolder values) {
return (SortedSetDocValues) DocValues.singleton(new Docs(this, values));
}
private static class Docs extends SortedDocValues {
private static class Docs extends AbstractSortedDocValues {
private final int maxOrd;
private final PackedInts.Reader reader;
private final ValuesHolder values;
private int currentDoc = -1;
private int currentOrd;
Docs(SinglePackedOrdinals parent, ValuesHolder values) {
this.maxOrd = parent.valueCount;
this.reader = parent.reader;
@ -84,8 +88,20 @@ public class SinglePackedOrdinals extends Ordinals {
}
@Override
public int getOrd(int docID) {
return (int) (reader.get(docID) - 1);
public int ordValue() {
return currentOrd;
}
@Override
public boolean advanceExact(int docID) throws IOException {
currentDoc = docID;
currentOrd = (int) (reader.get(docID) - 1);
return currentOrd != -1;
}
@Override
public int docID() {
return currentDoc;
}
}
}
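The packed reader stores ord + 1 so that 0 can encode "no value". A small sketch of the same encoding using Lucene's PackedInts directly (the stored values are made up):

PackedInts.Mutable packed = PackedInts.getMutable(2, 8, PackedInts.COMPACT);
packed.set(0, 0);                    // doc 0: no value
packed.set(1, 3);                    // doc 1: ord 2, stored as ord + 1
int ord = (int) (packed.get(1) - 1); // 2; a stored 0 decodes to -1, i.e. missing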

View File

@ -59,7 +59,7 @@ public abstract class AbstractAtomicGeoPointFieldData implements AtomicGeoPointF
@Override
public MultiGeoPointValues getGeoPointValues() {
return FieldData.emptyMultiGeoPoints(maxDoc);
return FieldData.emptyMultiGeoPoints();
}
};
}

View File

@ -20,7 +20,7 @@
package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.Accountable;
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
import org.elasticsearch.index.fielddata.FieldData;
@ -34,13 +34,13 @@ import java.util.function.Function;
public abstract class AbstractAtomicOrdinalsFieldData implements AtomicOrdinalsFieldData {
public static final Function<RandomAccessOrds, ScriptDocValues<?>> DEFAULT_SCRIPT_FUNCTION =
((Function<RandomAccessOrds, SortedBinaryDocValues>) FieldData::toString)
public static final Function<SortedSetDocValues, ScriptDocValues<?>> DEFAULT_SCRIPT_FUNCTION =
((Function<SortedSetDocValues, SortedBinaryDocValues>) FieldData::toString)
.andThen(ScriptDocValues.Strings::new);
private final Function<RandomAccessOrds, ScriptDocValues<?>> scriptFunction;
private final Function<SortedSetDocValues, ScriptDocValues<?>> scriptFunction;
protected AbstractAtomicOrdinalsFieldData(Function<RandomAccessOrds, ScriptDocValues<?>> scriptFunction) {
protected AbstractAtomicOrdinalsFieldData(Function<SortedSetDocValues, ScriptDocValues<?>> scriptFunction) {
this.scriptFunction = scriptFunction;
}
@ -72,7 +72,7 @@ public abstract class AbstractAtomicOrdinalsFieldData implements AtomicOrdinalsF
}
@Override
public RandomAccessOrds getOrdinalsValues() {
public SortedSetDocValues getOrdinalsValues() {
return DocValues.emptySortedSet();
}
};

View File

@ -28,6 +28,7 @@ import org.elasticsearch.index.fielddata.AtomicParentChildFieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Set;
@ -46,16 +47,28 @@ abstract class AbstractAtomicParentChildFieldData implements AtomicParentChildFi
public final SortedBinaryDocValues getBytesValues() {
return new SortedBinaryDocValues() {
private final SortedDocValues[] perTypeValues;
private final BytesRef[] terms = new BytesRef[2];
private int count;
private int termsCursor;
{
Set<String> types = types();
perTypeValues = new SortedDocValues[types.size()];
int i = 0;
for (String type : types) {
perTypeValues[i++] = getOrdinalsValues(type);
}
}
@Override
public void setDocument(int docId) {
public boolean advanceExact(int docId) throws IOException {
count = 0;
for (String type : types()) {
final SortedDocValues values = getOrdinalsValues(type);
final int ord = values.getOrd(docId);
if (ord >= 0) {
termsCursor = 0;
for (SortedDocValues values : perTypeValues) {
if (values.advanceExact(docId)) {
final int ord = values.ordValue();
terms[count++] = values.lookupOrd(ord);
}
}
@ -69,16 +82,17 @@ abstract class AbstractAtomicParentChildFieldData implements AtomicParentChildFi
count = 1;
}
}
return count != 0;
}
@Override
public int count() {
public int docValueCount() {
return count;
}
@Override
public BytesRef valueAt(int index) {
return terms[index];
public BytesRef nextValue() throws IOException {
return terms[termsCursor++];
}
};
}

View File

@ -1,83 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.SortField;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;
public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFieldData implements IndexGeoPointFieldData {
AbstractGeoPointDVIndexFieldData(Index index, String fieldName) {
super(index, fieldName);
}
@Override
public SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) {
throw new IllegalArgumentException("can't sort on geo_point field without using specific sorting feature, like geo_distance");
}
/**
* Lucene 5.4 GeoPointFieldType
*/
public static class GeoPointDVIndexFieldData extends AbstractGeoPointDVIndexFieldData {
public GeoPointDVIndexFieldData(Index index, String fieldName) {
super(index, fieldName);
}
@Override
public AtomicGeoPointFieldData load(LeafReaderContext context) {
try {
return new GeoPointDVAtomicFieldData(DocValues.getSortedNumeric(context.reader(), fieldName));
} catch (IOException e) {
throw new IllegalStateException("Cannot load doc values", e);
}
}
@Override
public AtomicGeoPointFieldData loadDirect(LeafReaderContext context) throws Exception {
return load(context);
}
}
public static class Builder implements IndexFieldData.Builder {
@Override
public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
// Ignore breaker
return new GeoPointDVIndexFieldData(indexSettings.getIndex(), fieldType.name());
}
}
}

View File

@ -1,117 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.search.SortField;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.LegacyNumericUtils;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;
abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldData<AtomicGeoPointFieldData> implements IndexGeoPointFieldData {
protected abstract static class BaseGeoPointTermsEnum {
protected final BytesRefIterator termsEnum;
protected BaseGeoPointTermsEnum(BytesRefIterator termsEnum) {
this.termsEnum = termsEnum;
}
}
protected static class GeoPointTermsEnum extends BaseGeoPointTermsEnum {
private final GeoPointField.TermEncoding termEncoding;
protected GeoPointTermsEnum(BytesRefIterator termsEnum, GeoPointField.TermEncoding termEncoding) {
super(termsEnum);
this.termEncoding = termEncoding;
}
public Long next() throws IOException {
final BytesRef term = termsEnum.next();
if (term == null) {
return null;
}
if (termEncoding == GeoPointField.TermEncoding.PREFIX) {
return GeoPointField.prefixCodedToGeoCoded(term);
} else if (termEncoding == GeoPointField.TermEncoding.NUMERIC) {
return LegacyNumericUtils.prefixCodedToLong(term);
}
throw new IllegalArgumentException("GeoPoint.TermEncoding should be one of: " + GeoPointField.TermEncoding.PREFIX
+ " or " + GeoPointField.TermEncoding.NUMERIC + " found: " + termEncoding);
}
}
protected static class GeoPointTermsEnumLegacy extends BaseGeoPointTermsEnum {
private final GeoPoint next;
private final CharsRefBuilder spare;
protected GeoPointTermsEnumLegacy(BytesRefIterator termsEnum) {
super(termsEnum);
next = new GeoPoint();
spare = new CharsRefBuilder();
}
public GeoPoint next() throws IOException {
final BytesRef term = termsEnum.next();
if (term == null) {
return null;
}
spare.copyUTF8Bytes(term);
int commaIndex = -1;
for (int i = 0; i < spare.length(); i++) {
if (spare.charAt(i) == ',') { // saves a string creation
commaIndex = i;
break;
}
}
if (commaIndex == -1) {
assert false;
return next.reset(0, 0);
}
final double lat = Double.parseDouble(new String(spare.chars(), 0, commaIndex));
final double lon = Double.parseDouble(new String(spare.chars(), commaIndex + 1, spare.length() - (commaIndex + 1)));
return next.reset(lat, lon);
}
}
AbstractIndexGeoPointFieldData(IndexSettings indexSettings, String fieldName, IndexFieldDataCache cache) {
super(indexSettings, fieldName, cache);
}
@Override
public SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) {
throw new IllegalArgumentException("can't sort on geo_point field without using specific sorting feature, like geo_distance");
}
@Override
protected AtomicGeoPointFieldData empty(int maxDoc) {
return AbstractAtomicGeoPointFieldData.empty(maxDoc);
}
}

View File

@ -19,13 +19,11 @@
package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.SortField;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
@ -38,8 +36,6 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;
public abstract class AbstractLatLonPointDVIndexFieldData extends DocValuesIndexFieldData
implements IndexGeoPointFieldData {
AbstractLatLonPointDVIndexFieldData(Index index, String fieldName) {
@ -58,16 +54,12 @@ public abstract class AbstractLatLonPointDVIndexFieldData extends DocValuesIndex
@Override
public AtomicGeoPointFieldData load(LeafReaderContext context) {
try {
LeafReader reader = context.reader();
FieldInfo info = reader.getFieldInfos().fieldInfo(fieldName);
if (info != null) {
checkCompatible(info);
}
return new LatLonPointDVAtomicFieldData(DocValues.getSortedNumeric(reader, fieldName));
} catch (IOException e) {
throw new IllegalStateException("Cannot load doc values", e);
LeafReader reader = context.reader();
FieldInfo info = reader.getFieldInfos().fieldInfo(fieldName);
if (info != null) {
checkCompatible(info);
}
return new LatLonPointDVAtomicFieldData(reader, fieldName);
}
@Override

View File

@ -67,7 +67,7 @@ abstract class AtomicDoubleFieldData implements AtomicNumericFieldData {
@Override
public SortedNumericDoubleValues getDoubleValues() {
return FieldData.emptySortedNumericDoubles(maxDoc);
return FieldData.emptySortedNumericDoubles();
}
@Override

View File

@ -23,7 +23,6 @@ import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Bits;
import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues;
@ -49,8 +48,7 @@ public class BinaryDVAtomicFieldData implements AtomicFieldData {
public SortedBinaryDocValues getBytesValues() {
try {
final BinaryDocValues values = DocValues.getBinary(reader, field);
final Bits docsWithField = DocValues.getDocsWithField(reader, field);
return FieldData.singleton(values, docsWithField);
return FieldData.singleton(values);
} catch (IOException e) {
throw new IllegalStateException("Cannot load doc values", e);
}
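With iterator-based doc values, a missing document is reported by advanceExact() itself, so the separate docsWithField bitset is gone and loading collapses to (reader and field as in the surrounding class):

final BinaryDocValues values = DocValues.getBinary(reader, field);
return FieldData.singleton(values); // no Bits parameter anymore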

Some files were not shown because too many files have changed in this diff.