Upgrade to latest Lucene snapshot (#33505)

* LeafCollector.setScorer() now takes a Scorable
* Scorers may not have null Weights
* IndexWriter.getFlushingBytes() reports how much memory is being used by IW threads writing to disk
Alan Woodward authored 2018-09-10 20:51:55 +01:00, committed by GitHub
parent 5f4244755e
commit 39c3234c2f
88 changed files with 207 additions and 279 deletions
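
Most of the non-jar changes below follow the first bullet: collectors and scripts that used to receive an org.apache.lucene.search.Scorer in setScorer() now receive a Scorable, which provides score() and docID() but not the iterator or Weight of a full Scorer. The following is an illustrative sketch only, not part of this commit (the class name ExampleMaxScoreCollector is made up); it shows what a collector written against the new contract looks like, mirroring the MaxScoreCollector change further down in this diff:

// Illustrative sketch, not from this commit: the post-upgrade LeafCollector
// contract, where setScorer() takes a Scorable rather than a Scorer.
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.SimpleCollector;

import java.io.IOException;

public class ExampleMaxScoreCollector extends SimpleCollector {
    private Scorable scorer;                          // was a Scorer before the upgrade
    private float maxScore = Float.NEGATIVE_INFINITY;

    @Override
    public void setScorer(Scorable scorer) throws IOException {
        this.scorer = scorer;                         // Scorable exposes score() and docID()
    }

    @Override
    public void collect(int doc) throws IOException {
        maxScore = Math.max(maxScore, scorer.score());
    }

    @Override
    public ScoreMode scoreMode() {
        return ScoreMode.COMPLETE;                    // scores are required for this collector
    }

    public float getMaxScore() {
        return maxScore;
    }
}

The second bullet shows up in the tests: a Scorer can no longer be built with a null Weight, so MinScoreScorerTests below constructs a dummy Weight over a MatchAllDocsQuery instead of passing null. The third bullet, IndexWriter.getFlushingBytes(), is not visible in the hunks shown here.
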

View File

@ -1,5 +1,5 @@
elasticsearch = 7.0.0-alpha1
lucene = 8.0.0-snapshot-4d78db26be
lucene = 8.0.0-snapshot-66c671ea80
# optional dependencies
spatial4j = 0.7

View File

@ -1 +0,0 @@
5f469e925dde5dff81b9d56f465a8babb56cd26b

View File

@ -0,0 +1 @@
58b9db095c569b4c4da491810f14e1429878b594

View File

@ -26,7 +26,7 @@ import org.apache.lucene.expressions.js.VariableContext;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.DoubleConstValueSource;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.SortField;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.Nullable;
@ -336,7 +336,7 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE
}
@Override
public void setScorer(Scorer scorer) {
public void setScorer(Scorable scorer) {
script.setScorer(scorer);
}

View File

@ -19,39 +19,25 @@
package org.elasticsearch.painless;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import java.io.IOException;
import java.util.Collections;
public class ScoreTests extends ScriptTestCase {
/** Most of a dummy scorer impl that requires overriding just score(). */
abstract class MockScorer extends Scorer {
MockScorer() {
super(null);
}
abstract class MockScorer extends Scorable {
@Override
public int docID() {
return 0;
}
@Override
public DocIdSetIterator iterator() {
throw new UnsupportedOperationException();
}
}
public void testScoreWorks() {
assertEquals(2.5, exec("_score", Collections.emptyMap(), Collections.emptyMap(),
new MockScorer() {
@Override
public float score() throws IOException {
return 2.5f;
}
@Override
public float getMaxScore(int upTo) throws IOException {
public float score() {
return 2.5f;
}
},
@ -62,14 +48,9 @@ public class ScoreTests extends ScriptTestCase {
assertEquals(3.5, exec("3.5", Collections.emptyMap(), Collections.emptyMap(),
new MockScorer() {
@Override
public float score() throws IOException {
public float score() {
throw new AssertionError("score() should not be called");
}
@Override
public float getMaxScore(int upTo) throws IOException {
return Float.MAX_VALUE;
}
},
true));
}
@ -79,17 +60,12 @@ public class ScoreTests extends ScriptTestCase {
new MockScorer() {
private boolean used = false;
@Override
public float score() throws IOException {
public float score() {
if (used == false) {
return 4.5f;
}
throw new AssertionError("score() should not be called twice");
}
@Override
public float getMaxScore(int upTo) throws IOException {
return 4.5f;
}
},
true));
}

View File

@ -20,7 +20,7 @@
package org.elasticsearch.painless;
import junit.framework.AssertionFailedError;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.elasticsearch.common.lucene.ScorerAware;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.painless.antlr.Walker;
@ -91,7 +91,7 @@ public abstract class ScriptTestCase extends ESTestCase {
}
/** Compiles and returns the result of {@code script} with access to {@code vars} and compile-time parameters */
public Object exec(String script, Map<String, Object> vars, Map<String,String> compileParams, Scorer scorer, boolean picky) {
public Object exec(String script, Map<String, Object> vars, Map<String,String> compileParams, Scorable scorer, boolean picky) {
// test for ambiguity errors before running the actual script if picky is true
if (picky) {
ScriptClassInfo scriptClassInfo = new ScriptClassInfo(PAINLESS_LOOKUP, GenericElasticsearchScript.class);

View File

@ -19,13 +19,11 @@
package org.elasticsearch.painless;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptedMetricAggContexts;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@ -66,20 +64,12 @@ public class ScriptedMetricAggContextsTests extends ScriptTestCase {
Map<String, Object> params = new HashMap<>();
Map<String, Object> state = new HashMap<>();
Scorer scorer = new Scorer(null) {
Scorable scorer = new Scorable() {
@Override
public int docID() { return 0; }
@Override
public float score() { return 0.5f; }
@Override
public DocIdSetIterator iterator() { return null; }
@Override
public float getMaxScore(int upTo) throws IOException {
return 0.5f;
}
};
ScriptedMetricAggContexts.MapScript.LeafFactory leafFactory = factory.newFactory(params, state, null);

View File

@ -21,9 +21,9 @@ package org.elasticsearch.join.aggregations;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
@ -148,7 +148,17 @@ public class ParentToChildrenAggregator extends BucketsAggregator implements Sin
final SortedSetDocValues globalOrdinals = valuesSource.globalOrdinalsValues(ctx);
// Set the scorer, since we now replay only the child docIds
sub.setScorer(new ConstantScoreScorer(null, 1f, childDocsIter));
sub.setScorer(new Scorable() {
@Override
public float score() {
return 1f;
}
@Override
public int docID() {
return childDocsIter.docID();
}
});
final Bits liveDocs = ctx.reader().getLiveDocs();
for (int docId = childDocsIter

View File

@ -1 +0,0 @@
97a3758487272ba4d15720b0ca15b0f980310c89

View File

@ -0,0 +1 @@
f009ee188453aabae77fad55aea08bc60323bb3e

View File

@ -1 +0,0 @@
12ed739794cd317754684308ddc5bdbdcc46cdde

View File

@ -0,0 +1 @@
af3d2ae975e3560c1ea69222d6c46072857952ba

View File

@ -1 +0,0 @@
4da6e5c17a17f0a9a99b518ea9985ea06996b63b

View File

@ -0,0 +1 @@
f17bc5e532d9dc2786a13bd577df64023d1baae1

View File

@ -1 +0,0 @@
a36b2db18a2a22966ab0bf9fced775f22dd7029d

View File

@ -0,0 +1 @@
7ad89d33c1cd960c91afa05b22024137fe108567

View File

@ -1 +0,0 @@
5f1d360a47d2fd166e970d17c46b284830e64258

View File

@ -0,0 +1 @@
3f11fb254256d74e911b953994b47e7a95915954

View File

@ -1 +0,0 @@
b07883b5e988d1d991503aa49d9b59059518825d

View File

@ -0,0 +1 @@
b2348d140ef0c3e674cb81173f61c5e5f430facb

View File

@ -1 +0,0 @@
1b46b3ee62932de7ba7b670820a13eb973ec5777

View File

@ -0,0 +1 @@
485a0c3be58a5942b4a28639f1019181ef4cd0e3

View File

@ -1 +0,0 @@
fa8e0fbef3e3fcf49ace4a4153580070def770eb

View File

@ -0,0 +1 @@
a22f1c6749ca4a3fbc9b330161a8ea3301cac8de

View File

@ -1 +0,0 @@
3d636541581e338a1be7e3e176aac73d7ae0b323

View File

@ -0,0 +1 @@
41ce415b93d75662cc2e790d09120bc0234d6b1b

View File

@ -1 +0,0 @@
126faacb28d1b8cc1ab81d702973d057892120d1

View File

@ -0,0 +1 @@
06c1e4fa838807059d27aaf5405cfdfe7303369c

View File

@ -1 +0,0 @@
abd514ec02837f48b8c478287fde7cc5d6439ada

View File

@ -0,0 +1 @@
5b0a019a938deb58160647e7640b348bb99c10a8

View File

@ -1 +0,0 @@
778e87a263184b8ddcbb4ef9d244467933f32993

View File

@ -0,0 +1 @@
4d813f3ba0ddd56bac728edb88ed8875e6acfd18

View File

@ -1 +0,0 @@
96aff29ad966204c73f8dd98d8116f09e34b6ebd

View File

@ -0,0 +1 @@
00c7e20b6a35ebecc875dd52bfb324967c5555d6

View File

@ -1 +0,0 @@
e72e2accebb1277c57dfe21bc011195eed91dbfd

View File

@ -0,0 +1 @@
e4dbff54a0befdc7d67c0f39890586c220df718e

View File

@ -1 +0,0 @@
bf25587ebf6823781f5d7acffd7d65c46c21cb27

View File

@ -0,0 +1 @@
74d17f6bdf1fa4d499f02904432aa3b1024bde88

View File

@ -1 +0,0 @@
6cad42923bcb6e1c6060ae1cbab574646e8c808e

View File

@ -0,0 +1 @@
bec78be38f777765146c35f65e247909563d6814

View File

@ -1 +0,0 @@
e5841d7e877e51bbd2d325709353f5ab7e94b49a

View File

@ -0,0 +1 @@
74b76f8fed44400bc2a5d938ca2611a97b4d7a7c

View File

@ -1 +0,0 @@
fefe17f6ac0c7d505c5051e96d0f4916fec2bf9e

View File

@ -0,0 +1 @@
2f65fa728b3bc924db6538f4c3caf2fcd25451cf

View File

@ -1 +0,0 @@
22b0a9d9fb675f7c82a7a2b18f593f3278b40f11

View File

@ -0,0 +1 @@
916a91f0cab2d3684707c59e9adca7b3030b2c66

View File

@ -1 +0,0 @@
bd6449cc67a36891f6b3201489c5ed44d795fab0

View File

@ -0,0 +1 @@
eb3e630d6013e41838fb277943ce921f256f1c61

View File

@ -1 +0,0 @@
5e2a8b3e9e19ad61fcbd27a138cf55f2d6cbfb2d

View File

@ -0,0 +1 @@
fa10ff14eab2f579cff2f0fa33c9c7f3b24daf12

View File

@ -1 +0,0 @@
bd5931d1d5ca3f84565534182881565a44aeb72a

View File

@ -0,0 +1 @@
3dd65ca6612b4f98530847b99ab348fd83055fdf

View File

@ -19,9 +19,9 @@
package org.apache.lucene.search.grouping;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TotalHits;
@ -44,7 +44,7 @@ public final class CollapsingTopDocsCollector<T> extends FirstPassGroupingCollec
protected final String collapseField;
protected final Sort sort;
protected Scorer scorer;
protected Scorable scorer;
private int totalHitCount;
@ -102,7 +102,7 @@ public final class CollapsingTopDocsCollector<T> extends FirstPassGroupingCollec
}
@Override
public void setScorer(Scorer scorer) throws IOException {
public void setScorer(Scorable scorer) throws IOException {
super.setScorer(scorer);
this.scorer = scorer;
}

View File

@ -19,8 +19,8 @@
package org.elasticsearch.action.search;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SimpleCollector;
import java.io.IOException;
@ -30,12 +30,12 @@ import java.io.IOException;
*/
public class MaxScoreCollector extends SimpleCollector {
private Scorer scorer;
private Scorable scorer;
private float maxScore = Float.NEGATIVE_INFINITY;
private boolean hasHits = false;
@Override
public void setScorer(Scorer scorer) {
public void setScorer(Scorable scorer) {
this.scorer = scorer;
}

View File

@ -745,31 +745,6 @@ public class Lucene {
}
}
/**
* Return a Scorer that throws an ElasticsearchIllegalStateException
* on all operations with the given message.
*/
public static Scorer illegalScorer(final String message) {
return new Scorer(null) {
@Override
public float score() throws IOException {
throw new IllegalStateException(message);
}
@Override
public int docID() {
throw new IllegalStateException(message);
}
@Override
public DocIdSetIterator iterator() {
throw new IllegalStateException(message);
}
@Override
public float getMaxScore(int upTo) throws IOException {
throw new IllegalStateException(message);
}
};
}
private static final class CommitPoint extends IndexCommit {
private String segmentsFileName;
private final Collection<String> files;

View File

@ -22,9 +22,9 @@ package org.elasticsearch.common.lucene;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreCachingWrappingScorer;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SimpleCollector;
import java.io.IOException;
@ -34,7 +34,7 @@ public class MinimumScoreCollector extends SimpleCollector {
private final Collector collector;
private final float minimumScore;
private Scorer scorer;
private Scorable scorer;
private LeafCollector leafCollector;
public MinimumScoreCollector(Collector collector, float minimumScore) {
@ -43,7 +43,7 @@ public class MinimumScoreCollector extends SimpleCollector {
}
@Override
public void setScorer(Scorer scorer) throws IOException {
public void setScorer(Scorable scorer) throws IOException {
if (!(scorer instanceof ScoreCachingWrappingScorer)) {
scorer = new ScoreCachingWrappingScorer(scorer);
}

View File

@ -18,10 +18,10 @@
*/
package org.elasticsearch.common.lucene;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
public interface ScorerAware {
void setScorer(Scorer scorer);
void setScorer(Scorable scorer);
}

View File

@ -19,14 +19,13 @@
package org.elasticsearch.common.lucene.search.function;
import java.io.IOException;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.ScoreCachingWrappingScorer;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.search.Weight;
import java.io.IOException;
/** A {@link Scorer} that filters out documents that have a score that is
* lower than a configured constant. */
final class MinScoreScorer extends Scorer {
@ -34,13 +33,10 @@ final class MinScoreScorer extends Scorer {
private final Scorer in;
private final float minScore;
private float curScore;
MinScoreScorer(Weight weight, Scorer scorer, float minScore) {
super(weight);
if (scorer instanceof ScoreCachingWrappingScorer == false) {
// when minScore is set, scores might be requested twice: once
// to verify the match, and once by the collector
scorer = new ScoreCachingWrappingScorer(scorer);
}
this.in = scorer;
this.minScore = minScore;
}
@ -55,8 +51,8 @@ final class MinScoreScorer extends Scorer {
}
@Override
public float score() throws IOException {
return in.score();
public float score() {
return curScore;
}
@Override
@ -87,7 +83,8 @@ final class MinScoreScorer extends Scorer {
if (inTwoPhase != null && inTwoPhase.matches() == false) {
return false;
}
return in.score() >= minScore;
curScore = in.score();
return curScore >= minScore;
}
@Override

View File

@ -20,9 +20,8 @@
package org.elasticsearch.common.lucene.search.function;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.elasticsearch.script.ExplainableSearchScript;
import org.elasticsearch.script.ScoreScript;
import org.elasticsearch.script.Script;
@ -32,33 +31,19 @@ import java.util.Objects;
public class ScriptScoreFunction extends ScoreFunction {
static final class CannedScorer extends Scorer {
static final class CannedScorer extends Scorable {
protected int docid;
protected float score;
CannedScorer() {
super(null);
}
@Override
public int docID() {
return docid;
}
@Override
public float score() throws IOException {
public float score() {
return score;
}
@Override
public DocIdSetIterator iterator() {
throw new UnsupportedOperationException();
}
@Override
public float getMaxScore(int upTo) throws IOException {
throw new UnsupportedOperationException();
}
}
private final Script sScript;

View File

@ -25,7 +25,7 @@ import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.BytesRef;
@ -71,7 +71,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
return indexFieldData.load(context).getBytesValues();
}
protected void setScorer(Scorer scorer) {}
protected void setScorer(Scorable scorer) {}
@Override
public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) {
@ -101,7 +101,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
}
@Override
public void setScorer(Scorer scorer) {
public void setScorer(Scorable scorer) {
BytesRefFieldComparatorSource.this.setScorer(scorer);
}
@ -125,7 +125,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
}
@Override
public void setScorer(Scorer scorer) {
public void setScorer(Scorable scorer) {
BytesRefFieldComparatorSource.this.setScorer(scorer);
}

View File

@ -23,7 +23,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.common.Nullable;
@ -57,7 +57,7 @@ public class DoubleValuesComparatorSource extends IndexFieldData.XFieldComparato
return indexFieldData.load(context).getDoubleValues();
}
protected void setScorer(Scorer scorer) {}
protected void setScorer(Scorable scorer) {}
@Override
public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) {
@ -81,7 +81,7 @@ public class DoubleValuesComparatorSource extends IndexFieldData.XFieldComparato
return selectedValues.getRawDoubleValues();
}
@Override
public void setScorer(Scorer scorer) {
public void setScorer(Scorable scorer) {
DoubleValuesComparatorSource.this.setScorer(scorer);
}
};

View File

@ -19,7 +19,7 @@
package org.elasticsearch.script;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.elasticsearch.search.lookup.DocLookup;
import java.io.IOException;
@ -32,9 +32,9 @@ import java.io.IOException;
*/
public final class ScoreAccessor extends Number implements Comparable<Number> {
Scorer scorer;
Scorable scorer;
public ScoreAccessor(Scorer scorer) {
public ScoreAccessor(Scorable scorer) {
this.scorer = scorer;
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.script;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;
@ -33,40 +33,40 @@ import java.util.function.DoubleSupplier;
* A script used for adjusting the score on a per document basis.
*/
public abstract class ScoreScript {
public static final String[] PARAMETERS = new String[]{};
/** The generic runtime parameters for the script. */
private final Map<String, Object> params;
/** A leaf lookup for the bound segment this script will operate on. */
private final LeafSearchLookup leafLookup;
private DoubleSupplier scoreSupplier = () -> 0.0;
public ScoreScript(Map<String, Object> params, SearchLookup lookup, LeafReaderContext leafContext) {
this.params = params;
this.leafLookup = lookup.getLeafSearchLookup(leafContext);
}
public abstract double execute();
/** Return the parameters for this script. */
public Map<String, Object> getParams() {
return params;
}
/** The doc lookup for the Lucene segment this script was created for. */
public final Map<String, ScriptDocValues<?>> getDoc() {
return leafLookup.doc();
}
/** Set the current document to run the script on next. */
public void setDocument(int docid) {
leafLookup.setDocument(docid);
}
public void setScorer(Scorer scorer) {
public void setScorer(Scorable scorer) {
this.scoreSupplier = () -> {
try {
return scorer.score();
@ -75,28 +75,28 @@ public abstract class ScoreScript {
}
};
}
public double get_score() {
return scoreSupplier.getAsDouble();
}
/** A factory to construct {@link ScoreScript} instances. */
public interface LeafFactory {
/**
* Return {@code true} if the script needs {@code _score} calculated, or {@code false} otherwise.
*/
boolean needs_score();
ScoreScript newInstance(LeafReaderContext ctx) throws IOException;
}
/** A factory to construct stateful {@link ScoreScript} factories for a specific index. */
public interface Factory {
ScoreScript.LeafFactory newFactory(Map<String, Object> params, SearchLookup lookup);
}
public static final ScriptContext<ScoreScript.Factory> CONTEXT = new ScriptContext<>("score", ScoreScript.Factory.class);
}

View File

@ -20,7 +20,7 @@
package org.elasticsearch.script;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.search.lookup.LeafSearchLookup;
@ -66,7 +66,7 @@ public class ScriptedMetricAggContexts {
public abstract static class MapScript extends ParamsAndStateBase {
private final LeafSearchLookup leafLookup;
private Scorer scorer;
private Scorable scorer;
public MapScript(Map<String, Object> params, Map<String, Object> state, SearchLookup lookup, LeafReaderContext leafContext) {
super(params, state);
@ -86,7 +86,7 @@ public class ScriptedMetricAggContexts {
}
}
public void setScorer(Scorer scorer) {
public void setScorer(Scorable scorer) {
this.scorer = scorer;
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.script;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.lucene.ScorerAware;
import org.elasticsearch.search.lookup.LeafDocLookup;
@ -50,7 +50,7 @@ public abstract class SearchScript implements ScorerAware, ExecutableScript {
private final LeafSearchLookup leafLookup;
/** A scorer that will return the score for the current document when the script is run. */
private Scorer scorer;
private Scorable scorer;
public SearchScript(Map<String, Object> params, SearchLookup lookup, LeafReaderContext leafContext) {
this.params = params;
@ -83,7 +83,7 @@ public abstract class SearchScript implements ScorerAware, ExecutableScript {
}
@Override
public void setScorer(Scorer scorer) {
public void setScorer(Scorable scorer) {
this.scorer = scorer;
}

View File

@ -20,8 +20,8 @@
package org.elasticsearch.search.aggregations;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.ObjectArray;
@ -110,10 +110,10 @@ public abstract class AggregatorFactory<AF extends AggregatorFactory<AF>> {
collectors.set(i, null);
}
return new LeafBucketCollector() {
Scorer scorer;
Scorable scorer;
@Override
public void setScorer(Scorer scorer) throws IOException {
public void setScorer(Scorable scorer) throws IOException {
this.scorer = scorer;
}

View File

@ -20,7 +20,7 @@
package org.elasticsearch.search.aggregations;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import java.io.IOException;
import java.util.stream.Stream;
@ -33,7 +33,7 @@ public abstract class LeafBucketCollector implements LeafCollector {
public static final LeafBucketCollector NO_OP_COLLECTOR = new LeafBucketCollector() {
@Override
public void setScorer(Scorer arg0) throws IOException {
public void setScorer(Scorable arg0) throws IOException {
// no-op
}
@Override
@ -55,7 +55,7 @@ public abstract class LeafBucketCollector implements LeafCollector {
return new LeafBucketCollector() {
@Override
public void setScorer(Scorer s) throws IOException {
public void setScorer(Scorable s) throws IOException {
for (LeafBucketCollector c : colls) {
c.setScorer(s);
}
@ -83,7 +83,7 @@ public abstract class LeafBucketCollector implements LeafCollector {
}
@Override
public void setScorer(Scorer scorer) throws IOException {
public void setScorer(Scorable scorer) throws IOException {
// no-op by default
}
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.search.aggregations;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.elasticsearch.common.lucene.ScorerAware;
import java.io.IOException;
@ -48,7 +48,7 @@ public class LeafBucketCollectorBase extends LeafBucketCollector {
}
@Override
public void setScorer(Scorer s) throws IOException {
public void setScorer(Scorable s) throws IOException {
sub.setScorer(s);
if (values != null) {
values.setScorer(s);

View File

@ -24,9 +24,9 @@ import org.apache.lucene.search.CollectionTerminatedException;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreCachingWrappingScorer;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import java.io.IOException;
import java.util.ArrayList;
@ -174,7 +174,7 @@ public class MultiBucketCollector extends BucketCollector {
}
@Override
public void setScorer(Scorer scorer) throws IOException {
public void setScorer(Scorable scorer) throws IOException {
if (cacheScores) {
scorer = new ScoreCachingWrappingScorer(scorer);
}

View File

@ -19,11 +19,10 @@
package org.elasticsearch.search.aggregations.bucket.sampler;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TopScoreDocCollector;
@ -89,7 +88,7 @@ public class BestDocsDeferringCollector extends DeferringBucketCollector impleme
// Deferring collector
return new LeafBucketCollector() {
@Override
public void setScorer(Scorer scorer) throws IOException {
public void setScorer(Scorable scorer) throws IOException {
perSegCollector.setScorer(scorer);
}
@ -156,7 +155,7 @@ public class BestDocsDeferringCollector extends DeferringBucketCollector impleme
private long parentBucket;
private int matchedDocs;
PerParentBucketSamples(long parentBucket, Scorer scorer, LeafReaderContext readerContext) {
PerParentBucketSamples(long parentBucket, Scorable scorer, LeafReaderContext readerContext) {
try {
this.parentBucket = parentBucket;
tdc = createTopDocsCollector(shardSize);
@ -185,7 +184,7 @@ public class BestDocsDeferringCollector extends DeferringBucketCollector impleme
currentLeafCollector.collect(doc);
}
public void setScorer(Scorer scorer) throws IOException {
public void setScorer(Scorable scorer) throws IOException {
currentLeafCollector.setScorer(scorer);
}
@ -198,19 +197,18 @@ public class BestDocsDeferringCollector extends DeferringBucketCollector impleme
}
}
class PerSegmentCollects extends Scorer {
class PerSegmentCollects extends Scorable {
private LeafReaderContext readerContext;
int maxDocId = Integer.MIN_VALUE;
private float currentScore;
private int currentDocId = -1;
private Scorer currentScorer;
private Scorable currentScorer;
PerSegmentCollects(LeafReaderContext readerContext) throws IOException {
// The publisher behaviour for Reader/Scorer listeners triggers a
// call to this constructor with a null scorer so we can't call
// scorer.getWeight() and pass the Weight to our base class.
// However, passing null seems to have no adverse effects here...
super(null);
this.readerContext = readerContext;
for (int i = 0; i < perBucketSamples.size(); i++) {
PerParentBucketSamples perBucketSample = perBucketSamples.get(i);
@ -221,7 +219,7 @@ public class BestDocsDeferringCollector extends DeferringBucketCollector impleme
}
}
public void setScorer(Scorer scorer) throws IOException {
public void setScorer(Scorable scorer) throws IOException {
this.currentScorer = scorer;
for (int i = 0; i < perBucketSamples.size(); i++) {
PerParentBucketSamples perBucketSample = perBucketSamples.get(i);
@ -266,11 +264,6 @@ public class BestDocsDeferringCollector extends DeferringBucketCollector impleme
return currentDocId;
}
@Override
public DocIdSetIterator iterator() {
throw new ElasticsearchException("This caching scorer implementation only implements score() and docID()");
}
public void collect(int docId, long parentBucket) throws IOException {
perBucketSamples = bigArrays.grow(perBucketSamples, parentBucket + 1);
PerParentBucketSamples sampler = perBucketSamples.get((int) parentBucket);
@ -282,10 +275,6 @@ public class BestDocsDeferringCollector extends DeferringBucketCollector impleme
maxDocId = Math.max(maxDocId, docId);
}
@Override
public float getMaxScore(int upTo) throws IOException {
return Float.MAX_VALUE;
}
}
public int getDocCount(long parentBucket) {

View File

@ -20,11 +20,11 @@
package org.elasticsearch.search.aggregations.metrics;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.script.ScriptedMetricAggContexts;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptedMetricAggContexts;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
@ -70,7 +70,7 @@ class ScriptedMetricAggregator extends MetricsAggregator {
final ScriptedMetricAggContexts.MapScript leafMapScript = mapScript.newInstance(ctx);
return new LeafBucketCollectorBase(sub, leafMapScript) {
@Override
public void setScorer(Scorer scorer) throws IOException {
public void setScorer(Scorable scorer) throws IOException {
leafMapScript.setScorer(scorer);
}

View File

@ -21,15 +21,14 @@ package org.elasticsearch.search.aggregations.metrics;
import com.carrotsearch.hppc.LongObjectHashMap;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TopFieldCollector;
@ -106,10 +105,10 @@ class TopHitsAggregator extends MetricsAggregator {
final LongObjectHashMap<LeafCollector> leafCollectors = new LongObjectHashMap<>(1);
return new LeafBucketCollectorBase(sub, null) {
Scorer scorer;
Scorable scorer;
@Override
public void setScorer(Scorer scorer) throws IOException {
public void setScorer(Scorable scorer) throws IOException {
this.scorer = scorer;
super.setScorer(scorer);
for (ObjectCursor<LeafCollector> cursor : leafCollectors.values()) {

View File

@ -26,7 +26,7 @@ import org.apache.lucene.index.OrdinalMap;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.lucene.ScorerAware;
import org.elasticsearch.common.util.CollectionUtils;
@ -295,7 +295,7 @@ public abstract class ValuesSource {
}
@Override
public void setScorer(Scorer scorer) {
public void setScorer(Scorable scorer) {
script.setScorer(scorer);
}
@ -326,7 +326,7 @@ public abstract class ValuesSource {
}
@Override
public void setScorer(Scorer scorer) {
public void setScorer(Scorable scorer) {
script.setScorer(scorer);
}
@ -445,7 +445,7 @@ public abstract class ValuesSource {
}
@Override
public void setScorer(Scorer scorer) {
public void setScorer(Scorable scorer) {
script.setScorer(scorer);
}

View File

@ -18,7 +18,7 @@
*/
package org.elasticsearch.search.aggregations.support.values;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.elasticsearch.common.lucene.ScorerAware;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
@ -85,7 +85,7 @@ public class ScriptBytesValues extends SortingBinaryDocValues implements ScorerA
}
@Override
public void setScorer(Scorer scorer) {
public void setScorer(Scorable scorer) {
script.setScorer(scorer);
}
}

View File

@ -18,7 +18,7 @@
*/
package org.elasticsearch.search.aggregations.support.values;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.elasticsearch.common.lucene.ScorerAware;
import org.elasticsearch.index.fielddata.SortingNumericDoubleValues;
import org.elasticsearch.script.SearchScript;
@ -107,7 +107,7 @@ public class ScriptDoubleValues extends SortingNumericDoubleValues implements Sc
}
@Override
public void setScorer(Scorer scorer) {
public void setScorer(Scorable scorer) {
script.setScorer(scorer);
}
}

View File

@ -18,7 +18,7 @@
*/
package org.elasticsearch.search.aggregations.support.values;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.util.LongValues;
import org.elasticsearch.common.lucene.ScorerAware;
import org.elasticsearch.index.fielddata.AbstractSortingNumericDocValues;
@ -106,7 +106,7 @@ public class ScriptLongValues extends AbstractSortingNumericDocValues implements
}
@Override
public void setScorer(Scorer scorer) {
public void setScorer(Scorable scorer) {
script.setScorer(scorer);
}
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.search.profile.aggregation;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.profile.Timer;
@ -46,7 +46,7 @@ public class ProfilingLeafBucketCollector extends LeafBucketCollector {
}
@Override
public void setScorer(Scorer scorer) throws IOException {
public void setScorer(Scorable scorer) throws IOException {
delegate.setScorer(scorer);
}

View File

@ -24,8 +24,8 @@ import org.apache.lucene.search.Collector;
import org.apache.lucene.search.FilterCollector;
import org.apache.lucene.search.FilterLeafCollector;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import java.io.IOException;
@ -76,7 +76,7 @@ final class ProfileCollector extends FilterCollector {
}
@Override
public void setScorer(Scorer scorer) throws IOException {
public void setScorer(Scorable scorer) throws IOException {
final long start = System.nanoTime();
try {
super.setScorer(scorer);

View File

@ -71,7 +71,7 @@ final class ProfileScorer extends Scorer {
}
@Override
public Collection<ChildScorer> getChildren() throws IOException {
public Collection<ChildScorable> getChildren() throws IOException {
return scorer.getChildren();
}

View File

@ -21,7 +21,7 @@ package org.elasticsearch.search.sort;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
@ -351,7 +351,7 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
return FieldData.singleton(values);
}
@Override
protected void setScorer(Scorer scorer) {
protected void setScorer(Scorable scorer) {
leafScript.setScorer(scorer);
}
};
@ -376,7 +376,7 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
return FieldData.singleton(values);
}
@Override
protected void setScorer(Scorer scorer) {
protected void setScorer(Scorable scorer) {
leafScript.setScorer(scorer);
}
};

View File

@ -19,9 +19,14 @@
package org.elasticsearch.common.lucene.search.function;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
@ -36,7 +41,7 @@ public class MinScoreScorerTests extends LuceneTestCase {
return new DocIdSetIterator() {
int i = -1;
@Override
public int nextDoc() throws IOException {
if (i + 1 == docs.length) {
@ -45,17 +50,17 @@ public class MinScoreScorerTests extends LuceneTestCase {
return docs[++i];
}
}
@Override
public int docID() {
return i < 0 ? -1 : i == docs.length ? NO_MORE_DOCS : docs[i];
}
@Override
public long cost() {
return docs.length;
}
@Override
public int advance(int target) throws IOException {
return slowAdvance(target);
@ -63,9 +68,36 @@ public class MinScoreScorerTests extends LuceneTestCase {
};
}
private static Weight fakeWeight() {
return new Weight(new MatchAllDocsQuery()) {
@Override
public void extractTerms(Set<Term> terms) {
}
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
return null;
}
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
return null;
}
@Override
public boolean isCacheable(LeafReaderContext ctx) {
return false;
}
};
}
private static Scorer scorer(int maxDoc, final int[] docs, final float[] scores, final boolean twoPhase) {
final DocIdSetIterator iterator = twoPhase ? DocIdSetIterator.all(maxDoc) : iterator(docs);
return new Scorer(null) {
return new Scorer(fakeWeight()) {
int lastScoredDoc = -1;
public DocIdSetIterator iterator() {
if (twoPhase) {
return TwoPhaseIterator.asDocIdSetIterator(twoPhaseIterator());
@ -77,12 +109,12 @@ public class MinScoreScorerTests extends LuceneTestCase {
public TwoPhaseIterator twoPhaseIterator() {
if (twoPhase) {
return new TwoPhaseIterator(iterator) {
@Override
public boolean matches() throws IOException {
return Arrays.binarySearch(docs, iterator.docID()) >= 0;
}
@Override
public float matchCost() {
return 10;
@ -100,6 +132,8 @@ public class MinScoreScorerTests extends LuceneTestCase {
@Override
public float score() throws IOException {
assertNotEquals("score() called twice on doc " + docID(), lastScoredDoc, docID());
lastScoredDoc = docID();
final int idx = Arrays.binarySearch(docs, docID());
return scores[idx];
}
@ -130,7 +164,7 @@ public class MinScoreScorerTests extends LuceneTestCase {
}
Scorer scorer = scorer(maxDoc, docs, scores, twoPhase);
final float minScore = random().nextFloat();
Scorer minScoreScorer = new MinScoreScorer(null, scorer, minScore);
Scorer minScoreScorer = new MinScoreScorer(fakeWeight(), scorer, minScore);
int doc = -1;
while (doc != DocIdSetIterator.NO_MORE_DOCS) {
final int target;
@ -152,7 +186,7 @@ public class MinScoreScorerTests extends LuceneTestCase {
assertEquals(DocIdSetIterator.NO_MORE_DOCS, doc);
} else {
assertEquals(docs[idx], doc);
assertEquals(scores[idx], scorer.score(), 0f);
assertEquals(scores[idx], minScoreScorer.score(), 0f);
}
}
}

View File

@ -24,19 +24,16 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.CollectionTerminatedException;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.Directory;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
@ -44,14 +41,10 @@ import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
public class MultiBucketCollectorTests extends ESTestCase {
private static class FakeScorer extends Scorer {
private static class ScoreAndDoc extends Scorable {
float score;
int doc = -1;
FakeScorer() {
super(null);
}
@Override
public int docID() {
return doc;
@ -61,26 +54,6 @@ public class MultiBucketCollectorTests extends ESTestCase {
public float score() {
return score;
}
@Override
public DocIdSetIterator iterator() {
throw new UnsupportedOperationException();
}
@Override
public float getMaxScore(int upTo) throws IOException {
return Float.MAX_VALUE;
}
@Override
public Weight getWeight() {
throw new UnsupportedOperationException();
}
@Override
public Collection<ChildScorer> getChildren() {
throw new UnsupportedOperationException();
}
}
private static class TerminateAfterBucketCollector extends BucketCollector {
@ -171,7 +144,7 @@ public class MultiBucketCollectorTests extends ESTestCase {
final LeafBucketCollector leafCollector = in.getLeafCollector(context);
return new LeafBucketCollectorBase(leafCollector, null) {
@Override
public void setScorer(Scorer scorer) throws IOException {
public void setScorer(Scorable scorer) throws IOException {
super.setScorer(scorer);
setScorerCalled.set(true);
}
@ -235,7 +208,7 @@ public class MultiBucketCollectorTests extends ESTestCase {
collector1 = new TerminateAfterBucketCollector(collector1, 1);
collector2 = new TerminateAfterBucketCollector(collector2, 2);
Scorer scorer = new FakeScorer();
Scorable scorer = new ScoreAndDoc();
List<BucketCollector> collectors = Arrays.asList(collector1, collector2);
Collections.shuffle(collectors, random());

View File

@ -20,7 +20,7 @@
package org.elasticsearch.search.aggregations.support;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.support.values.ScriptBytesValues;
@ -59,7 +59,7 @@ public class ScriptValuesTests extends ESTestCase {
}
@Override
public void setScorer(Scorer scorer) {
public void setScorer(Scorable scorer) {
}
@Override

View File

@ -27,11 +27,11 @@ import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.NumericUtils;
@ -99,7 +99,7 @@ public class DocValuesSliceQueryTests extends ESTestCase {
public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
return new LeafCollector() {
@Override
public void setScorer(Scorer scorer) throws IOException {
public void setScorer(Scorable scorer) throws IOException {
}
@Override

View File

@ -26,11 +26,11 @@ import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
@ -92,7 +92,7 @@ public class TermsSliceQueryTests extends ESTestCase {
public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
return new LeafCollector() {
@Override
public void setScorer(Scorer scorer) throws IOException {
public void setScorer(Scorable scorer) throws IOException {
}
@Override

View File

@ -20,7 +20,7 @@
package org.elasticsearch.script;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.elasticsearch.index.similarity.ScriptedSimilarity.Doc;
import org.elasticsearch.index.similarity.ScriptedSimilarity.Field;
import org.elasticsearch.index.similarity.ScriptedSimilarity.Query;
@ -334,7 +334,7 @@ public class MockScriptEngine implements ScriptEngine {
}
@Override
public void setScorer(Scorer scorer) {
public void setScorer(Scorable scorer) {
ctx.put("_score", new ScoreAccessor(scorer));
}
@ -553,7 +553,7 @@ public class MockScriptEngine implements ScriptEngine {
@Override
public ScoreScript newInstance(LeafReaderContext ctx) throws IOException {
Scorer[] scorerHolder = new Scorer[1];
Scorable[] scorerHolder = new Scorable[1];
return new ScoreScript(params, lookup, ctx) {
@Override
public double execute() {
@ -566,7 +566,7 @@ public class MockScriptEngine implements ScriptEngine {
}
@Override
public void setScorer(Scorer scorer) {
public void setScorer(Scorable scorer) {
scorerHolder[0] = scorer;
}
};

View File

@ -1 +0,0 @@
126faacb28d1b8cc1ab81d702973d057892120d1

View File

@ -0,0 +1 @@
06c1e4fa838807059d27aaf5405cfdfe7303369c