Upgrade to lucene 8.1.0-snapshot-e460356abe (#40952)

Authored by Jim Ferenczi on 2019-04-16 09:29:22 +02:00, committed by jimczi
parent 0777223bab
commit 4ca5649a0d
63 changed files with 69 additions and 295 deletions

View File

@ -1,5 +1,5 @@
elasticsearch = 7.3.0
lucene = 8.0.0
lucene = 8.1.0-snapshot-e460356abe
bundled_jdk = 12.0.1+12@69cfe15208a647278a19ef0990eea691

View File

@ -5,8 +5,8 @@ bare_version never includes -alpha or -beta
:bare_version: 7.3.0
:major-version: 7.x
:prev-major-version: 6.x
:lucene_version: 8.0.0
:lucene_version_path: 8_0_0
:lucene_version: 8.1.0
:lucene_version_path: 8_1_0
:branch: 7.x
:jdk: 1.8.0_131
:jdk_major: 8

View File

@ -47,9 +47,9 @@ public class WordDelimiterGraphTokenFilterFactoryTests
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
String[] expected = new String[] { "PowerShot", "PowerShot", "Power", "Shot", "50042",
"500-42", "500", "42", "wifi", "wi-fi", "wi", "fi", "wifi4000", "wi-fi-4000", "wi",
"fi", "4000", "j2se", "j2se", "j", "2", "se", "ONeil", "O'Neil's", "O", "Neil" };
String[] expected = new String[] { "PowerShot", "PowerShot", "Power", "Shot", "500-42",
"50042", "500", "42", "wi-fi", "wifi", "wi", "fi", "wi-fi-4000", "wifi4000", "wi",
"fi", "4000", "j2se", "j2se", "j", "2", "se", "O'Neil's", "ONeil", "O", "Neil" };
Tokenizer tokenizer = new WhitespaceTokenizer();
tokenizer.setReader(new StringReader(source));
int[] expectedIncr = new int[] { 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0,
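
Note (not part of the diff): the reordered expectations reflect a Lucene 8.1 behaviour change in WordDelimiterGraphFilter, which now emits the preserved original token ("500-42", "wi-fi", "wi-fi-4000", "O'Neil's") before the catenated form ("50042", "wifi", "wifi4000", "ONeil") at the same position. A small illustrative helper, assuming `ts` is the analysis chain under test (the whitespace tokenizer above wrapped by the word-delimiter filter), that prints the term/position-increment pairs the two arrays encode:

// Illustrative only; `ts` is an assumed TokenStream, e.g. tokenFilter.create(tokenizer).
ts.reset();
CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
PositionIncrementAttribute posInc = ts.addAttribute(PositionIncrementAttribute.class);
while (ts.incrementToken()) {
    System.out.println(term.toString() + " +" + posInc.getPositionIncrement());
}
ts.end();
ts.close();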

View File

@ -1 +0,0 @@
106b35cf2739f7d2350f3ef5c107d9d066d17cd6

View File

@ -0,0 +1 @@
0a1addebde14147501b7d24a581a7a7288bc585d

View File

@ -1 +0,0 @@
1bc195d1b036b59cdf7704f758df8f43c7e229c4

View File

@ -0,0 +1 @@
b8b7d744e6294706b379ec7fdd2d6f1b6badc95b

View File

@ -1 +0,0 @@
760d9451396c45fdb271750f4e52acc2ff5e7bb2

View File

@ -0,0 +1 @@
c9dcc5568ccd4589f4a6871d2019661546106c83

View File

@ -1 +0,0 @@
5bb5a241d95d9753a5e9fb341476306cb96a34a9

View File

@ -0,0 +1 @@
bef6d901a9c8b4c6139de7419b3024e0c9fd4ad3

View File

@ -1 +0,0 @@
e3384e1b5c1e5f39026d3d6e48e00df84f614911

View File

@ -0,0 +1 @@
074c06d6f2254edae04acdd53bf6989a4343acc8

View File

@ -1 +0,0 @@
3e413379d7e61eb43dee64ec5e756cbeb3478a05

View File

@ -0,0 +1 @@
5cd2a341ab4524ec7ff40ba29faa4ead5e805413

View File

@ -1 +0,0 @@
50d81559e2604da31ca7961581fda41257ab0600

View File

@ -0,0 +1 @@
ba55aba7d278f6201b4ebd6dafbc7edb6fe94f8c

View File

@ -1 +0,0 @@
a0b165cb156178a0a91baa4b8d2f4c37278d92e0

View File

@ -0,0 +1 @@
543d99fd2ba4302f3555792236350b201514d821

View File

@ -1 +0,0 @@
26fdada04adbb02164ef2d0f9abfa3b46ec30a0b

View File

@ -0,0 +1 @@
c20a8ae0c3bd769aa6c415ebea94ba466d9a631d

View File

@ -1 +0,0 @@
90bda2357016dc0f4582938b01f5ae1142089d5f

View File

@ -0,0 +1 @@
6e8921ab37facdcc5c4b71f2612d72300d6de217

View File

@ -1 +0,0 @@
407c555efb2d3253f51a676cc2089a5d29a3b7b7

View File

@ -0,0 +1 @@
3e85f77d8f8ed1db53dba387fbdec55a9f912639

View File

@ -1 +0,0 @@
a9004071d79e9f1eb5f2fe81c4b2b736d9d838bf

View File

@ -0,0 +1 @@
426a1822d888a6341f6bafccaad19e4a2ad88e25

View File

@ -1 +0,0 @@
95c55c400dcfd5e08da1bab4f33eb3b6a65b1d16

View File

@ -0,0 +1 @@
f83fa4b264198dfb12436a803309a60f5588481d

View File

@ -1 +0,0 @@
9364f8fd4fff476e619e84cb22f4cb0108039eda

View File

@ -0,0 +1 @@
f381131abef51f77d26bccbb213d1c8563c19ec4

View File

@ -1 +0,0 @@
9cce58e14792735cb7dc85fc84239193521d45eb

View File

@ -0,0 +1 @@
8d8733551b9eb71e1f59688b8e78e0b481974d7a

View File

@ -1 +0,0 @@
c444c2c41ab46744e2bbc61df5bdd2ac62ffe6a5

View File

@ -0,0 +1 @@
13da0b22f01dff4a01c9907425464a440695104b

View File

@ -1 +0,0 @@
197e4cf95fcbc787f128a33e4675528cfee65065

View File

@ -0,0 +1 @@
6c3de4dbb98b5cc00875d76e817929374bb9e710

View File

@ -1 +0,0 @@
aa7d2e07736356405b4bece971d0a9ff1036dac3

View File

@ -0,0 +1 @@
539ef199c74ae6891ac93f55632fe140b9d4c291

View File

@ -1 +0,0 @@
12f32d95596ff55c43c4c2378bf26e9fe3ea7dd9

View File

@ -0,0 +1 @@
0371141f658e2157babd490f0a8ddbcd5114b371

View File

@ -1 +0,0 @@
5cd61c5c166a69571f39178b50d304d6e3914050

View File

@ -0,0 +1 @@
1bae56fbce29d6c597c00889dab1909f51f4aaac

View File

@ -1 +0,0 @@
72474064a247566c4c759eda1dfaac4d48778cd1

View File

@ -0,0 +1 @@
6eaed1dea9a18502ab9dffe55f081da6060373f7

View File

@ -1 +0,0 @@
d40eb969881f58b47bace23865a1d5a9dd4ebf0a

View File

@ -0,0 +1 @@
e54c6be78275637544a3080874dd04b0d92755e5

View File

@ -1 +0,0 @@
57ebd0c31e90f5f73aad7dbf7448cd59d8418f03

View File

@ -0,0 +1 @@
e4c95d0bb740f18af520faebcebb968da3e0a687

View File

@ -21,12 +21,10 @@ package org.apache.lucene.search.uhighlight;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.CommonTermsQuery;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanOrQuery;
@ -36,12 +34,9 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.index.search.ESToParentBlockJoinQuery;
import java.io.IOException;
import java.text.BreakIterator;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
@ -151,11 +146,6 @@ public class CustomUnifiedHighlighter extends UnifiedHighlighter {
getScorer(field), maxPassages, (noMatchSize > 0 ? 1 : 0), getFormatter(field), noMatchSize, fieldValue);
}
@Override
protected Collection<Query> preMultiTermQueryRewrite(Query query) {
return rewriteCustomQuery(query);
}
@Override
protected Collection<Query> preSpanQueryRewrite(Query query) {
return rewriteCustomQuery(query);
@ -175,7 +165,7 @@ public class CustomUnifiedHighlighter extends UnifiedHighlighter {
SpanQuery[] innerQueries = new SpanQuery[terms[i].length];
for (int j = 0; j < terms[i].length; j++) {
if (i == sizeMinus1) {
innerQueries[j] = new SpanMultiTermQueryWrapper<PrefixQuery>(new PrefixQuery(terms[i][j]));
innerQueries[j] = new SpanMultiTermQueryWrapper<>(new PrefixQuery(terms[i][j]));
} else {
innerQueries[j] = new SpanTermQuery(terms[i][j]);
}
@ -200,17 +190,6 @@ public class CustomUnifiedHighlighter extends UnifiedHighlighter {
boolean inorder = (mpq.getSlop() == 0);
return Collections.singletonList(new SpanNearQuery(positionSpanQueries,
mpq.getSlop() + positionGaps, inorder));
} else if (query instanceof CommonTermsQuery) {
CommonTermsQuery ctq = (CommonTermsQuery) query;
List<Query> tqs = new ArrayList<> ();
for (Term term : ctq.getTerms()) {
tqs.add(new TermQuery(term));
}
return tqs;
} else if (query instanceof FunctionScoreQuery) {
return Collections.singletonList(((FunctionScoreQuery) query).getSubQuery());
} else if (query instanceof ESToParentBlockJoinQuery) {
return Collections.singletonList(((ESToParentBlockJoinQuery) query).getChildQuery());
} else {
return null;
}
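
Note (not part of the diff): the preMultiTermQueryRewrite hook and the CommonTermsQuery / FunctionScoreQuery / ESToParentBlockJoinQuery rewrites are removed because the Lucene 8.1 UnifiedHighlighter discovers terms through the new QueryVisitor API; the two wrapper queries now implement visit() further down in this commit. A hedged sketch of that extraction path, using Lucene's QueryVisitor.termCollector helper and a stand-in query:

// Illustrative only: in 8.1, highlighters collect terms by walking the query tree with a visitor.
Set<Term> terms = new HashSet<>();
Query query = new TermQuery(new Term("body", "lucene"));   // stand-in for any (wrapped) query
query.visit(QueryVisitor.termCollector(terms));            // terms now contains body:lucene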

View File

@ -143,7 +143,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
public static final int V_7_2_0_ID = 7020099;
public static final Version V_7_2_0 = new Version(V_7_2_0_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final int V_7_3_0_ID = 7030099;
public static final Version V_7_3_0 = new Version(V_7_3_0_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final Version V_7_3_0 = new Version(V_7_3_0_ID, org.apache.lucene.util.Version.LUCENE_8_1_0);
public static final Version CURRENT = V_7_3_0;
static {

View File

@ -22,10 +22,12 @@ package org.elasticsearch.common.lucene.search.function;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.FilterScorer;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryVisitor;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.ScorerSupplier;
import org.apache.lucene.search.Weight;
@ -195,6 +197,12 @@ public class FunctionScoreQuery extends Query {
return combineFunction;
}
@Override
public void visit(QueryVisitor visitor) {
// Highlighters must visit the child query to extract terms
subQuery.visit(visitor.getSubVisitor(BooleanClause.Occur.MUST, this));
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
Query rewritten = super.rewrite(reader);

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.codec;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.FSTLoadMode;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
import org.apache.lucene.codecs.lucene80.Lucene80Codec;
import org.elasticsearch.common.Nullable;
@ -48,7 +49,7 @@ public class CodecService {
final MapBuilder<String, Codec> codecs = MapBuilder.<String, Codec>newMapBuilder();
if (mapperService == null) {
codecs.put(DEFAULT_CODEC, new Lucene80Codec());
codecs.put(BEST_COMPRESSION_CODEC, new Lucene80Codec(Mode.BEST_COMPRESSION));
codecs.put(BEST_COMPRESSION_CODEC, new Lucene80Codec(Mode.BEST_COMPRESSION, FSTLoadMode.AUTO));
} else {
codecs.put(DEFAULT_CODEC,
new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger));
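
Note (not part of the diff): FSTLoadMode is new to Lucene50PostingsFormat in this Lucene 8.1 snapshot and selects how the postings terms-index FSTs are loaded (AUTO leaves the choice to Lucene). A hedged sketch of the two-argument codec constructor used outside Elasticsearch; the analyzer is just a placeholder:

// Illustrative only; mirrors the constructor call added above.
Codec codec = new Lucene80Codec(Lucene50StoredFieldsFormat.Mode.BEST_COMPRESSION,
        Lucene50PostingsFormat.FSTLoadMode.AUTO);
IndexWriterConfig iwc = new IndexWriterConfig(new StandardAnalyzer());
iwc.setCodec(codec);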

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.codec;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.FSTLoadMode;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
import org.apache.lucene.codecs.lucene80.Lucene80Codec;
import org.elasticsearch.common.lucene.Lucene;
@ -47,7 +48,7 @@ public class PerFieldMappingPostingFormatCodec extends Lucene80Codec {
}
public PerFieldMappingPostingFormatCodec(Lucene50StoredFieldsFormat.Mode compressionMode, MapperService mapperService, Logger logger) {
super(compressionMode);
super(compressionMode, FSTLoadMode.AUTO);
this.mapperService = mapperService;
this.logger = logger;
}

View File

@ -26,9 +26,9 @@ import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MatchesIterator;
import org.apache.lucene.search.QueryVisitor;
import org.apache.lucene.search.intervals.IntervalIterator;
import org.apache.lucene.search.intervals.Intervals;
import org.apache.lucene.search.intervals.IntervalsSource;
@ -37,9 +37,10 @@ import org.apache.lucene.util.graph.GraphTokenStreamFiniteStrings;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
/**
* Constructs an IntervalsSource based on analyzed text
@ -287,14 +288,17 @@ public class IntervalBuilder {
return null;
}
@Override
public void visit(String field, QueryVisitor visitor) {}
@Override
public int minExtent() {
return 0;
}
@Override
public void extractTerms(String field, Set<Term> terms) {
public Collection<IntervalsSource> pullUpDisjunctions() {
return Collections.emptyList();
}
@Override
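
Note (not part of the diff): IntervalsSource in Lucene 8.1 reports its terms through visit(field, visitor), replacing the removed extractTerms, and exposes equivalent disjunctions through pullUpDisjunctions(). A hedged, illustrative use of the new visit path with the same termCollector helper:

// Illustrative only; assumes the Lucene 8.1 intervals API as imported above.
IntervalsSource source = Intervals.ordered(Intervals.term("quick"), Intervals.term("fox"));
Set<Term> terms = new HashSet<>();
source.visit("body", QueryVisitor.termCollector(terms));   // collects body:quick and body:fox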

View File

@ -20,8 +20,10 @@
package org.elasticsearch.index.search;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryVisitor;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.join.ScoreMode;
@ -74,6 +76,12 @@ public final class ESToParentBlockJoinQuery extends Query {
return super.rewrite(reader);
}
@Override
public void visit(QueryVisitor visitor) {
// Highlighters must visit the child query to extract terms
query.getChildQuery().visit(visitor.getSubVisitor(BooleanClause.Occur.MUST, this));
}
@Override
public Weight createWeight(IndexSearcher searcher, org.apache.lucene.search.ScoreMode scoreMode, float boost) throws IOException {
return query.createWeight(searcher, scoreMode, boost);

View File

@ -19,9 +19,7 @@
package org.elasticsearch.search.profile.query;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.search.Weight;
@ -41,7 +39,6 @@ final class ProfileScorer extends Scorer {
private final Timer scoreTimer, nextDocTimer, advanceTimer, matchTimer, shallowAdvanceTimer, computeMaxScoreTimer,
setMinCompetitiveScoreTimer;
private final boolean isConstantScoreQuery;
ProfileScorer(ProfileWeight w, Scorer scorer, QueryProfileBreakdown profile) throws IOException {
super(w);
@ -54,26 +51,6 @@ final class ProfileScorer extends Scorer {
shallowAdvanceTimer = profile.getTimer(QueryTimingType.SHALLOW_ADVANCE);
computeMaxScoreTimer = profile.getTimer(QueryTimingType.COMPUTE_MAX_SCORE);
setMinCompetitiveScoreTimer = profile.getTimer(QueryTimingType.SET_MIN_COMPETITIVE_SCORE);
ProfileScorer profileScorer = null;
if (w.getQuery() instanceof ConstantScoreQuery && scorer instanceof ProfileScorer) {
//Case when we have a totalHits query and it is not cached
profileScorer = (ProfileScorer) scorer;
} else if (w.getQuery() instanceof ConstantScoreQuery && scorer.getChildren().size() == 1) {
//Case when we have a top N query. If the scorer has no children, it is because it is cached
//and in that case we do not do any special treatment
Scorable childScorer = scorer.getChildren().iterator().next().child;
if (childScorer instanceof ProfileScorer) {
profileScorer = (ProfileScorer) childScorer;
}
}
if (profileScorer != null) {
isConstantScoreQuery = true;
profile.setTimer(QueryTimingType.NEXT_DOC, profileScorer.nextDocTimer);
profile.setTimer(QueryTimingType.ADVANCE, profileScorer.advanceTimer);
profile.setTimer(QueryTimingType.MATCH, profileScorer.matchTimer);
} else {
isConstantScoreQuery = false;
}
}
@Override
@ -103,9 +80,6 @@ final class ProfileScorer extends Scorer {
@Override
public DocIdSetIterator iterator() {
if (isConstantScoreQuery) {
return scorer.iterator();
}
final DocIdSetIterator in = scorer.iterator();
return new DocIdSetIterator() {
@ -143,9 +117,6 @@ final class ProfileScorer extends Scorer {
@Override
public TwoPhaseIterator twoPhaseIterator() {
if (isConstantScoreQuery) {
return scorer.twoPhaseIterator();
}
final TwoPhaseIterator in = scorer.twoPhaseIterator();
if (in == null) {
return null;

View File

@ -205,7 +205,7 @@ public class VersionsTests extends ESTestCase {
assertEquals(Version.V_6_0_0.luceneVersion.major - 1, version.luceneVersion.major);
// future version, should be the same version as today
version = Version.fromString("7.77.1");
version = Version.fromString("8.77.1");
assertEquals(Version.CURRENT.luceneVersion, version.luceneVersion);
}
}

View File

@ -27,6 +27,7 @@ import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.appender.AbstractAppender;
import org.apache.logging.log4j.core.filter.RegexFilter;
import org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.LongPoint;
@ -300,7 +301,13 @@ public class InternalEngineTests extends EngineTestCase {
assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
assertThat(segments.get(0).isCompound(), equalTo(true));
assertThat(segments.get(0).ramTree, nullValue());
assertThat(segments.get(0).getAttributes().keySet(), Matchers.contains(Lucene50StoredFieldsFormat.MODE_KEY));
assertThat(segments.get(0).getAttributes().keySet(),
Matchers.contains(
// TODO: Lucene50PostingsFormat#MODE_KEY should be public ?
Lucene50PostingsFormat.class.getSimpleName() + ".fstMode",
Lucene50StoredFieldsFormat.MODE_KEY
)
);
engine.flush();
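
Note (not part of the diff): segments written after this upgrade carry the postings FST load mode as a per-segment attribute (key "Lucene50PostingsFormat.fstMode") alongside the stored-fields mode key the test already checked. A hedged sketch of reading both attributes directly from Lucene, with `leafReader` assumed to come from an engine searcher:

// Illustrative only.
SegmentReader segmentReader = (SegmentReader) FilterLeafReader.unwrap(leafReader);
String fstMode = segmentReader.getSegmentInfo().info.getAttribute("Lucene50PostingsFormat.fstMode");
String storedFieldsMode = segmentReader.getSegmentInfo().info.getAttribute(Lucene50StoredFieldsFormat.MODE_KEY);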

View File

@ -28,7 +28,6 @@ import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
@ -120,209 +119,6 @@ public class QueryProfilerTests extends ESTestCase {
assertThat(rewriteTime, greaterThan(0L));
}
public void testConstantScoreQuery() throws IOException {
QueryProfiler profiler = new QueryProfiler();
searcher.setProfiler(profiler);
Query query = new ConstantScoreQuery(new TermQuery(new Term("foo", "bar")));
searcher.search(query, 1);
List<ProfileResult> results = profiler.getTree();
assertEquals(1, results.size());
Map<String, Long> breakdownConstantScoreQuery = results.get(0).getTimeBreakdown();
assertEquals(1, results.get(0).getProfiledChildren().size());
Map<String, Long> breakdownTermQuery = results.get(0).getProfiledChildren().get(0).getTimeBreakdown();
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.ADVANCE.toString()).longValue(), equalTo(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.SCORE.toString()).longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.MATCH.toString()).longValue(), equalTo(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.BUILD_SCORER.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.NEXT_DOC.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.ADVANCE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.SCORE.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.MATCH.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.ADVANCE.toString()).longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.SCORE.toString()).longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.MATCH.toString()).longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.BUILD_SCORER.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.NEXT_DOC.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.ADVANCE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.SCORE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.MATCH.toString() + "_count").longValue(), equalTo(0L));
assertEquals(breakdownConstantScoreQuery.get(QueryTimingType.NEXT_DOC.toString()).longValue(),
breakdownTermQuery.get(QueryTimingType.NEXT_DOC.toString()).longValue());
long rewriteTime = profiler.getRewriteTime();
assertThat(rewriteTime, greaterThan(0L));
}
public void testConstantScoreTotalHitsBeingCachedQuery() throws IOException {
Query query = new ConstantScoreQuery(new TermQuery(new Term("foo", "bar")));
//clean cache and make sure queries will be cached
searcher.setQueryCache(IndexSearcher.getDefaultQueryCache());
searcher.setQueryCachingPolicy(ALWAYS_CACHE_POLICY);
QueryProfiler profiler = new QueryProfiler();
searcher.setProfiler(profiler);
TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(query, collector);
List<ProfileResult> results = profiler.getTree();
assertEquals(1, results.size());
Map<String, Long> breakdownConstantScoreQuery = results.get(0).getTimeBreakdown();
assertEquals(1, results.get(0).getProfiledChildren().size());
Map<String, Long> breakdownTermQuery = results.get(0).getProfiledChildren().get(0).getTimeBreakdown();
//In this case scorers for constant score query and term query are disconnected.
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.ADVANCE.toString()).longValue(), equalTo(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.SCORE.toString()).longValue(), equalTo(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.MATCH.toString()).longValue(), equalTo(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.BUILD_SCORER.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.NEXT_DOC.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.ADVANCE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.SCORE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.MATCH.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.ADVANCE.toString()).longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.SCORE.toString()).longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.MATCH.toString()).longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.BUILD_SCORER.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.NEXT_DOC.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.ADVANCE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.SCORE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.MATCH.toString() + "_count").longValue(), equalTo(0L));
long rewriteTime = profiler.getRewriteTime();
assertThat(rewriteTime, greaterThan(0L));
}
public void testConstantScoreTotalHitsNotCachedQuery() throws IOException {
Query query = new ConstantScoreQuery(new TermQuery(new Term("foo", "bar")));
//clean cache and make sure queries will not be cached
searcher.setQueryCache(IndexSearcher.getDefaultQueryCache());
searcher.setQueryCachingPolicy(NEVER_CACHE_POLICY);
QueryProfiler profiler = new QueryProfiler();
searcher.setProfiler(profiler);
TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(query, collector);
List<ProfileResult> results = profiler.getTree();
assertEquals(1, results.size());
Map<String, Long> breakdownConstantScoreQuery = results.get(0).getTimeBreakdown();
assertEquals(1, results.get(0).getProfiledChildren().size());
Map<String, Long> breakdownTermQuery = results.get(0).getProfiledChildren().get(0).getTimeBreakdown();
//Timing from the scorer of term query are inherited by constant score query scorer.
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.ADVANCE.toString()).longValue(), equalTo(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.SCORE.toString()).longValue(), equalTo(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.MATCH.toString()).longValue(), equalTo(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.BUILD_SCORER.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.NEXT_DOC.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.ADVANCE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.SCORE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.MATCH.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.ADVANCE.toString()).longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.SCORE.toString()).longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.MATCH.toString()).longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.BUILD_SCORER.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.NEXT_DOC.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.ADVANCE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.SCORE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.MATCH.toString() + "_count").longValue(), equalTo(0L));
assertEquals(breakdownConstantScoreQuery.get(QueryTimingType.NEXT_DOC.toString()).longValue(),
breakdownTermQuery.get(QueryTimingType.NEXT_DOC.toString()).longValue());
long rewriteTime = profiler.getRewriteTime();
assertThat(rewriteTime, greaterThan(0L));
}
public void testConstantScoreTotalHitsCachedQuery() throws IOException {
Query query = new ConstantScoreQuery(new TermQuery(new Term("foo", "bar")));
//clean cache and make sure queries will be cached
searcher.setQueryCache(IndexSearcher.getDefaultQueryCache());
searcher.setQueryCachingPolicy(ALWAYS_CACHE_POLICY);
//Put query on cache
TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(query, collector);
QueryProfiler profiler = new QueryProfiler();
searcher.setProfiler(profiler);
collector = new TotalHitCountCollector();
searcher.search(query, collector);
List<ProfileResult> results = profiler.getTree();
assertEquals(1, results.size());
Map<String, Long> breakdownConstantScoreQuery = results.get(0).getTimeBreakdown();
assertEquals(1, results.get(0).getProfiledChildren().size());
Map<String, Long> breakdownTermQuery = results.get(0).getProfiledChildren().get(0).getTimeBreakdown();
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.BUILD_SCORER.toString()).longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.NEXT_DOC.toString()).longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.ADVANCE.toString()).longValue(), equalTo(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.SCORE.toString()).longValue(), equalTo(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.MATCH.toString()).longValue(), equalTo(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.BUILD_SCORER.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.NEXT_DOC.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.ADVANCE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.SCORE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownConstantScoreQuery.get(QueryTimingType.MATCH.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.CREATE_WEIGHT.toString()).longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.BUILD_SCORER.toString()).longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.NEXT_DOC.toString()).longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.ADVANCE.toString()).longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.SCORE.toString()).longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.MATCH.toString()).longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count").longValue(), greaterThan(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.BUILD_SCORER.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.NEXT_DOC.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.ADVANCE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.SCORE.toString() + "_count").longValue(), equalTo(0L));
assertThat(breakdownTermQuery.get(QueryTimingType.MATCH.toString() + "_count").longValue(), equalTo(0L));
long rewriteTime = profiler.getRewriteTime();
assertThat(rewriteTime, greaterThan(0L));
}
public void testNoScoring() throws IOException {
QueryProfiler profiler = new QueryProfiler();
searcher.setProfiler(profiler);

View File

@ -317,7 +317,6 @@ public class QueryPhaseTests extends IndexShardTestCase {
contextSearcher = getAssertingEarlyTerminationSearcher(reader, size);
QueryPhase.execute(context, contextSearcher, checkCancelled -> {});
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs));
assertTrue(context.queryResult().terminatedEarly());
assertThat(context.terminateAfter(), equalTo(size));
assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs));
assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0].doc, greaterThanOrEqualTo(size));

View File

@ -1 +0,0 @@
407c555efb2d3253f51a676cc2089a5d29a3b7b7

View File

@ -0,0 +1 @@
3e85f77d8f8ed1db53dba387fbdec55a9f912639