mirror of https://github.com/apache/lucene.git
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/lucene-solr
commit 22eeba9920
@@ -771,7 +771,7 @@ public class MockDirectoryWrapper extends BaseDirectoryWrapper {
       }
       ii = new SlowOpeningMockIndexInputWrapper(this, name, delegateInput);
     } else {
-      ii = new MockIndexInputWrapper(this, name, delegateInput);
+      ii = new MockIndexInputWrapper(this, name, delegateInput, null);
     }
     addFileHandle(ii, name, Handle.Input);
     return ii;
@@ -30,12 +30,19 @@ public class MockIndexInputWrapper extends IndexInput {
   private MockDirectoryWrapper dir;
   final String name;
   private IndexInput delegate;
-  private boolean isClone;
-  private boolean closed;
+  private volatile boolean closed;
 
-  /** Construct an empty output buffer. */
-  public MockIndexInputWrapper(MockDirectoryWrapper dir, String name, IndexInput delegate) {
+  // Which MockIndexInputWrapper we were cloned from, or null if we are not a clone:
+  private final MockIndexInputWrapper parent;
+
+  /** Sole constructor */
+  public MockIndexInputWrapper(MockDirectoryWrapper dir, String name, IndexInput delegate, MockIndexInputWrapper parent) {
     super("MockIndexInputWrapper(name=" + name + " delegate=" + delegate + ")");
+
+    // If we are a clone then our parent better not be a clone!
+    assert parent == null || parent.parent == null;
+
+    this.parent = parent;
     this.name = name;
     this.dir = dir;
     this.delegate = delegate;
@@ -54,7 +61,7 @@ public class MockIndexInputWrapper extends IndexInput {
       // remove the conditional check so we also track that
       // all clones get closed:
       assert delegate != null;
-      if (!isClone) {
+      if (parent == null) {
         dir.removeIndexInput(this, name);
       }
       dir.maybeThrowDeterministicException();
@@ -62,9 +69,13 @@ public class MockIndexInputWrapper extends IndexInput {
   }
 
   private void ensureOpen() {
+    // TODO: not great this is a volatile read (closed) ... we should deploy heavy JVM voodoo like SwitchPoint to avoid this
     if (closed) {
       throw new RuntimeException("Abusing closed IndexInput!");
     }
+    if (parent != null && parent.closed) {
+      throw new RuntimeException("Abusing clone of a closed IndexInput!");
+    }
   }
 
   @Override
@@ -75,8 +86,7 @@ public class MockIndexInputWrapper extends IndexInput {
     }
     dir.inputCloneCount.incrementAndGet();
     IndexInput iiclone = delegate.clone();
-    MockIndexInputWrapper clone = new MockIndexInputWrapper(dir, name, iiclone);
-    clone.isClone = true;
+    MockIndexInputWrapper clone = new MockIndexInputWrapper(dir, name, iiclone, parent != null ? parent : this);
     // Pending resolution on LUCENE-686 we may want to
     // uncomment this code so that we also track that all
     // clones get closed:
@@ -102,8 +112,7 @@ public class MockIndexInputWrapper extends IndexInput {
     }
     dir.inputCloneCount.incrementAndGet();
     IndexInput slice = delegate.slice(sliceDescription, offset, length);
-    MockIndexInputWrapper clone = new MockIndexInputWrapper(dir, sliceDescription, slice);
-    clone.isClone = true;
+    MockIndexInputWrapper clone = new MockIndexInputWrapper(dir, sliceDescription, slice, parent != null ? parent : this);
     return clone;
   }
 
@@ -30,7 +30,7 @@ class SlowClosingMockIndexInputWrapper extends MockIndexInputWrapper {
 
   public SlowClosingMockIndexInputWrapper(MockDirectoryWrapper dir,
       String name, IndexInput delegate) {
-    super(dir, name, delegate);
+    super(dir, name, delegate, null);
   }
 
   @Override
@@ -28,7 +28,7 @@ class SlowOpeningMockIndexInputWrapper extends MockIndexInputWrapper {
 
   public SlowOpeningMockIndexInputWrapper(MockDirectoryWrapper dir,
      String name, IndexInput delegate) throws IOException {
-    super(dir, name, delegate);
+    super(dir, name, delegate, null);
     try {
       Thread.sleep(50);
     } catch (InterruptedException ie) {
@@ -171,4 +171,40 @@ public class TestMockDirectoryWrapper extends BaseDirectoryTestCase {
 
     assertTrue("MockDirectoryWrapper on dir=" + dir + " failed to corrupt an unsync'd file", changed);
   }
+
+  public void testAbuseClosedIndexInput() throws Exception {
+    MockDirectoryWrapper dir = newMockDirectory();
+    IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT);
+    out.writeByte((byte) 42);
+    out.close();
+    final IndexInput in = dir.openInput("foo", IOContext.DEFAULT);
+    in.close();
+    expectThrows(RuntimeException.class, in::readByte);
+    dir.close();
+  }
+
+  public void testAbuseCloneAfterParentClosed() throws Exception {
+    MockDirectoryWrapper dir = newMockDirectory();
+    IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT);
+    out.writeByte((byte) 42);
+    out.close();
+    IndexInput in = dir.openInput("foo", IOContext.DEFAULT);
+    final IndexInput clone = in.clone();
+    in.close();
+    expectThrows(RuntimeException.class, clone::readByte);
+    dir.close();
+  }
+
+  public void testAbuseCloneOfCloneAfterParentClosed() throws Exception {
+    MockDirectoryWrapper dir = newMockDirectory();
+    IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT);
+    out.writeByte((byte) 42);
+    out.close();
+    IndexInput in = dir.openInput("foo", IOContext.DEFAULT);
+    IndexInput clone1 = in.clone();
+    IndexInput clone2 = clone1.clone();
+    in.close();
+    expectThrows(RuntimeException.class, clone2::readByte);
+    dir.close();
+  }
 }
@@ -205,6 +205,10 @@ Optimizations
 * SOLR-9335: Solr cache/search/update stats counters now use LongAdder which are supposed to have higher throughput
   under high contention. (Varun Thacker)
 
+* SOLR-9350: JSON Facets: method="stream" will no longer always uses & populates the filter cache, likely
+  flushing it. 'cacheDf' can be configured to set a doc frequency threshold, now defaulting to 1/16th doc count.
+  Using -1 Disables use of the cache. (David Smiley, yonik)
+
 Other Changes
 ----------------------
 
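For illustration only (not part of the patch): one way the new SOLR-9350 knob could be exercised from a test, written SolrTestCaseJ4-style. The field cat_s and the facet key are made up, and the JSON Facet parameter spellings (method, cacheDf) are assumed to line up with the freq.cacheDf handling shown in the FacetFieldProcessorStream hunk further down.

    // hypothetical test fragment; params() is the SolrTestCaseJ4 helper
    final SolrParams p = params("q", "*:*",
        "json.facet", "{ cats : { type:terms, field:cat_s, method:stream, cacheDf:-1 } }");
    // cacheDf:-1 never consults the filterCache, 0 (the default) uses maxDoc/16,
    // and any other value is the docFreq threshold at which caching kicks in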
@@ -241,6 +245,10 @@ Other Changes
 
 * SOLR-9392: Fixed CDCR Test failures which were due to leaked resources. (shalin)
 
+* SOLR-9385: Add QParser.getParser(String,SolrQueryRequest) variant. (Christine Poerschke)
+
+* SOLR-9367: Improved TestInjection's randomization logic to use LuceneTestCase.random() (hossman)
+
 ================== 6.1.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
@@ -604,7 +604,7 @@ public class FacetingAccumulator extends BasicAccumulator implements FacetValueA
       QueryFacetAccumulator qAcc = new QueryFacetAccumulator(this,qfr.getName(),query);
       final Query q;
       try {
-        q = QParser.getParser(query, null, queryRequest).getQuery();
+        q = QParser.getParser(query, queryRequest).getQuery();
       } catch( SyntaxError e ){
         throw new SolrException(ErrorCode.BAD_REQUEST,"Invalid query '"+query+"'",e);
       }
@@ -160,7 +160,7 @@ public class BlobHandler extends RequestHandlerBase implements PluginInfoInitial
     } else {
       String q = "blobName:{0}";
       if (version != -1) q = "id:{0}/{1}";
-      QParser qparser = QParser.getParser(StrUtils.formatString(q, blobName, version), "lucene", req);
+      QParser qparser = QParser.getParser(StrUtils.formatString(q, blobName, version), req);
       final TopDocs docs = req.getSearcher().search(qparser.parse(), 1, new Sort(new SortField("version", SortField.Type.LONG, true)));
       if (docs.totalHits > 0) {
         rsp.add(ReplicationHandler.FILE_STREAM, new SolrCore.RawWriter() {
@@ -125,7 +125,7 @@ public class MoreLikeThisHandler extends RequestHandlerBase
         filters = new ArrayList<>();
         for (String fq : fqs) {
           if (fq != null && fq.trim().length() != 0) {
-            QParser fqp = QParser.getParser(fq, null, req);
+            QParser fqp = QParser.getParser(fq, req);
             filters.add(fqp.getQuery());
           }
         }
@@ -176,7 +176,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
       query = rb.getQuery();
     } else {
       try {
-        QParser parser = QParser.getParser(qs, null, req);
+        QParser parser = QParser.getParser(qs, req);
         query = parser.getQuery();
       } catch (Exception e) {
         throw new IOException(e);
@@ -198,7 +198,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
       try {
         for (String fq : fqs) {
           if (fq != null && fq.trim().length() != 0 && !fq.equals("*:*")) {
-            QParser fqp = QParser.getParser(fq, null, req);
+            QParser fqp = QParser.getParser(fq, req);
             newFilters.add(fqp.getQuery());
           }
         }
@@ -202,7 +202,7 @@ public class QueryComponent extends SearchComponent
       filters = filters == null ? new ArrayList<Query>(fqs.length) : new ArrayList<>(filters);
       for (String fq : fqs) {
         if (fq != null && fq.trim().length()!=0) {
-          QParser fqp = QParser.getParser(fq, null, req);
+          QParser fqp = QParser.getParser(fq, req);
           filters.add(fqp.getQuery());
         }
       }
@@ -158,7 +158,7 @@ public class RealTimeGetComponent extends SearchComponent
       filters = filters == null ? new ArrayList<Query>(fqs.length) : new ArrayList<>(filters);
       for (String fq : fqs) {
         if (fq != null && fq.trim().length()!=0) {
-          QParser fqp = QParser.getParser(fq, null, req);
+          QParser fqp = QParser.getParser(fq, req);
           filters.add(fqp.getQuery());
         }
       }
@@ -60,7 +60,6 @@ import org.apache.solr.schema.FieldType;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.search.DocSet;
 import org.apache.solr.search.QParser;
-import org.apache.solr.search.QParserPlugin;
 import org.apache.solr.search.SyntaxError;
 import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.spelling.AbstractLuceneSpellChecker;
@@ -242,7 +241,7 @@ public class SpellCheckComponent extends SearchComponent implements SolrCoreAwar
       try {
         if (maxResultsFilterQueryString != null) {
           // Get the default Lucene query parser
-          QParser parser = QParser.getParser(maxResultsFilterQueryString, QParserPlugin.DEFAULT_QTYPE, rb.req);
+          QParser parser = QParser.getParser(maxResultsFilterQueryString, rb.req);
           DocSet s = searcher.getDocSet(parser.getQuery());
           maxResultsByFilters = s.size();
         } else {
@@ -321,7 +321,7 @@ public class SimpleFacets {
   public void getFacetQueryCount(ParsedParams parsed, NamedList<Integer> res) throws SyntaxError, IOException {
     // TODO: slight optimization would prevent double-parsing of any localParams
     // TODO: SOLR-7753
-    Query qobj = QParser.getParser(parsed.facetValue, null, req).getQuery();
+    Query qobj = QParser.getParser(parsed.facetValue, req).getQuery();
 
     if (qobj == null) {
       res.add(parsed.key, 0);
@@ -77,7 +77,7 @@ public class ChildDocTransformerFactory extends TransformerFactory {
 
     BitSetProducer parentsFilter = null;
     try {
-      Query parentFilterQuery = QParser.getParser( parentFilter, null, req).getQuery();
+      Query parentFilterQuery = QParser.getParser( parentFilter, req).getQuery();
       parentsFilter = new QueryBitSetProducer(new QueryWrapperFilter(parentFilterQuery));
     } catch (SyntaxError syntaxError) {
       throw new SolrException( ErrorCode.BAD_REQUEST, "Failed to create correct parent filter query" );
@@ -86,7 +86,7 @@ public class ChildDocTransformerFactory extends TransformerFactory {
     Query childFilterQuery = null;
     if(childFilter != null) {
       try {
-        childFilterQuery = QParser.getParser( childFilter, null, req).getQuery();
+        childFilterQuery = QParser.getParser( childFilter, req).getQuery();
       } catch (SyntaxError syntaxError) {
         throw new SolrException( ErrorCode.BAD_REQUEST, "Failed to create correct child filter query" );
       }
@@ -76,6 +76,15 @@ import org.apache.solr.search.TermsQParserPlugin;
 * its' native parameters like <code>collection, shards</code> for subquery, eg<br>
 * <code>q=*:*&fl=*,foo:[subquery]&foo.q=cloud&foo.collection=departments</code>
 *
+ * <h3>When used in Real Time Get</h3>
+ * <p>
+ * When used in the context of a Real Time Get, the <i>values</i> from each document that are used
+ * in the qubquery are the "real time" values (possibly from the transaction log), but the query
+ * itself is still executed against the currently open searcher. Note that this means if a
+ * document is updated but not yet committed, an RTG request for that document that uses
+ * <code>[subquery]</code> could include the older (committed) version of that document,
+ * with differnet field values, in the subquery results.
+ * </p>
 */
public class SubQueryAugmenterFactory extends TransformerFactory{
 
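Not part of the commit, only a sketch of the request shape that the new javadoc paragraph describes: a Real Time Get asking for a [subquery] pseudo-field. The key kids and the field parent_id_s are illustrative; params() is the SolrTestCaseJ4 helper.

    // hypothetical RTG request: the $row.* values come from the (possibly uncommitted)
    // document, but the subquery itself still runs against the currently open searcher
    final SolrParams rtg = params("id", "42",
        "fl", "*,kids:[subquery]",
        "kids.q", "{!field f=parent_id_s v=$row.id}");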
@@ -303,6 +312,14 @@ class SubQueryAugmenter extends DocTransformer {
   public String getName() {
     return name;
   }
 
+  /**
+   * Returns false -- this transformer does use an IndexSearcher, but it does not (neccessarily) need
+   * the searcher from the ResultContext of the document being returned. Instead we use the current
+   * "live" searcher for the specified core.
+   */
+  @Override
+  public boolean needsSolrIndexSearcher() { return false; }
+
   @Override
   public void transform(SolrDocument doc, int docid, float score) {
@@ -223,7 +223,7 @@ public class Grouping {
   }
 
   public void addQueryCommand(String groupByStr, SolrQueryRequest request) throws SyntaxError {
-    QParser parser = QParser.getParser(groupByStr, null, request);
+    QParser parser = QParser.getParser(groupByStr, request);
     Query gq = parser.getQuery();
     Grouping.CommandQuery gc = new CommandQuery();
     gc.query = gq;
@@ -98,7 +98,7 @@ public class JoinQParserPlugin extends QParserPlugin {
     RefCounted<SolrIndexSearcher> fromHolder = null;
     LocalSolrQueryRequest otherReq = new LocalSolrQueryRequest(fromCore, params);
     try {
-      QParser parser = QParser.getParser(v, "lucene", otherReq);
+      QParser parser = QParser.getParser(v, otherReq);
       fromQuery = parser.getQuery();
       fromHolder = fromCore.getRegisteredSearcher();
       if (fromHolder != null) fromCoreOpenTime = fromHolder.get().getOpenNanoTime();
@@ -263,6 +263,17 @@ public abstract class QParser {
       debugInfo.add("QParser", this.getClass().getSimpleName());
     }
   }
 
+  /** Create a <code>QParser</code> to parse <code>qstr</code>,
+   * using the "lucene" (QParserPlugin.DEFAULT_QTYPE) query parser.
+   * The query parser may be overridden by local parameters in the query
+   * string itself. For example if
+   * qstr=<code>{!prefix f=myfield}foo</code>
+   * then the prefix query parser will be used.
+   */
+  public static QParser getParser(String qstr, SolrQueryRequest req) throws SyntaxError {
+    return getParser(qstr, QParserPlugin.DEFAULT_QTYPE, req);
+  }
+
   /** Create a <code>QParser</code> to parse <code>qstr</code>,
    * assuming that the default query parser is <code>defaultParser</code>.
    * The query parser may be overridden by local parameters in the query
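A usage sketch (not from the patch) of the two-argument getParser introduced above, assuming a SolrTestCaseJ4-style test where req() builds a SolrQueryRequest and the caller declares SyntaxError:

    SolrQueryRequest req = req();
    try {
      // previous style: pass null (or "lucene") as the default parser name
      Query q1 = QParser.getParser("text_t:develop", null, req).getQuery();
      // new convenience overload: defaults to QParserPlugin.DEFAULT_QTYPE ("lucene")
      Query q2 = QParser.getParser("text_t:develop", req).getQuery();
      // local params still pick the parser, exactly as before
      Query q3 = QParser.getParser("{!prefix f=myfield}foo", req).getQuery();
    } finally {
      req.close();
    }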
@@ -83,7 +83,7 @@ public class ReRankQParserPlugin extends QParserPlugin {
     if (reRankQueryString == null || reRankQueryString.trim().length() == 0) {
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, RERANK_QUERY+" parameter is mandatory");
     }
-    QParser reRankParser = QParser.getParser(reRankQueryString, null, req);
+    QParser reRankParser = QParser.getParser(reRankQueryString, req);
     Query reRankQuery = reRankParser.parse();
 
     int reRankDocs = localParams.getInt(RERANK_DOCS, RERANK_DOCS_DEFAULT);
@@ -839,9 +839,13 @@ class FacetFieldProcessorStream extends FacetFieldProcessor implements Closeable
     createAccs(-1, 1);
 
     // Minimum term docFreq in order to use the filterCache for that term.
-    int defaultMinDf = Math.max(fcontext.searcher.maxDoc() >> 4, 3); // (minimum of 3 is for test coverage purposes)
-    int minDfFilterCache = freq.cacheDf == 0 ? defaultMinDf : freq.cacheDf;
-    if (minDfFilterCache == -1) minDfFilterCache = Integer.MAX_VALUE; // -1 means never cache
+    if (freq.cacheDf == -1) { // -1 means never cache
+      minDfFilterCache = Integer.MAX_VALUE;
+    } else if (freq.cacheDf == 0) { // default; compute as fraction of maxDoc
+      minDfFilterCache = Math.max(fcontext.searcher.maxDoc() >> 4, 3); // (minimum of 3 is for test coverage purposes)
+    } else {
+      minDfFilterCache = freq.cacheDf;
+    }
 
     docs = fcontext.base;
     fastForRandomSet = null;
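The same decision pulled out into a standalone sketch (not in the patch) so the three cacheDf cases are easy to scan:

    // -1: never use the filterCache; 0: default threshold of maxDoc/16 (at least 3);
    // any other value: use it directly as the docFreq threshold
    static int resolveMinDfFilterCache(int cacheDf, int maxDoc) {
      if (cacheDf == -1) {
        return Integer.MAX_VALUE;
      } else if (cacheDf == 0) {
        return Math.max(maxDoc >> 4, 3);
      } else {
        return cacheDf;
      }
    }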
@@ -96,7 +96,7 @@ public class FacetProcessor<FacetRequestT extends FacetRequest> {
     String parentStr = freq.domain.parents;
     Query parentQuery;
     try {
-      QParser parser = QParser.getParser(parentStr, null, fcontext.req);
+      QParser parser = QParser.getParser(parentStr, fcontext.req);
       parentQuery = parser.getQuery();
     } catch (SyntaxError err) {
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error parsing block join parent specification: " + parentStr);
@@ -478,7 +478,7 @@ class FacetQueryParser extends FacetParser<FacetQuery> {
     // TODO: substats that are from defaults!!!
 
     if (qstring != null) {
-      QParser parser = QParser.getParser(qstring, null, getSolrRequest());
+      QParser parser = QParser.getParser(qstring, getSolrRequest());
       facet.q = parser.getQuery();
     }
 
@@ -62,7 +62,7 @@ public class QueryCommand implements Command<QueryCommandResult> {
    * @return this
    */
   public Builder setQuery(String groupQueryString, SolrQueryRequest request) throws SyntaxError {
-    QParser parser = QParser.getParser(groupQueryString, null, request);
+    QParser parser = QParser.getParser(groupQueryString, request);
     this.queryString = groupQueryString;
     return setQuery(parser.getQuery());
   }
@@ -236,7 +236,7 @@ public class ScoreJoinQParserPlugin extends QParserPlugin {
     LocalSolrQueryRequest otherReq = new LocalSolrQueryRequest(fromCore, params);
 
     try {
-      QParser fromQueryParser = QParser.getParser(fromQueryStr, "lucene", otherReq);
+      QParser fromQueryParser = QParser.getParser(fromQueryStr, otherReq);
       Query fromQuery = fromQueryParser.getQuery();
 
       fromHolder = fromCore.getRegisteredSearcher();
@@ -399,7 +399,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
     Query q;
     try {
       // move this higher in the stack?
-      QParser parser = QParser.getParser(cmd.getQuery(), "lucene", cmd.req);
+      QParser parser = QParser.getParser(cmd.getQuery(), cmd.req);
       q = parser.getQuery();
       q = QueryUtils.makeQueryable(q);
 
@@ -486,7 +486,7 @@ public class SolrPluginUtils {
 
     String qs = commands.size() >= 1 ? commands.get(0) : "";
     try {
-      Query query = QParser.getParser(qs, null, req).getQuery();
+      Query query = QParser.getParser(qs, req).getQuery();
 
       // If the first non-query, non-filter command is a simple sort on an indexed field, then
       // we can use the Lucene sort ability.
@@ -978,7 +978,7 @@ public class SolrPluginUtils {
     List<Query> out = new ArrayList<>(queries.length);
     for (String q : queries) {
       if (null != q && 0 != q.trim().length()) {
-        out.add(QParser.getParser(q, null, req).getQuery());
+        out.add(QParser.getParser(q, req).getQuery());
       }
     }
     return out;
@@ -17,6 +17,7 @@
 package org.apache.solr.util;
 
 import java.lang.invoke.MethodHandles;
+import java.lang.reflect.Method;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Random;
@@ -39,6 +40,11 @@ import org.slf4j.LoggerFactory;
 * Allows random faults to be injected in running code during test runs.
 *
 * Set static strings to "true" or "false" or "true:60" for true 60% of the time.
+ *
+ * All methods are No-Ops unless <code>LuceneTestCase</code> is loadable via the ClassLoader used
+ * to load this class. <code>LuceneTestCase.random()</code> is used as the source of all entropy.
+ *
+ * @lucene.internal
 */
public class TestInjection {
 
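For illustration (not in the diff): how a test typically drives one of these injection points. It assumes the flags are plain static String fields on TestInjection, which is how the code below reads them; clearing the field afterwards keeps later tests unaffected.

    // fire the injected failure roughly 60% of the time while this test runs
    TestInjection.failUpdateRequests = "true:60";
    try {
      // ... exercise indexing code that calls TestInjection.injectFailUpdateRequests() ...
    } finally {
      TestInjection.failUpdateRequests = null;  // turn the fault back off
    }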
@@ -53,16 +59,42 @@
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private static final Pattern ENABLED_PERCENT = Pattern.compile("(true|false)(?:\\:(\\d+))?$", Pattern.CASE_INSENSITIVE);
-  private static final Random RANDOM;
+
+  private static final String LUCENE_TEST_CASE_FQN = "org.apache.lucene.util.LuceneTestCase";
+
+  /**
+   * If null, then we are not being run as part of a test, and all TestInjection events should be No-Ops.
+   * If non-null, then this class should be used for accessing random entropy
+   * @see #random
+   */
+  private static final Class LUCENE_TEST_CASE;
 
   static {
-    // We try to make things reproducible in the context of our tests by initializing the random instance
-    // based on the current seed
-    String seed = System.getProperty("tests.seed");
-    if (seed == null) {
-      RANDOM = new Random();
+    Class nonFinalTemp = null;
+    try {
+      ClassLoader classLoader = MethodHandles.lookup().lookupClass().getClassLoader();
+      nonFinalTemp = classLoader.loadClass(LUCENE_TEST_CASE_FQN);
+    } catch (ClassNotFoundException e) {
+      log.debug("TestInjection methods will all be No-Ops since LuceneTestCase not found");
+    }
+    LUCENE_TEST_CASE = nonFinalTemp;
+  }
+
+  /**
+   * Returns a random to be used by the current thread if available, otherwise
+   * returns null.
+   * @see #LUCENE_TEST_CASE
+   */
+  static Random random() { // non-private for testing
+    if (null == LUCENE_TEST_CASE) {
+      return null;
     } else {
-      RANDOM = new Random(seed.hashCode());
+      try {
+        Method randomMethod = LUCENE_TEST_CASE.getMethod("random");
+        return (Random) randomMethod.invoke(null);
+      } catch (Exception e) {
+        throw new IllegalStateException("Unable to use reflection to invoke LuceneTestCase.random()", e);
+      }
     }
   }
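The classpath-probe idiom used by the new static block, reduced to a self-contained sketch (not part of the patch; it needs java.lang.invoke.MethodHandles). A null result means "not running under the test framework", and callers degrade to no-ops:

    static Class<?> probe(String fqn) {
      try {
        ClassLoader cl = MethodHandles.lookup().lookupClass().getClassLoader();
        return cl.loadClass(fqn);   // e.g. "org.apache.lucene.util.LuceneTestCase"
      } catch (ClassNotFoundException e) {
        return null;                // class absent: treat every injection as disabled
      }
    }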
@@ -100,11 +132,14 @@
 
   public static boolean injectRandomDelayInCoreCreation() {
     if (randomDelayInCoreCreation != null) {
+      Random rand = random();
+      if (null == rand) return true;
+
       Pair<Boolean,Integer> pair = parseValue(randomDelayInCoreCreation);
       boolean enabled = pair.first();
       int chanceIn100 = pair.second();
-      if (enabled && RANDOM.nextInt(100) >= (100 - chanceIn100)) {
-        int delay = RANDOM.nextInt(randomDelayMaxInCoreCreationInSec);
+      if (enabled && rand.nextInt(100) >= (100 - chanceIn100)) {
+        int delay = rand.nextInt(randomDelayMaxInCoreCreationInSec);
         log.info("Inject random core creation delay of {}s", delay);
         try {
           Thread.sleep(delay * 1000);
@@ -118,11 +153,14 @@
 
   public static boolean injectNonGracefullClose(CoreContainer cc) {
     if (cc.isShutDown() && nonGracefullClose != null) {
+      Random rand = random();
+      if (null == rand) return true;
+
       Pair<Boolean,Integer> pair = parseValue(nonGracefullClose);
       boolean enabled = pair.first();
       int chanceIn100 = pair.second();
-      if (enabled && RANDOM.nextInt(100) >= (100 - chanceIn100)) {
-        if (RANDOM.nextBoolean()) {
+      if (enabled && rand.nextInt(100) >= (100 - chanceIn100)) {
+        if (rand.nextBoolean()) {
           throw new TestShutdownFailError("Test exception for non graceful close");
         } else {
@@ -135,7 +173,9 @@
             // we should only need to do it once
 
             try {
-              Thread.sleep(RANDOM.nextInt(1000));
+              // call random() again to get the correct one for this thread
+              Random taskRand = random();
+              Thread.sleep(taskRand.nextInt(1000));
             } catch (InterruptedException e) {
 
             }
@@ -147,7 +187,7 @@
         };
         Timer timer = new Timer();
         timers.add(timer);
-        timer.schedule(task, RANDOM.nextInt(500));
+        timer.schedule(task, rand.nextInt(500));
       }
     }
   }
@@ -156,10 +196,13 @@
 
   public static boolean injectFailReplicaRequests() {
     if (failReplicaRequests != null) {
+      Random rand = random();
+      if (null == rand) return true;
+
       Pair<Boolean,Integer> pair = parseValue(failReplicaRequests);
       boolean enabled = pair.first();
       int chanceIn100 = pair.second();
-      if (enabled && RANDOM.nextInt(100) >= (100 - chanceIn100)) {
+      if (enabled && rand.nextInt(100) >= (100 - chanceIn100)) {
         throw new SolrException(ErrorCode.SERVER_ERROR, "Random test update fail");
       }
     }
@@ -169,10 +212,13 @@
 
   public static boolean injectFailUpdateRequests() {
     if (failUpdateRequests != null) {
+      Random rand = random();
+      if (null == rand) return true;
+
       Pair<Boolean,Integer> pair = parseValue(failUpdateRequests);
       boolean enabled = pair.first();
       int chanceIn100 = pair.second();
-      if (enabled && RANDOM.nextInt(100) >= (100 - chanceIn100)) {
+      if (enabled && rand.nextInt(100) >= (100 - chanceIn100)) {
         throw new SolrException(ErrorCode.SERVER_ERROR, "Random test update fail");
       }
     }
@@ -182,10 +228,13 @@
 
   public static boolean injectNonExistentCoreExceptionAfterUnload(String cname) {
     if (nonExistentCoreExceptionAfterUnload != null) {
+      Random rand = random();
+      if (null == rand) return true;
+
       Pair<Boolean,Integer> pair = parseValue(nonExistentCoreExceptionAfterUnload);
       boolean enabled = pair.first();
       int chanceIn100 = pair.second();
-      if (enabled && RANDOM.nextInt(100) >= (100 - chanceIn100)) {
+      if (enabled && rand.nextInt(100) >= (100 - chanceIn100)) {
         throw new NonExistentCoreException("Core not found to unload: " + cname);
       }
     }
@@ -195,11 +244,14 @@
 
   public static boolean injectUpdateLogReplayRandomPause() {
     if (updateLogReplayRandomPause != null) {
+      Random rand = random();
+      if (null == rand) return true;
+
       Pair<Boolean,Integer> pair = parseValue(updateLogReplayRandomPause);
       boolean enabled = pair.first();
       int chanceIn100 = pair.second();
-      if (enabled && RANDOM.nextInt(100) >= (100 - chanceIn100)) {
-        long rndTime = RANDOM.nextInt(1000);
+      if (enabled && rand.nextInt(100) >= (100 - chanceIn100)) {
+        long rndTime = rand.nextInt(1000);
         log.info("inject random log replay delay of {}ms", rndTime);
         try {
           Thread.sleep(rndTime);
@@ -214,11 +266,14 @@
 
   public static boolean injectUpdateRandomPause() {
     if (updateRandomPause != null) {
+      Random rand = random();
+      if (null == rand) return true;
+
       Pair<Boolean,Integer> pair = parseValue(updateRandomPause);
       boolean enabled = pair.first();
       int chanceIn100 = pair.second();
-      if (enabled && RANDOM.nextInt(100) >= (100 - chanceIn100)) {
-        long rndTime = RANDOM.nextInt(1000);
+      if (enabled && rand.nextInt(100) >= (100 - chanceIn100)) {
+        long rndTime = rand.nextInt(1000);
         log.info("inject random update delay of {}ms", rndTime);
         try {
           Thread.sleep(rndTime);
@@ -131,7 +131,7 @@ public class TestReversedWildcardFilterFactory extends SolrTestCaseJ4 {
         "//result[@numFound=1]");
 
     SolrQueryRequest req = req();
-    QParser qparser = QParser.getParser("id:1", "lucene", req);
+    QParser qparser = QParser.getParser("id:1", req);
 
     SolrQueryParser parserTwo = new SolrQueryParser(qparser, "two");
     assertTrue(parserTwo.getAllowLeadingWildcard());
@@ -120,11 +120,7 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
       new GeoTransformerValidator("geo_2_srpt","my_geo_alias"),
       new ExplainValidator(),
       new ExplainValidator("explain_alias"),
-      //
-      // SOLR-9377: SubQueryValidator fails on uncommited docs because not using RT seacher for sub query
-      //
-      // new SubQueryValidator(),
-      //
+      new SubQueryValidator(),
       new NotIncludedValidator("score"),
       new NotIncludedValidator("score","score_alias:score")));
@@ -197,8 +193,7 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
     // items should only be added to this list if it's known that they do not work with RTG
     // and a specific Jira for fixing this is listed as a comment
     final List<String> knownBugs = Arrays.asList
-      ( SubQueryValidator.NAME, // SOLR-9377
-        "xml","json", // SOLR-9376
+      ( "xml","json", // SOLR-9376
         "child" // way to complicatd to vet with this test, see SOLR-9379 instead
       );
 
@@ -336,6 +331,9 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
       //
       "geo_1_srpt", GeoTransformerValidator.getValueForIndexing(random()),
       "geo_2_srpt", GeoTransformerValidator.getValueForIndexing(random()),
+      // for testing subqueries
+      "next_2_ids_ss", String.valueOf(docId + 1),
+      "next_2_ids_ss", String.valueOf(docId + 2),
       // for testing prefix globbing
       "axx_i", random().nextInt(),
       "ayy_i", random().nextInt(),
@@ -365,12 +363,8 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
     final Set<FlValidator> validators = new LinkedHashSet<>();
     validators.add(ID_VALIDATOR); // always include id so we can be confident which doc we're looking at
     addRandomFlValidators(random(), validators);
-    FlValidator.addFlParams(validators, params);
-
-    // HACK: [subquery] expects this to be top level params
-    params.add(SubQueryValidator.SUBQ_KEY + ".q",
-               "{!field f=" + SubQueryValidator.SUBQ_FIELD + " v=$row." + SubQueryValidator.SUBQ_FIELD + "}");
+    FlValidator.addParams(validators, params);
 
     final List<String> idsToRequest = new ArrayList<>(docIds.length);
     final List<SolrInputDocument> docsToExpect = new ArrayList<>(docIds.length);
     for (int docId : docIds) {
@@ -421,7 +415,7 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
     // NOTE: RTG makes no garuntees about the order docs will be returned in when multi requested
     for (SolrDocument actual : docs) {
       try {
-        int actualId = Integer.parseInt(actual.getFirstValue("id").toString());
+        int actualId = assertParseInt("id", actual.getFirstValue("id"));
         final SolrInputDocument expected = knownDocs[actualId];
         assertNotNull("expected null doc but RTG returned: " + actual, expected);
 
@@ -485,10 +479,14 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
    */
   private interface FlValidator {
 
-    /** Given a list of FlValidators, adds one or more fl params that corrispond to the entire set */
-    public static void addFlParams(final Collection<FlValidator> validators, final ModifiableSolrParams params) {
+    /**
+     * Given a list of FlValidators, adds one or more fl params that corrispond to the entire set,
+     * as well as any other special case top level params required by the validators.
+     */
+    public static void addParams(final Collection<FlValidator> validators, final ModifiableSolrParams params) {
       final List<String> fls = new ArrayList<>(validators.size());
       for (FlValidator v : validators) {
+        params.add(v.getExtraRequestParams());
         fls.add(v.getFlParam());
       }
       params.add(buildCommaSepParams(random(), "fl", fls));
@@ -519,6 +517,11 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
      */
     public default String getDefaultTransformerFactoryName() { return null; }
 
+    /**
+     * Any special case params that must be added to the request for this validator
+     */
+    public default SolrParams getExtraRequestParams() { return params(); }
+
     /**
      * Must return a non null String that can be used in an fl param -- either by itself,
      * or with other items separated by commas
@@ -747,34 +750,50 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
   * Trivial validator of a SubQueryAugmenter.
   *
   * This validator ignores 90% of the features/complexity
-  * of SubQueryAugmenter, and instead just focuses on the basics of
-  * "did we match at least one doc based on a field value of the requested doc?"
+  * of SubQueryAugmenter, and instead just focuses on the basics of:
+  * <ul>
+  *  <li>do a subquery for docs where SUBQ_FIELD contains the id of the top level doc</li>
+  *  <li>verify that any subquery match is expected based on indexing pattern</li>
+  * </ul>
   */
  private static class SubQueryValidator implements FlValidator {
 
+    // HACK to work around SOLR-9396...
+    //
+    // we're using "id" (and only "id") in the subquery.q as a workarround limitation in
+    // "$rows.foo" parsing -- it only works reliably if "foo" is in fl, so we only use "$rows.id",
+    // which we know is in every request (and is a valid integer)
+
     public final static String NAME = "subquery";
     public final static String SUBQ_KEY = "subq";
-    public final static String SUBQ_FIELD = "aaa_i";
-    /** always returns true */
-    public boolean requiresRealtimeSearcherReOpen() { return true; }
+    public final static String SUBQ_FIELD = "next_2_ids_i";
+
     public String getFlParam() { return SUBQ_KEY+":["+NAME+"]"; }
+
     public Collection<String> assertRTGResults(final Collection<FlValidator> validators,
                                                final SolrInputDocument expected,
                                                final SolrDocument actual) {
-      final Object origVal = expected.getFieldValue(SUBQ_FIELD);
+      final int compVal = assertParseInt("expected id", expected.getFieldValue("id"));
 
       final Object actualVal = actual.getFieldValue(SUBQ_KEY);
       assertTrue("Expected a doclist: " + actualVal,
                  actualVal instanceof SolrDocumentList);
-      SolrDocumentList subList = (SolrDocumentList) actualVal;
-      assertTrue("sub query should have producted at least one result (this doc)",
-                 1 <= subList.getNumFound());
-      for (SolrDocument subDoc : subList) {
-        assertEquals("orig doc value doesn't match subquery doc value",
-                     origVal, subDoc.getFirstValue(SUBQ_FIELD));
+
+      assertTrue("should be at most 2 docs in doc list: " + actualVal,
+                 ((SolrDocumentList) actualVal).getNumFound() <= 2);
+
+      for (SolrDocument subDoc : (SolrDocumentList) actualVal) {
+        final int subDocIdVal = assertParseInt("subquery id", subDoc.getFirstValue("id"));
+        assertTrue("subDocId="+subDocIdVal+" not in valid range for id="+compVal+" (expected "
+                   + (compVal-1) + " or " + (compVal-2) + ")",
+                   ((subDocIdVal < compVal) && ((compVal-2) <= subDocIdVal)));
       }
 
       return Collections.<String>singleton(SUBQ_KEY);
     }
     public String getDefaultTransformerFactoryName() { return NAME; }
+    public SolrParams getExtraRequestParams() {
+      return params(SubQueryValidator.SUBQ_KEY + ".q",
+                    "{!field f=" + SubQueryValidator.SUBQ_FIELD + " v=$row.id}");
+    }
   }
 
   /** Trivial validator of a GeoTransformer */
@@ -945,4 +964,15 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
     }
     return result;
   }
+
+  /** helper method for asserting an object is a non-null String can be parsed as an int */
+  public static int assertParseInt(String msg, Object orig) {
+    assertNotNull(msg + ": is null", orig);
+    assertTrue(msg + ": is not a string: " + orig, orig instanceof String);
+    try {
+      return Integer.parseInt(orig.toString());
+    } catch (NumberFormatException nfe) {
+      throw new AssertionError(msg + ": can't be parsed as a number: " + orig, nfe);
+    }
+  }
 }
@@ -133,7 +133,7 @@ public class TestOverriddenPrefixQueryForCustomFieldType extends SolrTestCaseJ4
       SolrQueryResponse rsp = new SolrQueryResponse();
       SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp));
       for (int i = 0; i < inputs.length; i++) {
-        queries[i] = (QParser.getParser(inputs[i], null, req).getQuery());
+        queries[i] = (QParser.getParser(inputs[i], req).getQuery());
       }
     } finally {
       SolrRequestInfo.clearRequestInfo();
@@ -198,10 +198,10 @@ public class TestSearchPerf extends AbstractSolrTestCase {
     String u=t((int)(indexSize*10*fractionCovered));
 
     SolrQueryRequest req = lrf.makeRequest();
-    QParser parser = QParser.getParser("foomany_s:[" + l + " TO " + u + "]", null, req);
+    QParser parser = QParser.getParser("foomany_s:[" + l + " TO " + u + "]", req);
     Query range = parser.getQuery();
 
-    QParser parser2 = QParser.getParser("{!frange l="+l+" u="+u+"}foomany_s", null, req);
+    QParser parser2 = QParser.getParser("{!frange l="+l+" u="+u+"}foomany_s", req);
     Query frange = parser2.getQuery();
     req.close();
 
@@ -224,13 +224,13 @@ public class TestSearchPerf extends AbstractSolrTestCase {
 
     SolrQueryRequest req = lrf.makeRequest();
 
-    QParser parser = QParser.getParser("foomany_s:[" + l + " TO " + u + "]", null, req);
+    QParser parser = QParser.getParser("foomany_s:[" + l + " TO " + u + "]", req);
     Query rangeQ = parser.getQuery();
     List<Query> filters = new ArrayList<>();
     filters.add(rangeQ);
     req.close();
 
-    parser = QParser.getParser("{!dismax qf=t10_100_ws pf=t10_100_ws ps=20}"+ t(0) + ' ' + t(1) + ' ' + t(2), null, req);
+    parser = QParser.getParser("{!dismax qf=t10_100_ws pf=t10_100_ws ps=20}"+ t(0) + ' ' + t(1) + ' ' + t(2), req);
     Query q= parser.getQuery();
 
     // SolrIndexSearcher searcher = req.getSearcher();
@@ -169,13 +169,13 @@ public class TestSolrQueryParser extends SolrTestCaseJ4 {
   public void testCSQ() throws Exception {
     SolrQueryRequest req = req();
 
-    QParser qParser = QParser.getParser("text:x^=3", "lucene", req);
+    QParser qParser = QParser.getParser("text:x^=3", req);
     Query q = qParser.getQuery();
     assertTrue(q instanceof BoostQuery);
     assertTrue(((BoostQuery) q).getQuery() instanceof ConstantScoreQuery);
     assertEquals(3.0, ((BoostQuery) q).getBoost(), 0.0f);
 
-    qParser = QParser.getParser("(text:x text:y)^=-3", "lucene", req);
+    qParser = QParser.getParser("(text:x text:y)^=-3", req);
     q = qParser.getQuery();
     assertTrue(q instanceof BoostQuery);
     assertTrue(((BoostQuery) q).getQuery() instanceof ConstantScoreQuery);
@@ -79,4 +79,13 @@ public class TestStandardQParsers extends LuceneTestCase {
 
   }
 
+  /**
+   * Test that "lucene" is the default query parser.
+   */
+  @Test
+  public void testDefaultQType() throws Exception {
+    assertEquals(LuceneQParserPlugin.NAME, QParserPlugin.DEFAULT_QTYPE);
+    assertEquals("lucene", LuceneQParserPlugin.NAME);
+  }
+
 }
@@ -168,7 +168,7 @@ public class TestScoreJoinQPNoScore extends SolrTestCaseJ4 {
     SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp));
 
     {
-      final Query query = QParser.getParser(req.getParams().get("q"), null, req).getQuery();
+      final Query query = QParser.getParser(req.getParams().get("q"), req).getQuery();
       final Query rewrittenQuery = query.rewrite(req.getSearcher().getIndexReader());
       assertTrue(
           rewrittenQuery+" should be Lucene's",
@@ -178,7 +178,7 @@ public class TestScoreJoinQPNoScore extends SolrTestCaseJ4 {
     {
       final Query query = QParser.getParser(
           "{!join from=dept_id_s to=dept_ss}text_t:develop"
-          , null, req).getQuery();
+          , req).getQuery();
       final Query rewrittenQuery = query.rewrite(req.getSearcher().getIndexReader());
       assertEquals(rewrittenQuery+" is expected to be from Solr",
           JoinQParserPlugin.class.getPackage().getName(),
@@ -187,7 +187,7 @@ public class TestScoreJoinQPScore extends SolrTestCaseJ4 {
 
     final SolrQueryRequest req = req("q", "{!join from=movieId_s to=id score=" + score + " b=200}title:movie", "fl", "id,score", "omitHeader", "true");
     SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, new SolrQueryResponse()));
-    final Query luceneQ = QParser.getParser(req.getParams().get("q"), null, req).getQuery().rewrite(req.getSearcher().getLeafReader());
+    final Query luceneQ = QParser.getParser(req.getParams().get("q"), req).getQuery().rewrite(req.getSearcher().getLeafReader());
     assertTrue(luceneQ instanceof BoostQuery);
     float boost = ((BoostQuery) luceneQ).getBoost();
     assertEquals("" + luceneQ, Float.floatToIntBits(200), Float.floatToIntBits(boost));
@@ -98,4 +98,8 @@ public class TestTestInjection extends LuceneTestCase {
       assertFalse(e.getMessage().toLowerCase(Locale.ENGLISH).contains("bad syntax"));
     }
   }
+
+  public void testUsingConsistentRandomization() {
+    assertSame(random(), TestInjection.random());
+  }
 }