This commit is contained in:
Karl Wright 2016-08-10 02:29:38 -04:00
commit 22eeba9920
37 changed files with 277 additions and 95 deletions

View File

@ -771,7 +771,7 @@ public class MockDirectoryWrapper extends BaseDirectoryWrapper {
}
ii = new SlowOpeningMockIndexInputWrapper(this, name, delegateInput);
} else {
ii = new MockIndexInputWrapper(this, name, delegateInput);
ii = new MockIndexInputWrapper(this, name, delegateInput, null);
}
addFileHandle(ii, name, Handle.Input);
return ii;

View File

@ -30,12 +30,19 @@ public class MockIndexInputWrapper extends IndexInput {
private MockDirectoryWrapper dir;
final String name;
private IndexInput delegate;
private boolean isClone;
private boolean closed;
private volatile boolean closed;
/** Construct an empty output buffer. */
public MockIndexInputWrapper(MockDirectoryWrapper dir, String name, IndexInput delegate) {
// Which MockIndexInputWrapper we were cloned from, or null if we are not a clone:
private final MockIndexInputWrapper parent;
/** Sole constructor */
public MockIndexInputWrapper(MockDirectoryWrapper dir, String name, IndexInput delegate, MockIndexInputWrapper parent) {
super("MockIndexInputWrapper(name=" + name + " delegate=" + delegate + ")");
// If we are a clone then our parent better not be a clone!
assert parent == null || parent.parent == null;
this.parent = parent;
this.name = name;
this.dir = dir;
this.delegate = delegate;
@ -54,7 +61,7 @@ public class MockIndexInputWrapper extends IndexInput {
// remove the conditional check so we also track that
// all clones get closed:
assert delegate != null;
if (!isClone) {
if (parent == null) {
dir.removeIndexInput(this, name);
}
dir.maybeThrowDeterministicException();
@ -62,9 +69,13 @@ public class MockIndexInputWrapper extends IndexInput {
}
private void ensureOpen() {
// Fail fast if this input -- or, for clones, the root input it was cloned from --
// has already been closed.
// TODO: not great this is a volatile read (closed) ... we should deploy heavy JVM voodoo like SwitchPoint to avoid this
if (closed) {
throw new RuntimeException("Abusing closed IndexInput!");
}
// 'parent' is null when this wrapper is not a clone; a clone is only usable
// while the input it was cloned from remains open.
if (parent != null && parent.closed) {
throw new RuntimeException("Abusing clone of a closed IndexInput!");
}
}
@Override
@ -75,8 +86,7 @@ public class MockIndexInputWrapper extends IndexInput {
}
dir.inputCloneCount.incrementAndGet();
IndexInput iiclone = delegate.clone();
MockIndexInputWrapper clone = new MockIndexInputWrapper(dir, name, iiclone);
clone.isClone = true;
MockIndexInputWrapper clone = new MockIndexInputWrapper(dir, name, iiclone, parent != null ? parent : this);
// Pending resolution on LUCENE-686 we may want to
// uncomment this code so that we also track that all
// clones get closed:
@ -102,8 +112,7 @@ public class MockIndexInputWrapper extends IndexInput {
}
dir.inputCloneCount.incrementAndGet();
IndexInput slice = delegate.slice(sliceDescription, offset, length);
MockIndexInputWrapper clone = new MockIndexInputWrapper(dir, sliceDescription, slice);
clone.isClone = true;
MockIndexInputWrapper clone = new MockIndexInputWrapper(dir, sliceDescription, slice, parent != null ? parent : this);
return clone;
}

View File

@ -30,7 +30,7 @@ class SlowClosingMockIndexInputWrapper extends MockIndexInputWrapper {
public SlowClosingMockIndexInputWrapper(MockDirectoryWrapper dir,
String name, IndexInput delegate) {
super(dir, name, delegate);
super(dir, name, delegate, null);
}
@Override

View File

@ -28,7 +28,7 @@ class SlowOpeningMockIndexInputWrapper extends MockIndexInputWrapper {
public SlowOpeningMockIndexInputWrapper(MockDirectoryWrapper dir,
String name, IndexInput delegate) throws IOException {
super(dir, name, delegate);
super(dir, name, delegate, null);
try {
Thread.sleep(50);
} catch (InterruptedException ie) {

View File

@ -171,4 +171,40 @@ public class TestMockDirectoryWrapper extends BaseDirectoryTestCase {
assertTrue("MockDirectoryWrapper on dir=" + dir + " failed to corrupt an unsync'd file", changed);
}
/** Verifies that reading from an IndexInput after it has been closed throws at once. */
public void testAbuseClosedIndexInput() throws Exception {
  MockDirectoryWrapper dir = newMockDirectory();
  IndexOutput output = dir.createOutput("foo", IOContext.DEFAULT);
  output.writeByte((byte) 42);
  output.close();
  final IndexInput input = dir.openInput("foo", IOContext.DEFAULT);
  input.close();
  // Any read on a closed input must be rejected by the mock's abuse check.
  expectThrows(RuntimeException.class, () -> input.readByte());
  dir.close();
}
/** Verifies that a clone becomes unusable once the IndexInput it was cloned from is closed. */
public void testAbuseCloneAfterParentClosed() throws Exception {
  MockDirectoryWrapper dir = newMockDirectory();
  IndexOutput output = dir.createOutput("foo", IOContext.DEFAULT);
  output.writeByte((byte) 42);
  output.close();
  IndexInput original = dir.openInput("foo", IOContext.DEFAULT);
  final IndexInput clone = original.clone();
  original.close();
  // Reading through the clone after its parent is closed must fail.
  expectThrows(RuntimeException.class, () -> clone.readByte());
  dir.close();
}
/** A clone of a clone must also be unusable after the original (root) input is closed. */
public void testAbuseCloneOfCloneAfterParentClosed() throws Exception {
MockDirectoryWrapper dir = newMockDirectory();
IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT);
out.writeByte((byte) 42);
out.close();
IndexInput in = dir.openInput("foo", IOContext.DEFAULT);
// The wrapper tracks the root input as the parent of every clone (clones of
// clones included), so closing 'in' must invalidate clone2 even though it
// was cloned from clone1, not from 'in' directly.
IndexInput clone1 = in.clone();
IndexInput clone2 = clone1.clone();
in.close();
expectThrows(RuntimeException.class, clone2::readByte);
dir.close();
}
}

View File

@ -205,6 +205,10 @@ Optimizations
* SOLR-9335: Solr cache/search/update stats counters now use LongAdder which are supposed to have higher throughput
under high contention. (Varun Thacker)
* SOLR-9350: JSON Facets: method="stream" will no longer always use & populate the filter cache, likely
flushing it. 'cacheDf' can be configured to set a doc frequency threshold, now defaulting to 1/16th of the doc count.
Using -1 disables use of the cache. (David Smiley, yonik)
Other Changes
----------------------
@ -241,6 +245,10 @@ Other Changes
* SOLR-9392: Fixed CDCR Test failures which were due to leaked resources. (shalin)
* SOLR-9385: Add QParser.getParser(String,SolrQueryRequest) variant. (Christine Poerschke)
* SOLR-9367: Improved TestInjection's randomization logic to use LuceneTestCase.random() (hossman)
================== 6.1.0 ==================
Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

View File

@ -604,7 +604,7 @@ public class FacetingAccumulator extends BasicAccumulator implements FacetValueA
QueryFacetAccumulator qAcc = new QueryFacetAccumulator(this,qfr.getName(),query);
final Query q;
try {
q = QParser.getParser(query, null, queryRequest).getQuery();
q = QParser.getParser(query, queryRequest).getQuery();
} catch( SyntaxError e ){
throw new SolrException(ErrorCode.BAD_REQUEST,"Invalid query '"+query+"'",e);
}

View File

@ -160,7 +160,7 @@ public class BlobHandler extends RequestHandlerBase implements PluginInfoInitial
} else {
String q = "blobName:{0}";
if (version != -1) q = "id:{0}/{1}";
QParser qparser = QParser.getParser(StrUtils.formatString(q, blobName, version), "lucene", req);
QParser qparser = QParser.getParser(StrUtils.formatString(q, blobName, version), req);
final TopDocs docs = req.getSearcher().search(qparser.parse(), 1, new Sort(new SortField("version", SortField.Type.LONG, true)));
if (docs.totalHits > 0) {
rsp.add(ReplicationHandler.FILE_STREAM, new SolrCore.RawWriter() {

View File

@ -125,7 +125,7 @@ public class MoreLikeThisHandler extends RequestHandlerBase
filters = new ArrayList<>();
for (String fq : fqs) {
if (fq != null && fq.trim().length() != 0) {
QParser fqp = QParser.getParser(fq, null, req);
QParser fqp = QParser.getParser(fq, req);
filters.add(fqp.getQuery());
}
}

View File

@ -176,7 +176,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
query = rb.getQuery();
} else {
try {
QParser parser = QParser.getParser(qs, null, req);
QParser parser = QParser.getParser(qs, req);
query = parser.getQuery();
} catch (Exception e) {
throw new IOException(e);
@ -198,7 +198,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
try {
for (String fq : fqs) {
if (fq != null && fq.trim().length() != 0 && !fq.equals("*:*")) {
QParser fqp = QParser.getParser(fq, null, req);
QParser fqp = QParser.getParser(fq, req);
newFilters.add(fqp.getQuery());
}
}

View File

@ -202,7 +202,7 @@ public class QueryComponent extends SearchComponent
filters = filters == null ? new ArrayList<Query>(fqs.length) : new ArrayList<>(filters);
for (String fq : fqs) {
if (fq != null && fq.trim().length()!=0) {
QParser fqp = QParser.getParser(fq, null, req);
QParser fqp = QParser.getParser(fq, req);
filters.add(fqp.getQuery());
}
}

View File

@ -158,7 +158,7 @@ public class RealTimeGetComponent extends SearchComponent
filters = filters == null ? new ArrayList<Query>(fqs.length) : new ArrayList<>(filters);
for (String fq : fqs) {
if (fq != null && fq.trim().length()!=0) {
QParser fqp = QParser.getParser(fq, null, req);
QParser fqp = QParser.getParser(fq, req);
filters.add(fqp.getQuery());
}
}

View File

@ -60,7 +60,6 @@ import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.QParser;
import org.apache.solr.search.QParserPlugin;
import org.apache.solr.search.SyntaxError;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.spelling.AbstractLuceneSpellChecker;
@ -242,7 +241,7 @@ public class SpellCheckComponent extends SearchComponent implements SolrCoreAwar
try {
if (maxResultsFilterQueryString != null) {
// Get the default Lucene query parser
QParser parser = QParser.getParser(maxResultsFilterQueryString, QParserPlugin.DEFAULT_QTYPE, rb.req);
QParser parser = QParser.getParser(maxResultsFilterQueryString, rb.req);
DocSet s = searcher.getDocSet(parser.getQuery());
maxResultsByFilters = s.size();
} else {

View File

@ -321,7 +321,7 @@ public class SimpleFacets {
public void getFacetQueryCount(ParsedParams parsed, NamedList<Integer> res) throws SyntaxError, IOException {
// TODO: slight optimization would prevent double-parsing of any localParams
// TODO: SOLR-7753
Query qobj = QParser.getParser(parsed.facetValue, null, req).getQuery();
Query qobj = QParser.getParser(parsed.facetValue, req).getQuery();
if (qobj == null) {
res.add(parsed.key, 0);

View File

@ -77,7 +77,7 @@ public class ChildDocTransformerFactory extends TransformerFactory {
BitSetProducer parentsFilter = null;
try {
Query parentFilterQuery = QParser.getParser( parentFilter, null, req).getQuery();
Query parentFilterQuery = QParser.getParser( parentFilter, req).getQuery();
parentsFilter = new QueryBitSetProducer(new QueryWrapperFilter(parentFilterQuery));
} catch (SyntaxError syntaxError) {
throw new SolrException( ErrorCode.BAD_REQUEST, "Failed to create correct parent filter query" );
@ -86,7 +86,7 @@ public class ChildDocTransformerFactory extends TransformerFactory {
Query childFilterQuery = null;
if(childFilter != null) {
try {
childFilterQuery = QParser.getParser( childFilter, null, req).getQuery();
childFilterQuery = QParser.getParser( childFilter, req).getQuery();
} catch (SyntaxError syntaxError) {
throw new SolrException( ErrorCode.BAD_REQUEST, "Failed to create correct child filter query" );
}

View File

@ -76,6 +76,15 @@ import org.apache.solr.search.TermsQParserPlugin;
* its native parameters like <code>collection, shards</code> for subquery, e.g.<br>
* <code>q=*:*&amp;fl=*,foo:[subquery]&amp;foo.q=cloud&amp;foo.collection=departments</code>
*
* <h3>When used in Real Time Get</h3>
* <p>
* When used in the context of a Real Time Get, the <i>values</i> from each document that are used
in the subquery are the "real time" values (possibly from the transaction log), but the query
* itself is still executed against the currently open searcher. Note that this means if a
* document is updated but not yet committed, an RTG request for that document that uses
* <code>[subquery]</code> could include the older (committed) version of that document,
with different field values, in the subquery results.
* </p>
*/
public class SubQueryAugmenterFactory extends TransformerFactory{
@ -304,6 +313,14 @@ class SubQueryAugmenter extends DocTransformer {
return name;
}
/**
* Returns false -- this transformer does use an IndexSearcher, but it does not (necessarily) need
* the searcher from the ResultContext of the document being returned. Instead we use the current
* "live" searcher for the specified core.
*/
@Override
public boolean needsSolrIndexSearcher() { return false; }
@Override
public void transform(SolrDocument doc, int docid, float score) {

View File

@ -223,7 +223,7 @@ public class Grouping {
}
public void addQueryCommand(String groupByStr, SolrQueryRequest request) throws SyntaxError {
QParser parser = QParser.getParser(groupByStr, null, request);
QParser parser = QParser.getParser(groupByStr, request);
Query gq = parser.getQuery();
Grouping.CommandQuery gc = new CommandQuery();
gc.query = gq;

View File

@ -98,7 +98,7 @@ public class JoinQParserPlugin extends QParserPlugin {
RefCounted<SolrIndexSearcher> fromHolder = null;
LocalSolrQueryRequest otherReq = new LocalSolrQueryRequest(fromCore, params);
try {
QParser parser = QParser.getParser(v, "lucene", otherReq);
QParser parser = QParser.getParser(v, otherReq);
fromQuery = parser.getQuery();
fromHolder = fromCore.getRegisteredSearcher();
if (fromHolder != null) fromCoreOpenTime = fromHolder.get().getOpenNanoTime();

View File

@ -263,6 +263,17 @@ public abstract class QParser {
debugInfo.add("QParser", this.getClass().getSimpleName());
}
/** Create a <code>QParser</code> to parse <code>qstr</code>,
* using the "lucene" (QParserPlugin.DEFAULT_QTYPE) query parser.
* The query parser may be overridden by local parameters in the query
* string itself. For example if
* qstr=<code>{!prefix f=myfield}foo</code>
* then the prefix query parser will be used.
*
* @param qstr the raw query string to parse
* @param req the current Solr request
* @return a QParser for <code>qstr</code>
* @throws SyntaxError if the query string cannot be parsed
*/
public static QParser getParser(String qstr, SolrQueryRequest req) throws SyntaxError {
// Convenience overload: delegates with the default ("lucene") parser type.
return getParser(qstr, QParserPlugin.DEFAULT_QTYPE, req);
}
/** Create a <code>QParser</code> to parse <code>qstr</code>,
* assuming that the default query parser is <code>defaultParser</code>.
* The query parser may be overridden by local parameters in the query

View File

@ -83,7 +83,7 @@ public class ReRankQParserPlugin extends QParserPlugin {
if (reRankQueryString == null || reRankQueryString.trim().length() == 0) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, RERANK_QUERY+" parameter is mandatory");
}
QParser reRankParser = QParser.getParser(reRankQueryString, null, req);
QParser reRankParser = QParser.getParser(reRankQueryString, req);
Query reRankQuery = reRankParser.parse();
int reRankDocs = localParams.getInt(RERANK_DOCS, RERANK_DOCS_DEFAULT);

View File

@ -839,9 +839,13 @@ class FacetFieldProcessorStream extends FacetFieldProcessor implements Closeable
createAccs(-1, 1);
// Minimum term docFreq in order to use the filterCache for that term.
int defaultMinDf = Math.max(fcontext.searcher.maxDoc() >> 4, 3); // (minimum of 3 is for test coverage purposes)
int minDfFilterCache = freq.cacheDf == 0 ? defaultMinDf : freq.cacheDf;
if (minDfFilterCache == -1) minDfFilterCache = Integer.MAX_VALUE; // -1 means never cache
if (freq.cacheDf == -1) { // -1 means never cache
minDfFilterCache = Integer.MAX_VALUE;
} else if (freq.cacheDf == 0) { // default; compute as fraction of maxDoc
minDfFilterCache = Math.max(fcontext.searcher.maxDoc() >> 4, 3); // (minimum of 3 is for test coverage purposes)
} else {
minDfFilterCache = freq.cacheDf;
}
docs = fcontext.base;
fastForRandomSet = null;

View File

@ -96,7 +96,7 @@ public class FacetProcessor<FacetRequestT extends FacetRequest> {
String parentStr = freq.domain.parents;
Query parentQuery;
try {
QParser parser = QParser.getParser(parentStr, null, fcontext.req);
QParser parser = QParser.getParser(parentStr, fcontext.req);
parentQuery = parser.getQuery();
} catch (SyntaxError err) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error parsing block join parent specification: " + parentStr);

View File

@ -478,7 +478,7 @@ class FacetQueryParser extends FacetParser<FacetQuery> {
// TODO: substats that are from defaults!!!
if (qstring != null) {
QParser parser = QParser.getParser(qstring, null, getSolrRequest());
QParser parser = QParser.getParser(qstring, getSolrRequest());
facet.q = parser.getQuery();
}

View File

@ -62,7 +62,7 @@ public class QueryCommand implements Command<QueryCommandResult> {
* @return this
*/
public Builder setQuery(String groupQueryString, SolrQueryRequest request) throws SyntaxError {
QParser parser = QParser.getParser(groupQueryString, null, request);
QParser parser = QParser.getParser(groupQueryString, request);
this.queryString = groupQueryString;
return setQuery(parser.getQuery());
}

View File

@ -236,7 +236,7 @@ public class ScoreJoinQParserPlugin extends QParserPlugin {
LocalSolrQueryRequest otherReq = new LocalSolrQueryRequest(fromCore, params);
try {
QParser fromQueryParser = QParser.getParser(fromQueryStr, "lucene", otherReq);
QParser fromQueryParser = QParser.getParser(fromQueryStr, otherReq);
Query fromQuery = fromQueryParser.getQuery();
fromHolder = fromCore.getRegisteredSearcher();

View File

@ -399,7 +399,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
Query q;
try {
// move this higher in the stack?
QParser parser = QParser.getParser(cmd.getQuery(), "lucene", cmd.req);
QParser parser = QParser.getParser(cmd.getQuery(), cmd.req);
q = parser.getQuery();
q = QueryUtils.makeQueryable(q);

View File

@ -486,7 +486,7 @@ public class SolrPluginUtils {
String qs = commands.size() >= 1 ? commands.get(0) : "";
try {
Query query = QParser.getParser(qs, null, req).getQuery();
Query query = QParser.getParser(qs, req).getQuery();
// If the first non-query, non-filter command is a simple sort on an indexed field, then
// we can use the Lucene sort ability.
@ -978,7 +978,7 @@ public class SolrPluginUtils {
List<Query> out = new ArrayList<>(queries.length);
for (String q : queries) {
if (null != q && 0 != q.trim().length()) {
out.add(QParser.getParser(q, null, req).getQuery());
out.add(QParser.getParser(q, req).getQuery());
}
}
return out;

View File

@ -17,6 +17,7 @@
package org.apache.solr.util;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.HashSet;
import java.util.Random;
@ -39,6 +40,11 @@ import org.slf4j.LoggerFactory;
* Allows random faults to be injected in running code during test runs.
*
* Set static strings to "true" or "false" or "true:60" for true 60% of the time.
*
* All methods are No-Ops unless <code>LuceneTestCase</code> is loadable via the ClassLoader used
* to load this class. <code>LuceneTestCase.random()</code> is used as the source of all entropy.
*
* @lucene.internal
*/
public class TestInjection {
@ -53,16 +59,42 @@ public class TestInjection {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private static final Pattern ENABLED_PERCENT = Pattern.compile("(true|false)(?:\\:(\\d+))?$", Pattern.CASE_INSENSITIVE);
private static final Random RANDOM;
private static final String LUCENE_TEST_CASE_FQN = "org.apache.lucene.util.LuceneTestCase";
/**
* If null, then we are not being run as part of a test, and all TestInjection events should be No-Ops.
* If non-null, then this class should be used for accessing random entropy
* @see #random
*/
private static final Class LUCENE_TEST_CASE;
static {
// We try to make things reproducible in the context of our tests by initializing the random instance
// based on the current seed
String seed = System.getProperty("tests.seed");
if (seed == null) {
RANDOM = new Random();
Class nonFinalTemp = null;
try {
ClassLoader classLoader = MethodHandles.lookup().lookupClass().getClassLoader();
nonFinalTemp = classLoader.loadClass(LUCENE_TEST_CASE_FQN);
} catch (ClassNotFoundException e) {
log.debug("TestInjection methods will all be No-Ops since LuceneTestCase not found");
}
LUCENE_TEST_CASE = nonFinalTemp;
}
/**
* Returns a random to be used by the current thread if available, otherwise
* returns null.
* @see #LUCENE_TEST_CASE
*/
static Random random() { // non-private for testing
if (null == LUCENE_TEST_CASE) {
return null;
} else {
RANDOM = new Random(seed.hashCode());
try {
Method randomMethod = LUCENE_TEST_CASE.getMethod("random");
return (Random) randomMethod.invoke(null);
} catch (Exception e) {
throw new IllegalStateException("Unable to use reflection to invoke LuceneTestCase.random()", e);
}
}
}
@ -100,11 +132,14 @@ public class TestInjection {
public static boolean injectRandomDelayInCoreCreation() {
if (randomDelayInCoreCreation != null) {
Random rand = random();
if (null == rand) return true;
Pair<Boolean,Integer> pair = parseValue(randomDelayInCoreCreation);
boolean enabled = pair.first();
int chanceIn100 = pair.second();
if (enabled && RANDOM.nextInt(100) >= (100 - chanceIn100)) {
int delay = RANDOM.nextInt(randomDelayMaxInCoreCreationInSec);
if (enabled && rand.nextInt(100) >= (100 - chanceIn100)) {
int delay = rand.nextInt(randomDelayMaxInCoreCreationInSec);
log.info("Inject random core creation delay of {}s", delay);
try {
Thread.sleep(delay * 1000);
@ -118,11 +153,14 @@ public class TestInjection {
public static boolean injectNonGracefullClose(CoreContainer cc) {
if (cc.isShutDown() && nonGracefullClose != null) {
Random rand = random();
if (null == rand) return true;
Pair<Boolean,Integer> pair = parseValue(nonGracefullClose);
boolean enabled = pair.first();
int chanceIn100 = pair.second();
if (enabled && RANDOM.nextInt(100) >= (100 - chanceIn100)) {
if (RANDOM.nextBoolean()) {
if (enabled && rand.nextInt(100) >= (100 - chanceIn100)) {
if (rand.nextBoolean()) {
throw new TestShutdownFailError("Test exception for non graceful close");
} else {
@ -135,7 +173,9 @@ public class TestInjection {
// we should only need to do it once
try {
Thread.sleep(RANDOM.nextInt(1000));
// call random() again to get the correct one for this thread
Random taskRand = random();
Thread.sleep(taskRand.nextInt(1000));
} catch (InterruptedException e) {
}
@ -147,7 +187,7 @@ public class TestInjection {
};
Timer timer = new Timer();
timers.add(timer);
timer.schedule(task, RANDOM.nextInt(500));
timer.schedule(task, rand.nextInt(500));
}
}
}
@ -156,10 +196,13 @@ public class TestInjection {
public static boolean injectFailReplicaRequests() {
if (failReplicaRequests != null) {
Random rand = random();
if (null == rand) return true;
Pair<Boolean,Integer> pair = parseValue(failReplicaRequests);
boolean enabled = pair.first();
int chanceIn100 = pair.second();
if (enabled && RANDOM.nextInt(100) >= (100 - chanceIn100)) {
if (enabled && rand.nextInt(100) >= (100 - chanceIn100)) {
throw new SolrException(ErrorCode.SERVER_ERROR, "Random test update fail");
}
}
@ -169,10 +212,13 @@ public class TestInjection {
public static boolean injectFailUpdateRequests() {
if (failUpdateRequests != null) {
Random rand = random();
if (null == rand) return true;
Pair<Boolean,Integer> pair = parseValue(failUpdateRequests);
boolean enabled = pair.first();
int chanceIn100 = pair.second();
if (enabled && RANDOM.nextInt(100) >= (100 - chanceIn100)) {
if (enabled && rand.nextInt(100) >= (100 - chanceIn100)) {
throw new SolrException(ErrorCode.SERVER_ERROR, "Random test update fail");
}
}
@ -182,10 +228,13 @@ public class TestInjection {
public static boolean injectNonExistentCoreExceptionAfterUnload(String cname) {
if (nonExistentCoreExceptionAfterUnload != null) {
Random rand = random();
if (null == rand) return true;
Pair<Boolean,Integer> pair = parseValue(nonExistentCoreExceptionAfterUnload);
boolean enabled = pair.first();
int chanceIn100 = pair.second();
if (enabled && RANDOM.nextInt(100) >= (100 - chanceIn100)) {
if (enabled && rand.nextInt(100) >= (100 - chanceIn100)) {
throw new NonExistentCoreException("Core not found to unload: " + cname);
}
}
@ -195,11 +244,14 @@ public class TestInjection {
public static boolean injectUpdateLogReplayRandomPause() {
if (updateLogReplayRandomPause != null) {
Random rand = random();
if (null == rand) return true;
Pair<Boolean,Integer> pair = parseValue(updateLogReplayRandomPause);
boolean enabled = pair.first();
int chanceIn100 = pair.second();
if (enabled && RANDOM.nextInt(100) >= (100 - chanceIn100)) {
long rndTime = RANDOM.nextInt(1000);
if (enabled && rand.nextInt(100) >= (100 - chanceIn100)) {
long rndTime = rand.nextInt(1000);
log.info("inject random log replay delay of {}ms", rndTime);
try {
Thread.sleep(rndTime);
@ -214,11 +266,14 @@ public class TestInjection {
public static boolean injectUpdateRandomPause() {
if (updateRandomPause != null) {
Random rand = random();
if (null == rand) return true;
Pair<Boolean,Integer> pair = parseValue(updateRandomPause);
boolean enabled = pair.first();
int chanceIn100 = pair.second();
if (enabled && RANDOM.nextInt(100) >= (100 - chanceIn100)) {
long rndTime = RANDOM.nextInt(1000);
if (enabled && rand.nextInt(100) >= (100 - chanceIn100)) {
long rndTime = rand.nextInt(1000);
log.info("inject random update delay of {}ms", rndTime);
try {
Thread.sleep(rndTime);

View File

@ -131,7 +131,7 @@ public class TestReversedWildcardFilterFactory extends SolrTestCaseJ4 {
"//result[@numFound=1]");
SolrQueryRequest req = req();
QParser qparser = QParser.getParser("id:1", "lucene", req);
QParser qparser = QParser.getParser("id:1", req);
SolrQueryParser parserTwo = new SolrQueryParser(qparser, "two");
assertTrue(parserTwo.getAllowLeadingWildcard());

View File

@ -120,11 +120,7 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
new GeoTransformerValidator("geo_2_srpt","my_geo_alias"),
new ExplainValidator(),
new ExplainValidator("explain_alias"),
//
// SOLR-9377: SubQueryValidator fails on uncommitted docs because not using RT searcher for sub query
//
// new SubQueryValidator(),
//
new SubQueryValidator(),
new NotIncludedValidator("score"),
new NotIncludedValidator("score","score_alias:score")));
@ -197,8 +193,7 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
// items should only be added to this list if it's known that they do not work with RTG
// and a specific Jira for fixing this is listed as a comment
final List<String> knownBugs = Arrays.asList
( SubQueryValidator.NAME, // SOLR-9377
"xml","json", // SOLR-9376
( "xml","json", // SOLR-9376
"child" // way too complicated to vet with this test, see SOLR-9379 instead
);
@ -336,6 +331,9 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
//
"geo_1_srpt", GeoTransformerValidator.getValueForIndexing(random()),
"geo_2_srpt", GeoTransformerValidator.getValueForIndexing(random()),
// for testing subqueries
"next_2_ids_ss", String.valueOf(docId + 1),
"next_2_ids_ss", String.valueOf(docId + 2),
// for testing prefix globbing
"axx_i", random().nextInt(),
"ayy_i", random().nextInt(),
@ -365,11 +363,7 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
final Set<FlValidator> validators = new LinkedHashSet<>();
validators.add(ID_VALIDATOR); // always include id so we can be confident which doc we're looking at
addRandomFlValidators(random(), validators);
FlValidator.addFlParams(validators, params);
// HACK: [subquery] expects this to be top level params
params.add(SubQueryValidator.SUBQ_KEY + ".q",
"{!field f=" + SubQueryValidator.SUBQ_FIELD + " v=$row." + SubQueryValidator.SUBQ_FIELD + "}");
FlValidator.addParams(validators, params);
final List<String> idsToRequest = new ArrayList<>(docIds.length);
final List<SolrInputDocument> docsToExpect = new ArrayList<>(docIds.length);
@ -421,7 +415,7 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
// NOTE: RTG makes no guarantees about the order docs will be returned in when multiple docs are requested
for (SolrDocument actual : docs) {
try {
int actualId = Integer.parseInt(actual.getFirstValue("id").toString());
int actualId = assertParseInt("id", actual.getFirstValue("id"));
final SolrInputDocument expected = knownDocs[actualId];
assertNotNull("expected null doc but RTG returned: " + actual, expected);
@ -485,10 +479,14 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
*/
private interface FlValidator {
/** Given a list of FlValidators, adds one or more fl params that corrispond to the entire set */
public static void addFlParams(final Collection<FlValidator> validators, final ModifiableSolrParams params) {
/**
* Given a list of FlValidators, adds one or more fl params that correspond to the entire set,
* as well as any other special case top level params required by the validators.
*/
public static void addParams(final Collection<FlValidator> validators, final ModifiableSolrParams params) {
final List<String> fls = new ArrayList<>(validators.size());
for (FlValidator v : validators) {
params.add(v.getExtraRequestParams());
fls.add(v.getFlParam());
}
params.add(buildCommaSepParams(random(), "fl", fls));
@ -519,6 +517,11 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
*/
public default String getDefaultTransformerFactoryName() { return null; }
/**
* Any special case params that must be added to the request for this validator
*/
public default SolrParams getExtraRequestParams() { return params(); }
/**
* Must return a non null String that can be used in an fl param -- either by itself,
* or with other items separated by commas
@ -747,34 +750,50 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
* Trivial validator of a SubQueryAugmenter.
*
* This validator ignores 90% of the features/complexity
* of SubQueryAugmenter, and instead just focuses on the basics of
* "did we match at least one doc based on a field value of the requested doc?"
* of SubQueryAugmenter, and instead just focuses on the basics of:
* <ul>
* <li>do a subquery for docs where SUBQ_FIELD contains the id of the top level doc</li>
* <li>verify that any subquery match is expected based on indexing pattern</li>
* </ul>
*/
private static class SubQueryValidator implements FlValidator {
// HACK to work around SOLR-9396...
//
// we're using "id" (and only "id") in the subquery.q as a workaround for a limitation in
// "$rows.foo" parsing -- it only works reliably if "foo" is in fl, so we only use "$rows.id",
// which we know is in every request (and is a valid integer)
public final static String NAME = "subquery";
public final static String SUBQ_KEY = "subq";
public final static String SUBQ_FIELD = "aaa_i";
/** always returns true */
public boolean requiresRealtimeSearcherReOpen() { return true; }
public final static String SUBQ_FIELD = "next_2_ids_i";
public String getFlParam() { return SUBQ_KEY+":["+NAME+"]"; }
public Collection<String> assertRTGResults(final Collection<FlValidator> validators,
final SolrInputDocument expected,
final SolrDocument actual) {
final Object origVal = expected.getFieldValue(SUBQ_FIELD);
final int compVal = assertParseInt("expected id", expected.getFieldValue("id"));
final Object actualVal = actual.getFieldValue(SUBQ_KEY);
assertTrue("Expected a doclist: " + actualVal,
actualVal instanceof SolrDocumentList);
SolrDocumentList subList = (SolrDocumentList) actualVal;
assertTrue("sub query should have producted at least one result (this doc)",
1 <= subList.getNumFound());
for (SolrDocument subDoc : subList) {
assertEquals("orig doc value doesn't match subquery doc value",
origVal, subDoc.getFirstValue(SUBQ_FIELD));
assertTrue("should be at most 2 docs in doc list: " + actualVal,
((SolrDocumentList) actualVal).getNumFound() <= 2);
for (SolrDocument subDoc : (SolrDocumentList) actualVal) {
final int subDocIdVal = assertParseInt("subquery id", subDoc.getFirstValue("id"));
assertTrue("subDocId="+subDocIdVal+" not in valid range for id="+compVal+" (expected "
+ (compVal-1) + " or " + (compVal-2) + ")",
((subDocIdVal < compVal) && ((compVal-2) <= subDocIdVal)));
}
return Collections.<String>singleton(SUBQ_KEY);
}
public String getDefaultTransformerFactoryName() { return NAME; }
/**
* Supplies the top-level subquery param: a field query on SUBQ_FIELD matching
* the id of the row being transformed (via the <code>$row.id</code> reference).
*/
public SolrParams getExtraRequestParams() {
return params(SubQueryValidator.SUBQ_KEY + ".q",
"{!field f=" + SubQueryValidator.SUBQ_FIELD + " v=$row.id}");
}
}
/** Trivial validator of a GeoTransformer */
@ -945,4 +964,15 @@ public class TestRandomFlRTGCloud extends SolrCloudTestCase {
}
return result;
}
/** Helper method: asserts that {@code orig} is a non-null String that can be parsed as an int, and returns the parsed value. */
public static int assertParseInt(String msg, Object orig) {
assertNotNull(msg + ": is null", orig);
assertTrue(msg + ": is not a string: " + orig, orig instanceof String);
try {
return Integer.parseInt(orig.toString());
} catch (NumberFormatException nfe) {
// Rewrap as an assertion failure so the test framework reports it with context.
throw new AssertionError(msg + ": can't be parsed as a number: " + orig, nfe);
}
}
}

View File

@ -133,7 +133,7 @@ public class TestOverriddenPrefixQueryForCustomFieldType extends SolrTestCaseJ4
SolrQueryResponse rsp = new SolrQueryResponse();
SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp));
for (int i = 0; i < inputs.length; i++) {
queries[i] = (QParser.getParser(inputs[i], null, req).getQuery());
queries[i] = (QParser.getParser(inputs[i], req).getQuery());
}
} finally {
SolrRequestInfo.clearRequestInfo();

View File

@ -198,10 +198,10 @@ public class TestSearchPerf extends AbstractSolrTestCase {
String u=t((int)(indexSize*10*fractionCovered));
SolrQueryRequest req = lrf.makeRequest();
QParser parser = QParser.getParser("foomany_s:[" + l + " TO " + u + "]", null, req);
QParser parser = QParser.getParser("foomany_s:[" + l + " TO " + u + "]", req);
Query range = parser.getQuery();
QParser parser2 = QParser.getParser("{!frange l="+l+" u="+u+"}foomany_s", null, req);
QParser parser2 = QParser.getParser("{!frange l="+l+" u="+u+"}foomany_s", req);
Query frange = parser2.getQuery();
req.close();
@ -224,13 +224,13 @@ public class TestSearchPerf extends AbstractSolrTestCase {
SolrQueryRequest req = lrf.makeRequest();
QParser parser = QParser.getParser("foomany_s:[" + l + " TO " + u + "]", null, req);
QParser parser = QParser.getParser("foomany_s:[" + l + " TO " + u + "]", req);
Query rangeQ = parser.getQuery();
List<Query> filters = new ArrayList<>();
filters.add(rangeQ);
req.close();
parser = QParser.getParser("{!dismax qf=t10_100_ws pf=t10_100_ws ps=20}"+ t(0) + ' ' + t(1) + ' ' + t(2), null, req);
parser = QParser.getParser("{!dismax qf=t10_100_ws pf=t10_100_ws ps=20}"+ t(0) + ' ' + t(1) + ' ' + t(2), req);
Query q= parser.getQuery();
// SolrIndexSearcher searcher = req.getSearcher();

View File

@ -169,13 +169,13 @@ public class TestSolrQueryParser extends SolrTestCaseJ4 {
public void testCSQ() throws Exception {
SolrQueryRequest req = req();
QParser qParser = QParser.getParser("text:x^=3", "lucene", req);
QParser qParser = QParser.getParser("text:x^=3", req);
Query q = qParser.getQuery();
assertTrue(q instanceof BoostQuery);
assertTrue(((BoostQuery) q).getQuery() instanceof ConstantScoreQuery);
assertEquals(3.0, ((BoostQuery) q).getBoost(), 0.0f);
qParser = QParser.getParser("(text:x text:y)^=-3", "lucene", req);
qParser = QParser.getParser("(text:x text:y)^=-3", req);
q = qParser.getQuery();
assertTrue(q instanceof BoostQuery);
assertTrue(((BoostQuery) q).getQuery() instanceof ConstantScoreQuery);

View File

@ -79,4 +79,13 @@ public class TestStandardQParsers extends LuceneTestCase {
}
/**
 * Test that "lucene" is the default query parser.
 */
@Test
public void testDefaultQType() throws Exception {
  final String luceneParserName = LuceneQParserPlugin.NAME;
  assertEquals(luceneParserName, QParserPlugin.DEFAULT_QTYPE);
  assertEquals("lucene", luceneParserName);
}
}

View File

@ -168,7 +168,7 @@ public class TestScoreJoinQPNoScore extends SolrTestCaseJ4 {
SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp));
{
final Query query = QParser.getParser(req.getParams().get("q"), null, req).getQuery();
final Query query = QParser.getParser(req.getParams().get("q"), req).getQuery();
final Query rewrittenQuery = query.rewrite(req.getSearcher().getIndexReader());
assertTrue(
rewrittenQuery+" should be Lucene's",
@ -178,7 +178,7 @@ public class TestScoreJoinQPNoScore extends SolrTestCaseJ4 {
{
final Query query = QParser.getParser(
"{!join from=dept_id_s to=dept_ss}text_t:develop"
, null, req).getQuery();
, req).getQuery();
final Query rewrittenQuery = query.rewrite(req.getSearcher().getIndexReader());
assertEquals(rewrittenQuery+" is expected to be from Solr",
JoinQParserPlugin.class.getPackage().getName(),

View File

@ -187,7 +187,7 @@ public class TestScoreJoinQPScore extends SolrTestCaseJ4 {
final SolrQueryRequest req = req("q", "{!join from=movieId_s to=id score=" + score + " b=200}title:movie", "fl", "id,score", "omitHeader", "true");
SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, new SolrQueryResponse()));
final Query luceneQ = QParser.getParser(req.getParams().get("q"), null, req).getQuery().rewrite(req.getSearcher().getLeafReader());
final Query luceneQ = QParser.getParser(req.getParams().get("q"), req).getQuery().rewrite(req.getSearcher().getLeafReader());
assertTrue(luceneQ instanceof BoostQuery);
float boost = ((BoostQuery) luceneQ).getBoost();
assertEquals("" + luceneQ, Float.floatToIntBits(200), Float.floatToIntBits(boost));

View File

@ -98,4 +98,8 @@ public class TestTestInjection extends LuceneTestCase {
assertFalse(e.getMessage().toLowerCase(Locale.ENGLISH).contains("bad syntax"));
}
}
/** TestInjection must hand back the exact same Random instance the test framework uses (identity, not just equal seed). */
public void testUsingConsistentRandomization() {
  assertSame(random(), TestInjection.random());
}
}