SOLR-603: Add partial optimize capabilities and deprecate DirectUpdateHandler

git-svn-id: https://svn.apache.org/repos/asf/lucene/solr/trunk@672031 13f79535-47bb-0310-9956-ffa450edef68
Grant Ingersoll 2008-06-26 21:14:06 +00:00
parent a0983feb04
commit 77a4e2fa4f
11 changed files with 548 additions and 10 deletions

View File: CHANGES.txt

@@ -298,6 +298,8 @@ New Features
(Shalin Shekhar Mangar, Bojan Smid, gsingers)
54. SOLR-423: Added Request Handler close hook notification so that RequestHandlers can be notified when a core is closing. (gsingers, ryan)
55. SOLR-603: Added ability to partially optimize. (gsingers)
Changes in runtime behavior
1. SOLR-559: use Lucene updateDocument, deleteDocuments methods. This

View File: SolrServer.java

@@ -95,7 +95,7 @@ public abstract class SolrServer implements Serializable
* @throws IOException
*/
public UpdateResponse optimize( ) throws SolrServerException, IOException {
return optimize(true, true);
return optimize(true, true, 1);
}
public UpdateResponse commit( boolean waitFlush, boolean waitSearcher ) throws SolrServerException, IOException {
@@ -103,7 +103,11 @@ public abstract class SolrServer implements Serializable
}
public UpdateResponse optimize( boolean waitFlush, boolean waitSearcher ) throws SolrServerException, IOException {
return new UpdateRequest().setAction( UpdateRequest.ACTION.OPTIMIZE, waitFlush, waitSearcher ).process( this );
return optimize(waitFlush, waitSearcher, 1);
}
public UpdateResponse optimize(boolean waitFlush, boolean waitSearcher, int maxSegments ) throws SolrServerException, IOException {
return new UpdateRequest().setAction( UpdateRequest.ACTION.OPTIMIZE, waitFlush, waitSearcher, maxSegments ).process( this );
}
public UpdateResponse deleteById(String id) throws SolrServerException, IOException {
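For context, a minimal SolrJ sketch of the new overload (not part of this commit; the URL and the CommonsHttpSolrServer implementation are illustrative assumptions):

import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.impl.CommonsHttpSolrServer;
import org.apache.solr.client.solrj.response.UpdateResponse;

public class PartialOptimizeExample {
  public static void main(String[] args) throws Exception {
    SolrServer server = new CommonsHttpSolrServer("http://localhost:8983/solr");
    // Old behavior (unchanged): merge down to a single segment.
    server.optimize();
    // New overload: stop merging once at most 10 segments remain.
    UpdateResponse rsp = server.optimize(true, true, 10);
    System.out.println("status: " + rsp.getStatus());
  }
}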

View File: UpdateRequest.java

@@ -127,11 +127,16 @@ public class UpdateRequest extends SolrRequest
/** Sets appropriate parameters for the given ACTION */
public UpdateRequest setAction(ACTION action, boolean waitFlush, boolean waitSearcher ) {
return setAction(action, waitFlush, waitSearcher, 1);
}
public UpdateRequest setAction(ACTION action, boolean waitFlush, boolean waitSearcher, int maxSegments ) {
if (params == null)
params = new ModifiableSolrParams();
if( action == ACTION.OPTIMIZE ) {
params.set( UpdateParams.OPTIMIZE, "true" );
params.set(UpdateParams.MAX_OPTIMIZE_SEGMENTS, maxSegments);
}
else if( action == ACTION.COMMIT ) {
params.set( UpdateParams.COMMIT, "true" );
@@ -140,6 +145,7 @@ public class UpdateRequest extends SolrRequest
params.set( UpdateParams.WAIT_SEARCHER, waitSearcher+"" );
return this;
}
public void setParam(String param, String value) {
if (params == null)
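A small sketch (not part of this commit) of the parameters the new setAction overload produces:

import org.apache.solr.client.solrj.request.UpdateRequest;

public class OptimizeRequestParams {
  public static void main(String[] args) {
    UpdateRequest req = new UpdateRequest();
    // Optimize down to at most 5 segments, without waiting on flush or searcher.
    req.setAction(UpdateRequest.ACTION.OPTIMIZE, false, false, 5);
    // Expected: optimize=true, maxSegments=5, waitFlush=false, waitSearcher=false
    System.out.println(req.getParams());
  }
}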

View File: UpdateParams.java

@@ -42,4 +42,8 @@ public interface UpdateParams
/** Select the update processor to use. A RequestHandler may or may not respect this parameter */
public static final String UPDATE_PROCESSOR = "update.processor";
/**
* If optimizing, set the maximum number of segments left in the index after optimization. 1 is the default (and is equivalent to calling IndexWriter.optimize() in Lucene).
*/
public static final String MAX_OPTIMIZE_SEGMENTS = "maxSegments";
}
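On the wire this is an ordinary update parameter alongside optimize, e.g. optimize=true&maxSegments=10 (values illustrative); with the default of 1 the request behaves exactly like a full optimize.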

View File: RequestHandlerUtils.java

@@ -67,6 +67,7 @@ public class RequestHandlerUtils
CommitUpdateCommand cmd = new CommitUpdateCommand( optimize );
cmd.waitFlush = params.getBool( UpdateParams.WAIT_FLUSH, cmd.waitFlush );
cmd.waitSearcher = params.getBool( UpdateParams.WAIT_SEARCHER, cmd.waitSearcher );
cmd.maxOptimizeSegments = params.getInt(UpdateParams.MAX_OPTIMIZE_SEGMENTS, cmd.maxOptimizeSegments);
req.getCore().getUpdateHandler().commit( cmd );
// Lets wait till after solr1.2 to define consistent output format
@@ -99,6 +100,7 @@ public class RequestHandlerUtils
CommitUpdateCommand cmd = new CommitUpdateCommand( optimize );
cmd.waitFlush = params.getBool( UpdateParams.WAIT_FLUSH, cmd.waitFlush );
cmd.waitSearcher = params.getBool( UpdateParams.WAIT_SEARCHER, cmd.waitSearcher );
cmd.maxOptimizeSegments = params.getInt(UpdateParams.MAX_OPTIMIZE_SEGMENTS, cmd.maxOptimizeSegments);
processor.processCommit( cmd );
return true;
}

View File: XmlUpdateRequestHandler.java

@@ -20,6 +20,7 @@ package org.apache.solr.handler;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.io.File;
import java.util.HashMap;
import java.util.logging.Logger;
@@ -75,7 +76,8 @@ public class XmlUpdateRequestHandler extends RequestHandlerBase
public static final String ALLOW_DUPS = "allowDups";
XMLInputFactory inputFactory;
@Override
public void init(NamedList args)
{
@@ -210,7 +212,10 @@ public class XmlUpdateRequestHandler extends RequestHandlerBase
} else if (WAIT_SEARCHER.equals(attrName)) {
cmd.waitSearcher = StrUtils.parseBoolean(attrVal);
sawWaitSearcher = true;
} else {
} else if (UpdateParams.MAX_OPTIMIZE_SEGMENTS.equals(attrName)){
cmd.maxOptimizeSegments = Integer.parseInt(attrVal);
}
else {
log.warning("unexpected attribute commit/@" + attrName);
}
}
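This also makes the attribute usable directly in the XML update message, e.g. <optimize maxSegments="2" waitFlush="false"/> (values illustrative).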

View File: CommitUpdateCommand.java

@@ -24,6 +24,13 @@ public class CommitUpdateCommand extends UpdateCommand {
public boolean waitFlush;
public boolean waitSearcher=true;
/**
* During optimize, merge down to <= this many segments. Must be >= 1.
*
* @see org.apache.lucene.index.IndexWriter#optimize(int)
*/
public int maxOptimizeSegments = 1;
public CommitUpdateCommand(boolean optimize) {
super("commit");
this.optimize=optimize;
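Server-side callers only need to set the new field; a sketch (not part of this commit) of issuing a partial optimize against an already-open core:

import java.io.IOException;
import org.apache.solr.core.SolrCore;
import org.apache.solr.update.CommitUpdateCommand;

public class PartialOptimizeServerSide {
  // Optimize, stopping once at most maxSegments segments remain.
  static void partialOptimize(SolrCore core, int maxSegments) throws IOException {
    CommitUpdateCommand cmd = new CommitUpdateCommand(true); // true => optimize
    cmd.maxOptimizeSegments = maxSegments;
    core.getUpdateHandler().commit(cmd);
  }
}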

View File: DirectUpdateHandler.java

@@ -49,6 +49,8 @@ import org.apache.solr.core.SolrCore;
*
* @version $Id$
* @since solr 0.9
*
* @deprecated Use {@link DirectUpdateHandler2} instead. This is only kept around for back-compatibility (way back).
*/
public class DirectUpdateHandler extends UpdateHandler {
@@ -236,7 +238,7 @@ public class DirectUpdateHandler extends UpdateHandler {
closeSearcher(); // flush any deletes
if (cmd.optimize) {
openWriter(); // writer needs to be open to optimize
writer.optimize();
writer.optimize(cmd.maxOptimizeSegments);
}
closeWriter();

View File: DirectUpdateHandler2.java

@@ -360,8 +360,8 @@ public class DirectUpdateHandler2 extends UpdateHandler {
if (cmd.optimize) {
closeSearcher();
openWriter();
writer.optimize();
openWriter();
writer.optimize(cmd.maxOptimizeSegments);
}
closeSearcher();
@@ -402,8 +402,6 @@
SolrException.log(log,e);
}
}
return;
}
@@ -521,7 +519,8 @@
try {
CommitUpdateCommand command = new CommitUpdateCommand( false );
command.waitFlush = true;
command.waitSearcher = true;
//no need for command.maxOptimizeSegments = 1; since it is not optimizing
commit( command );
autoCommitCount++;
}
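Both update handlers now delegate to Lucene's IndexWriter.optimize(int maxNumSegments), which merges only until at most that many segments remain; with the default of 1 the behavior is identical to the old writer.optimize().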

View File: DirectUpdateHandlerOptimizeTest.java

@@ -0,0 +1,94 @@
package org.apache.solr.update;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.solr.core.SolrCore;
import org.apache.solr.util.AbstractSolrTestCase;
import java.io.File;
import java.io.FileFilter;
/**
* Tests partial optimize (maxOptimizeSegments) support in the update handler.
**/
public class DirectUpdateHandlerOptimizeTest extends AbstractSolrTestCase {
public String getSchemaFile() {
return "schema.xml";
}
public String getSolrConfigFile() {
return "solrconfig-duh-optimize.xml";
}
public void testOptimize() throws Exception {
SolrCore core = h.getCore();
UpdateHandler updater = core.getUpdateHandler();
AddUpdateCommand cmd = new AddUpdateCommand();
cmd.overwriteCommitted = true;
cmd.overwritePending = true;
cmd.allowDups = false;
//add just under the merge factor, so no segments are merged
//the merge factor is 1000 and maxBufferedDocs is 2, so there should be 500 segments (499 segs w/ 2 docs each, and 1 segment with 1 doc)
for (int i = 0; i < 999; i++) {
// Add a valid document
cmd.doc = new Document();
cmd.doc.add(new Field("id", "id_" + i, Field.Store.YES, Field.Index.UN_TOKENIZED));
cmd.doc.add(new Field("subject", "subject_" + i, Field.Store.NO, Field.Index.TOKENIZED));
updater.addDoc(cmd);
}
CommitUpdateCommand cmtCmd = new CommitUpdateCommand(false);
updater.commit(cmtCmd);
String indexDir = core.getIndexDir();
assertNumSegments(indexDir, 500);
//now do an optimize
cmtCmd = new CommitUpdateCommand(true);
cmtCmd.maxOptimizeSegments = 250;
updater.commit(cmtCmd);
assertNumSegments(indexDir, 250);
cmtCmd.maxOptimizeSegments = -1;
try {
updater.commit(cmtCmd);
fail("commit should have failed: maxOptimizeSegments must be >= 1");
} catch (IllegalArgumentException e) {
//expected
}
cmtCmd.maxOptimizeSegments = 1;
updater.commit(cmtCmd);
assertNumSegments(indexDir, 1);
}
private void assertNumSegments(String indexDir, int numSegs) {
File file = new File(indexDir);
File[] segs = file.listFiles(new FileFilter() {
public boolean accept(File file) {
return file.getName().endsWith("tii");
}
});
assertTrue("Wrong number of segments: " + segs.length + " does not equal: " + numSegs, segs.length == numSegs);
}
}
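Note that assertNumSegments counts .tii (term index) files, one per segment; this works only because the test config below disables compound files (useCompoundFile=false).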

View File: solrconfig-duh-optimize.xml

@@ -0,0 +1,413 @@
<?xml version="1.0" ?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!-- $Id: solrconfig.xml 382610 2006-03-03 01:43:03Z yonik $
$Source$
$Name$
-->
<config>
<!-- Used to specify an alternate directory to hold all index data.
It defaults to "index" if not present, and should probably
not be changed if replication is in use. -->
<dataDir>${solr.data.dir:./solr/data}</dataDir>
<indexDefaults>
<!-- Values here affect all index writers and act as a default
unless overridden. -->
<useCompoundFile>false</useCompoundFile>
<!-- Set a high merge factor, so we get lots of segments -->
<mergeFactor>1000</mergeFactor>
<maxBufferedDocs>2</maxBufferedDocs>
<!--<maxBufferedDocs>1000</maxBufferedDocs>-->
<!-- Tell Lucene when to flush documents to disk.
Giving Lucene more memory for indexing means faster indexing at the cost of more RAM.
If both ramBufferSizeMB and maxBufferedDocs are set, Lucene will flush based on whichever limit is hit first.
-->
<maxMergeDocs>2147483647</maxMergeDocs>
<maxFieldLength>10000</maxFieldLength>
<!--
Expert: Turn on Lucene's auto commit capability.
NOTE: Despite the name, this value does not have any relation to Solr's autoCommit functionality
-->
<luceneAutoCommit>false</luceneAutoCommit>
<!--
Expert:
The Merge Policy in Lucene controls how merging is handled. The default in Lucene 2.3 is LogByteSizeMergePolicy; previous
versions used LogDocMergePolicy.
LogByteSizeMergePolicy chooses segments to merge based on their size. The Lucene 2.2 default, LogDocMergePolicy, chose
segments to merge based on the number of documents.
Other implementations of MergePolicy must have a no-argument constructor.
-->
<mergePolicy>org.apache.lucene.index.LogByteSizeMergePolicy</mergePolicy>
<!--
Expert:
The Merge Scheduler in Lucene controls how merges are performed. The ConcurrentMergeScheduler (Lucene 2.3 default)
can perform merges in the background using separate threads. The SerialMergeScheduler (Lucene 2.2 default) does not.
-->
<mergeScheduler>org.apache.lucene.index.ConcurrentMergeScheduler</mergeScheduler>
<!-- these are global... can't currently override per index -->
<writeLockTimeout>1000</writeLockTimeout>
<commitLockTimeout>10000</commitLockTimeout>
<lockType>single</lockType>
</indexDefaults>
<mainIndex>
<!-- lucene options specific to the main on-disk lucene index -->
<useCompoundFile>false</useCompoundFile>
<mergeFactor>1000</mergeFactor>
<maxBufferedDocs>2</maxBufferedDocs>
<maxMergeDocs>2147483647</maxMergeDocs>
<maxFieldLength>10000</maxFieldLength>
<unlockOnStartup>true</unlockOnStartup>
</mainIndex>
<updateHandler class="solr.DirectUpdateHandler2">
<!-- autocommit pending docs if certain criteria are met
<autoCommit>
<maxDocs>10000</maxDocs>
<maxTime>3600000</maxTime>
</autoCommit>
-->
<!-- represents a lower bound on the frequency that commits may
occur (in seconds). NOTE: not yet implemented
<commitIntervalLowerBound>0</commitIntervalLowerBound>
-->
<!-- The RunExecutableListener executes an external command.
exe - the name of the executable to run
dir - dir to use as the current working directory. default="."
wait - the calling thread waits until the executable returns. default="true"
args - the arguments to pass to the program. default=nothing
env - environment variables to set. default=nothing
-->
<!-- A postCommit event is fired after every commit
<listener event="postCommit" class="solr.RunExecutableListener">
<str name="exe">/var/opt/resin3/__PORT__/scripts/solr/snapshooter</str>
<str name="dir">/var/opt/resin3/__PORT__</str>
<bool name="wait">true</bool>
<arr name="args"> <str>arg1</str> <str>arg2</str> </arr>
<arr name="env"> <str>MYVAR=val1</str> </arr>
</listener>
-->
</updateHandler>
<query>
<!-- Maximum number of clauses in a boolean query... can affect
range or wildcard queries that expand to big boolean
queries. An exception is thrown if exceeded.
-->
<maxBooleanClauses>1024</maxBooleanClauses>
<!-- Cache specification for Filters or DocSets - unordered set of *all* documents
that match a particular query.
-->
<filterCache
class="solr.search.LRUCache"
size="512"
initialSize="512"
autowarmCount="256"/>
<queryResultCache
class="solr.search.LRUCache"
size="512"
initialSize="512"
autowarmCount="1024"/>
<documentCache
class="solr.search.LRUCache"
size="512"
initialSize="512"
autowarmCount="0"/>
<!-- If true, stored fields that are not requested will be loaded lazily.
-->
<enableLazyFieldLoading>true</enableLazyFieldLoading>
<!--
<cache name="myUserCache"
class="solr.search.LRUCache"
size="4096"
initialSize="1024"
autowarmCount="1024"
regenerator="MyRegenerator"
/>
-->
<useFilterForSortedQuery>true</useFilterForSortedQuery>
<queryResultWindowSize>10</queryResultWindowSize>
<!-- set maxSize artificially low to exercise both types of sets -->
<HashDocSet maxSize="3" loadFactor="0.75"/>
<!-- boolToFilterOptimizer converts boolean clauses with zero boost
into cached filters if the number of docs selected by the clause exceeds
the threshold (represented as a fraction of the total index)
-->
<boolTofilterOptimizer enabled="false" cacheSize="32" threshold=".05"/>
<!-- a newSearcher event is fired whenever a new searcher is being prepared
and there is a current searcher handling requests (aka registered). -->
<!-- QuerySenderListener takes an array of NamedList and executes a
local query request for each NamedList in sequence. -->
<!--
<listener event="newSearcher" class="solr.QuerySenderListener">
<arr name="queries">
<lst> <str name="q">solr</str> <str name="start">0</str> <str name="rows">10</str> </lst>
<lst> <str name="q">rocks</str> <str name="start">0</str> <str name="rows">10</str> </lst>
</arr>
</listener>
-->
<!-- a firstSearcher event is fired whenever a new searcher is being
prepared but there is no current registered searcher to handle
requests or to gain prewarming data from. -->
<!--
<listener event="firstSearcher" class="solr.QuerySenderListener">
<arr name="queries">
<lst> <str name="q">fast_warm</str> <str name="start">0</str> <str name="rows">10</str> </lst>
</arr>
</listener>
-->
</query>
<!-- An alternate set representation that uses an integer hash to store filters (sets of docids).
If the set cardinality <= maxSize elements, then HashDocSet will be used instead of the bitset
based BitDocSet. -->
<!-- requestHandler plugins... incoming queries will be dispatched to the
correct handler based on the qt (query type) param matching the
name of registered handlers.
The "standard" request handler is the default and will be used if qt
is not specified in the request.
-->
<requestHandler name="standard" class="solr.StandardRequestHandler"/>
<requestHandler name="dismaxOldStyleDefaults"
class="solr.DisMaxRequestHandler" >
<!-- for historic reasons, DisMaxRequestHandler will use all of
its init params as "defaults" if there is no "defaults" list
specified
-->
<float name="tie">0.01</float>
<str name="qf">
text^0.5 features_t^1.0 subject^1.4 title_stemmed^2.0
</str>
<str name="pf">
text^0.2 features_t^1.1 subject^1.4 title_stemmed^2.0 title^1.5
</str>
<str name="bf">
ord(weight)^0.5 recip(rord(iind),1,1000,1000)^0.3
</str>
<str name="mm">
3&lt;-1 5&lt;-2 6&lt;90%
</str>
<int name="ps">100</int>
</requestHandler>
<requestHandler name="dismax" class="solr.DisMaxRequestHandler" >
<lst name="defaults">
<str name="q.alt">*:*</str>
<float name="tie">0.01</float>
<str name="qf">
text^0.5 features_t^1.0 subject^1.4 title_stemmed^2.0
</str>
<str name="pf">
text^0.2 features_t^1.1 subject^1.4 title_stemmed^2.0 title^1.5
</str>
<str name="bf">
ord(weight)^0.5 recip(rord(iind),1,1000,1000)^0.3
</str>
<str name="mm">
3&lt;-1 5&lt;-2 6&lt;90%
</str>
<int name="ps">100</int>
</lst>
</requestHandler>
<requestHandler name="old" class="solr.tst.OldRequestHandler" >
<int name="myparam">1000</int>
<float name="ratio">1.4142135</float>
<arr name="myarr"><int>1</int><int>2</int></arr>
<str>foo</str>
</requestHandler>
<requestHandler name="oldagain" class="solr.tst.OldRequestHandler" >
<lst name="lst1"> <str name="op">sqrt</str> <int name="val">2</int> </lst>
<lst name="lst2"> <str name="op">log</str> <float name="val">10</float> </lst>
</requestHandler>
<requestHandler name="test" class="solr.tst.TestRequestHandler" />
<!-- test query parameter defaults -->
<requestHandler name="defaults" class="solr.StandardRequestHandler">
<lst name="defaults">
<int name="rows">4</int>
<bool name="hl">true</bool>
<str name="hl.fl">text,name,subject,title,whitetok</str>
</lst>
</requestHandler>
<!-- test query parameter defaults -->
<requestHandler name="lazy" class="solr.StandardRequestHandler" startup="lazy">
<lst name="defaults">
<int name="rows">4</int>
<bool name="hl">true</bool>
<str name="hl.fl">text,name,subject,title,whitetok</str>
</lst>
</requestHandler>
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" />
<requestHandler name="/update/csv" class="solr.CSVRequestHandler" startup="lazy" />
<!-- test elevation -->
<searchComponent name="elevate" class="org.apache.solr.handler.component.QueryElevationComponent" >
<str name="queryFieldType">string</str>
<str name="config-file">elevate.xml</str>
</searchComponent>
<requestHandler name="/elevate" class="org.apache.solr.handler.component.SearchHandler">
<lst name="defaults">
<str name="echoParams">explicit</str>
</lst>
<arr name="last-components">
<str>elevate</str>
</arr>
</requestHandler>
<searchComponent name="spellcheck" class="org.apache.solr.handler.component.SpellCheckComponent">
<lst name="defaults">
<!-- omp = Only More Popular -->
<str name="spellcheck.onlyMorePopular">false</str>
<!-- exr = Extended Results -->
<str name="spellcheck.extendedResults">false</str>
<!-- The number of suggestions to return -->
<str name="spellcheck.count">1</str>
</lst>
<str name="queryAnalyzerFieldType">lowerfilt</str>
<lst name="spellchecker">
<str name="name">default</str>
<str name="field">lowerfilt</str>
<str name="spellcheckIndexDir">./spellchecker</str>
</lst>
<!-- Example of using different distance measure -->
<lst name="spellchecker">
<str name="name">jarowinkler</str>
<str name="field">lowerfilt</str>
<!-- Use a different Distance Measure -->
<str name="distanceMeasure">org.apache.lucene.search.spell.JaroWinklerDistance</str>
<str name="spellcheckIndexDir">./spellchecker</str>
</lst>
<lst name="spellchecker">
<str name="classname">solr.FileBasedSpellChecker</str>
<str name="name">external</str>
<str name="sourceLocation">spellings.txt</str>
<str name="characterEncoding">UTF-8</str>
<str name="spellcheckIndexDir">./spellchecker</str>
</lst>
</searchComponent>
<!--
The SpellingQueryConverter to convert raw (CommonParams.Q) queries into tokens. Uses a simple regular expression
to strip off field markup, boosts, ranges, etc. but it is not guaranteed to match an exact parse from the query parser.
-->
<queryConverter name="queryConverter" class="org.apache.solr.spelling.SpellingQueryConverter"/>
<requestHandler name="spellCheckCompRH" class="org.apache.solr.handler.component.SearchHandler">
<arr name="last-components">
<str>spellcheck</str>
</arr>
</requestHandler>
<highlighting>
<!-- Configure the standard fragmenter -->
<fragmenter name="gap" class="org.apache.solr.highlight.GapFragmenter" default="true">
<lst name="defaults">
<int name="hl.fragsize">100</int>
</lst>
</fragmenter>
<fragmenter name="regex" class="org.apache.solr.highlight.RegexFragmenter">
<lst name="defaults">
<int name="hl.fragsize">70</int>
</lst>
</fragmenter>
<!-- Configure the standard formatter -->
<formatter name="html" class="org.apache.solr.highlight.HtmlFormatter" default="true">
<lst name="defaults">
<str name="hl.simple.pre"><![CDATA[<em>]]></str>
<str name="hl.simple.post"><![CDATA[</em>]]></str>
</lst>
</formatter>
</highlighting>
<!-- enable streaming for testing... -->
<requestDispatcher handleSelect="true" >
<requestParsers enableRemoteStreaming="true" multipartUploadLimitInKB="2048" />
<httpCaching lastModifiedFrom="openTime" etagSeed="Solr" never304="false">
<cacheControl>max-age=30, public</cacheControl>
</httpCaching>
</requestDispatcher>
<admin>
<defaultQuery>solr</defaultQuery>
<gettableFiles>solrconfig.xml schema.xml admin-extra.html</gettableFiles>
</admin>
<!-- test getting system property -->
<propTest attr1="${solr.test.sys.prop1}-$${literal}"
attr2="${non.existent.sys.prop:default-from-config}">prefix-${solr.test.sys.prop2}-suffix</propTest>
<queryParser name="foo" class="FooQParserPlugin"/>
</config>