SOLR-3961: Fixed error using LimitTokenCountFilterFactory

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1399474 13f79535-47bb-0310-9956-ffa450edef68
Chris M. Hostetter 2012-10-17 22:56:28 +00:00
parent d50d86168d
commit dd93b85321
4 changed files with 66 additions and 5 deletions

View File

@@ -36,16 +36,13 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
  */
 public class LimitTokenCountFilterFactory extends TokenFilterFactory {
 
+  public static final String MAX_TOKEN_COUNT_KEY = "maxTokenCount";
+
   int maxTokenCount;
 
   @Override
   public void init(Map<String, String> args) {
     super.init( args );
-    String maxTokenCountArg = args.get("maxTokenCount");
-    if (maxTokenCountArg == null) {
-      throw new IllegalArgumentException("maxTokenCount is mandatory.");
-    }
-    maxTokenCount = Integer.parseInt(args.get(maxTokenCountArg));
+    maxTokenCount = getInt(MAX_TOKEN_COUNT_KEY);
   }
 
   @Override
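
The removed lines contain the root cause of SOLR-3961: init() read the maxTokenCount value and then used that value as the key for a second lookup, so Integer.parseInt() always received null and failed with a NumberFormatException. A minimal sketch of the failure, assuming a factory configured with maxTokenCount="3" (illustration only, not part of the commit; the class name is hypothetical):

import java.util.HashMap;
import java.util.Map;

public class Solr3961Repro {  // hypothetical class name, for illustration only
  public static void main(String[] ignored) {
    Map<String, String> args = new HashMap<String, String>();
    args.put("maxTokenCount", "3");
    // the old factory logic: a double lookup using the value as the key
    String maxTokenCountArg = args.get("maxTokenCount");              // "3"
    int maxTokenCount = Integer.parseInt(args.get(maxTokenCountArg)); // args.get("3") == null -> NumberFormatException
  }
}

The fix parses the parameter directly via the getInt helper inherited from the factory base class, which also throws an IllegalArgumentException naming the missing parameter when maxTokenCount is absent; the new test below verifies both behaviors.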

View File

@@ -0,0 +1,55 @@
package org.apache.lucene.analysis.miscellaneous;

/**
 * Copyright 2004 The Apache Software Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.io.StringReader;
import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenStream;

public class TestLimitTokenCountFilterFactory extends BaseTokenStreamTestCase {

  public void test() throws IOException {
    LimitTokenCountFilterFactory factory = new LimitTokenCountFilterFactory();
    Map<String, String> args = new HashMap<String, String>();
    args.put(LimitTokenCountFilterFactory.MAX_TOKEN_COUNT_KEY, "3");
    factory.init(args);

    String test = "A1 B2 C3 D4 E5 F6";
    MockTokenizer tok = new MockTokenizer(new StringReader(test), MockTokenizer.WHITESPACE, false);
    // LimitTokenCountFilter doesn't consume the entire stream that it wraps
    tok.setEnableChecks(false);
    TokenStream stream = factory.create(tok);
    assertTokenStreamContents(stream, new String[] { "A1", "B2", "C3" });

    // param is required
    factory = new LimitTokenCountFilterFactory();
    args = new HashMap<String, String>();
    IllegalArgumentException iae = null;
    try {
      factory.init(args);
    } catch (IllegalArgumentException e) {
      assertTrue("exception doesn't mention param: " + e.getMessage(),
                 0 < e.getMessage().indexOf(LimitTokenCountFilterFactory.MAX_TOKEN_COUNT_KEY));
      iae = e;
    }
    assertNotNull("no exception thrown", iae);
  }
}

View File

@@ -81,6 +81,9 @@ Bug Fixes
 * SOLR-3940: Rejoining the leader election incorrectly triggers the code path
   for a fresh cluster start rather than fail over. (Mark Miller)
 
+* SOLR-3961: Fixed error using LimitTokenCountFilterFactory
+  (Jack Krupansky, hossman)
+
 Other Changes
 ----------------------

View File

@@ -269,6 +269,12 @@
       <filter class="solr.LengthFilterFactory" min="2" max="5"/>
     </analyzer>
   </fieldtype>
+  <fieldtype name="limitfilt" class="solr.TextField">
+    <analyzer>
+      <tokenizer class="solr.MockTokenizerFactory"/>
+      <filter class="solr.LimitTokenCountFilterFactory" maxTokenCount="100" />
+    </analyzer>
+  </fieldtype>
   <fieldtype name="subword" class="solr.TextField" multiValued="true" positionIncrementGap="100">
     <analyzer type="index">