LUCENE-7855: The advanced parameters of the Wikipedia tokenizer are added to the factory

Closes #209
jpgilaberte 2017-05-29 13:06:05 +02:00 committed by Adrien Grand
parent 23adc2b0a0
commit fe176b601b
3 changed files with 90 additions and 20 deletions

lucene/CHANGES.txt

@@ -119,6 +119,11 @@ Other
 ======================= Lucene 6.7.0 =======================
 
+New Features
+
+* LUCENE-7855: Added advanced options of the Wikipedia tokenizer to its factory.
+  (Juan Pedro via Adrien Grand)
+
 Other
 
 * LUCENE-7800: Remove code that potentially rethrows checked exceptions
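
Editor's note: the two new factory arguments mirror WikipediaTokenizer's existing constructor parameters — tokenOutput selects the output mode (TOKENS_ONLY, UNTOKENIZED_ONLY or BOTH) and untokenizedTypes lists the wiki token types to emit as whole spans. A minimal sketch of configuring the factory programmatically (the demo class and argument map are illustrative, not part of this commit):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.lucene.analysis.wikipedia.WikipediaTokenizer;
    import org.apache.lucene.analysis.wikipedia.WikipediaTokenizerFactory;

    public class WikipediaFactoryConfigDemo {
      public static void main(String[] args) {
        // The factory consumes the keys it recognizes from this map;
        // any leftover key triggers the "Unknown parameters" exception.
        Map<String, String> factoryArgs = new HashMap<>();
        factoryArgs.put("tokenOutput", Integer.toString(WikipediaTokenizer.BOTH));
        factoryArgs.put("untokenizedTypes",
            WikipediaTokenizer.CATEGORY + ", " + WikipediaTokenizer.ITALICS);
        WikipediaTokenizerFactory factory = new WikipediaTokenizerFactory(factoryArgs);
        System.out.println("configured: " + factory);
      }
    }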

lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerFactory.java

@@ -16,9 +16,9 @@
  */
 package org.apache.lucene.analysis.wikipedia;
 
-
 import java.util.Collections;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.lucene.analysis.util.TokenizerFactory;
 import org.apache.lucene.util.AttributeFactory;
@@ -33,19 +33,28 @@ import org.apache.lucene.util.AttributeFactory;
  * &lt;/fieldType&gt;</pre>
  */
 public class WikipediaTokenizerFactory extends TokenizerFactory {
+  public static final String TOKEN_OUTPUT = "tokenOutput";
+  public static final String UNTOKENIZED_TYPES = "untokenizedTypes";
+
+  protected final int tokenOutput;
+  protected Set<String> untokenizedTypes;
 
   /** Creates a new WikipediaTokenizerFactory */
   public WikipediaTokenizerFactory(Map<String,String> args) {
     super(args);
+    tokenOutput = getInt(args, TOKEN_OUTPUT, WikipediaTokenizer.TOKENS_ONLY);
+    untokenizedTypes = getSet(args, UNTOKENIZED_TYPES);
+    if (untokenizedTypes == null) {
+      untokenizedTypes = Collections.emptySet();
+    }
     if (!args.isEmpty()) {
       throw new IllegalArgumentException("Unknown parameters: " + args);
     }
   }
 
-  // TODO: add support for WikipediaTokenizer's advanced options.
   @Override
   public WikipediaTokenizer create(AttributeFactory factory) {
-    return new WikipediaTokenizer(factory, WikipediaTokenizer.TOKENS_ONLY,
-        Collections.<String>emptySet());
+    return new WikipediaTokenizer(factory, tokenOutput, untokenizedTypes);
   }
 }
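
Editor's note: the constructor follows the standard TokenizerFactory pattern — getInt and getSet consume their keys from args before the leftover check, and getSet returns null when the key is absent, hence the emptySet fallback. A sketch of driving the updated factory end to end (the class name and input text are illustrative; assumes lucene-analyzers-common on the classpath):

    import java.io.StringReader;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.lucene.analysis.Tokenizer;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
    import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
    import org.apache.lucene.analysis.wikipedia.WikipediaTokenizer;
    import org.apache.lucene.analysis.wikipedia.WikipediaTokenizerFactory;

    public class WikipediaFactoryDemo {
      public static void main(String[] unused) throws Exception {
        Map<String, String> args = new HashMap<>();
        args.put("tokenOutput", Integer.toString(WikipediaTokenizer.UNTOKENIZED_ONLY));
        args.put("untokenizedTypes", WikipediaTokenizer.CATEGORY);
        Tokenizer tokenizer = new WikipediaTokenizerFactory(args).create();
        tokenizer.setReader(new StringReader("This is a [[Category:foo bar]]"));
        CharTermAttribute term = tokenizer.addAttribute(CharTermAttribute.class);
        TypeAttribute type = tokenizer.addAttribute(TypeAttribute.class);
        tokenizer.reset();
        while (tokenizer.incrementToken()) {
          // Plain words come through as ordinary tokens; the category text
          // "foo bar" is emitted as a single untokenized span.
          System.out.println(term + "\t" + type.type());
        }
        tokenizer.end();
        tokenizer.close();
      }
    }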

lucene/analysis/common/src/test/org/apache/lucene/analysis/wikipedia/TestWikipediaTokenizerFactory.java

@@ -17,34 +17,90 @@
 package org.apache.lucene.analysis.wikipedia;
 
 import java.io.Reader;
 import java.io.StringReader;
+import java.util.HashSet;
+import java.util.Set;
 
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.util.BaseTokenStreamFactoryTestCase;
+import org.apache.lucene.analysis.wikipedia.WikipediaTokenizer;
 
 /**
  * Simple tests to ensure the wikipedia tokenizer is working.
  */
 public class TestWikipediaTokenizerFactory extends BaseTokenStreamFactoryTestCase {
+  private final String WIKIPEDIA = "Wikipedia";
+  private final String TOKEN_OUTPUT = "tokenOutput";
+  private final String UNTOKENIZED_TYPES = "untokenizedTypes";
+
   public void testTokenizer() throws Exception {
-    Reader reader = new StringReader("This is a [[Category:foo]]");
-    Tokenizer tokenizer = tokenizerFactory("Wikipedia").create(newAttributeFactory());
-    tokenizer.setReader(reader);
-    assertTokenStreamContents(tokenizer,
-        new String[] { "This", "is", "a", "foo" },
-        new int[] { 0, 5, 8, 21 },
-        new int[] { 4, 7, 9, 24 },
-        new String[] { "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>", WikipediaTokenizer.CATEGORY },
-        new int[] { 1, 1, 1, 1, });
+    String text = "This is a [[Category:foo]]";
+    Tokenizer tf = tokenizerFactory(WIKIPEDIA).create(newAttributeFactory());
+    tf.setReader(new StringReader(text));
+    assertTokenStreamContents(tf,
+        new String[] { "This", "is", "a", "foo" },
+        new int[] { 0, 5, 8, 21 },
+        new int[] { 4, 7, 9, 24 },
+        new String[] { "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>", WikipediaTokenizer.CATEGORY },
+        new int[] { 1, 1, 1, 1, },
+        text.length());
   }
+
+  public void testTokenizerTokensOnly() throws Exception {
+    String text = "This is a [[Category:foo]]";
+    Tokenizer tf = tokenizerFactory(WIKIPEDIA, TOKEN_OUTPUT, new Integer(WikipediaTokenizer.TOKENS_ONLY).toString()).create(newAttributeFactory());
+    tf.setReader(new StringReader(text));
+    assertTokenStreamContents(tf,
+        new String[] { "This", "is", "a", "foo" },
+        new int[] { 0, 5, 8, 21 },
+        new int[] { 4, 7, 9, 24 },
+        new String[] { "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>", WikipediaTokenizer.CATEGORY },
+        new int[] { 1, 1, 1, 1, },
+        text.length());
+  }
+
+  public void testTokenizerUntokenizedOnly() throws Exception {
+    String test = "[[Category:a b c d]] [[Category:e f g]] [[link here]] [[link there]] ''italics here'' something ''more italics'' [[Category:h   i   j]]";
+    Set<String> untoks = new HashSet<>();
+    untoks.add(WikipediaTokenizer.CATEGORY);
+    untoks.add(WikipediaTokenizer.ITALICS);
+    Tokenizer tf = tokenizerFactory(WIKIPEDIA, TOKEN_OUTPUT, new Integer(WikipediaTokenizer.UNTOKENIZED_ONLY).toString(), UNTOKENIZED_TYPES, WikipediaTokenizer.CATEGORY + ", " + WikipediaTokenizer.ITALICS).create(newAttributeFactory());
+    tf.setReader(new StringReader(test));
+    assertTokenStreamContents(tf,
+        new String[] { "a b c d", "e f g", "link", "here", "link",
+            "there", "italics here", "something", "more italics", "h   i   j" },
+        new int[] { 11, 32, 42, 47, 56, 61, 71, 86, 98, 124 },
+        new int[] { 18, 37, 46, 51, 60, 66, 83, 95, 110, 133 },
+        new int[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }
+    );
+  }
+
+  public void testTokenizerBoth() throws Exception {
+    String test = "[[Category:a b c d]] [[Category:e f g]] [[link here]] [[link there]] ''italics here'' something ''more italics'' [[Category:h   i   j]]";
+    Tokenizer tf = tokenizerFactory(WIKIPEDIA, TOKEN_OUTPUT, new Integer(WikipediaTokenizer.BOTH).toString(), UNTOKENIZED_TYPES, WikipediaTokenizer.CATEGORY + ", " + WikipediaTokenizer.ITALICS).create(newAttributeFactory());
+    tf.setReader(new StringReader(test));
+    assertTokenStreamContents(tf,
+        new String[] { "a b c d", "a", "b", "c", "d", "e f g", "e", "f", "g",
+            "link", "here", "link", "there", "italics here", "italics", "here",
+            "something", "more italics", "more", "italics", "h   i   j", "h", "i", "j" },
+        new int[] { 11, 11, 13, 15, 17, 32, 32, 34, 36, 42, 47, 56, 61, 71, 71, 79, 86, 98, 98, 103, 124, 124, 128, 132 },
+        new int[] { 18, 12, 14, 16, 18, 37, 33, 35, 37, 46, 51, 60, 66, 83, 78, 83, 95, 110, 102, 110, 133, 125, 129, 133 },
+        new int[] { 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1 }
+    );
+  }
 
   /** Test that bogus arguments result in exception */
   public void testBogusArguments() throws Exception {
     IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {
-      tokenizerFactory("Wikipedia", "bogusArg", "bogusValue");
+      tokenizerFactory(WIKIPEDIA, "bogusArg", "bogusValue").create(newAttributeFactory());
     });
     assertTrue(expected.getMessage().contains("Unknown parameters"));
   }
-}
+
+  public void testIllegalArguments() throws Exception {
+    IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {
+      Tokenizer tf = tokenizerFactory(WIKIPEDIA, TOKEN_OUTPUT, "-1").create(newAttributeFactory());
+    });
+    assertTrue(expected.getMessage().contains("tokenOutput must be TOKENS_ONLY, UNTOKENIZED_ONLY or BOTH"));
+  }
+}
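
Editor's note: the factory stores tokenOutput without validating it; the range check appears to happen when the tokenizer itself is constructed, which is why testIllegalArguments chains .create(newAttributeFactory()) rather than asserting on the factory constructor alone. A hypothetical snippet showing the same failure mode outside the test framework:

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.lucene.analysis.wikipedia.WikipediaTokenizerFactory;

    public class TokenOutputValidationDemo {
      public static void main(String[] unused) {
        Map<String, String> args = new HashMap<>();
        // Not one of TOKENS_ONLY, UNTOKENIZED_ONLY or BOTH.
        args.put("tokenOutput", "-1");
        try {
          new WikipediaTokenizerFactory(args).create();
        } catch (IllegalArgumentException e) {
          // "tokenOutput must be TOKENS_ONLY, UNTOKENIZED_ONLY or BOTH"
          System.out.println(e.getMessage());
        }
      }
    }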