mirror of https://github.com/apache/lucene.git
LUCENE-1460: Additional cleanup in two contrib junit tests.
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@799973 13f79535-47bb-0310-9956-ffa450edef68
parent 537aeb24e0
commit b91f993a0e
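Both diffs below replace the deprecated Token-returning next(Token) method with the attribute-based incrementToken() API. As a rough orientation, the consumer side of that API follows the pattern sketched here; the helper class and method names are illustrative only and are not part of this commit.

import java.io.IOException;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;

// Illustrative helper, not part of this commit.
public final class AttributeApiExample {
  public static void printTerms(TokenStream stream) throws IOException {
    // Register the attribute once; incrementToken() updates it in place for each token.
    TermAttribute termAtt = (TermAttribute) stream.addAttribute(TermAttribute.class);
    while (stream.incrementToken()) {        // replaces the old next(Token)/null-check loop
      System.out.println(termAtt.term());    // read the current term from the shared attribute
    }
  }
}

The same register-then-increment pattern appears in both converted test helpers below.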
PositionFilterTest.java

@@ -23,25 +23,28 @@ import junit.framework.TestCase;
 import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.shingle.ShingleFilter;
+import org.apache.lucene.analysis.tokenattributes.TermAttribute;
 
 public class PositionFilterTest extends TestCase {
 
   public class TestTokenStream extends TokenStream {
 
     protected int index = 0;
-    protected Token[] testToken;
+    protected String[] testToken;
+    protected TermAttribute termAtt;
 
-    public TestTokenStream(Token[] testToken) {
+    public TestTokenStream(String[] testToken) {
       super();
       this.testToken = testToken;
+      termAtt = (TermAttribute) addAttribute(TermAttribute.class);
     }
 
-    public Token next(final Token reusableToken) throws IOException {
-      assert reusableToken != null;
+    public final boolean incrementToken() throws IOException {
       if (index < testToken.length) {
-        return testToken[index++];
+        termAtt.setTermBuffer(testToken[index++]);
+        return true;
       } else {
-        return null;
+        return false;
       }
     }
     public void reset() {
@@ -52,13 +55,13 @@ public class PositionFilterTest extends TestCase {
   public static void main(String[] args) {
     junit.textui.TestRunner.run(PositionFilterTest.class);
   }
-  public static final Token[] TEST_TOKEN = new Token[]{
-    createToken("please"),
-    createToken("divide"),
-    createToken("this"),
-    createToken("sentence"),
-    createToken("into"),
-    createToken("shingles"),
+  public static final String[] TEST_TOKEN = new String[]{
+    "please",
+    "divide",
+    "this",
+    "sentence",
+    "into",
+    "shingles",
   };
   public static final int[] TEST_TOKEN_POSITION_INCREMENTS = new int[]{
     1, 0, 0, 0, 0, 0
@@ -67,28 +70,28 @@ public class PositionFilterTest extends TestCase {
     1, 5, 5, 5, 5, 5
   };
 
-  public static final Token[] SIX_GRAM_NO_POSITIONS_TOKENS = new Token[]{
-    createToken("please"),
-    createToken("please divide"),
-    createToken("please divide this"),
-    createToken("please divide this sentence"),
-    createToken("please divide this sentence into"),
-    createToken("please divide this sentence into shingles"),
-    createToken("divide"),
-    createToken("divide this"),
-    createToken("divide this sentence"),
-    createToken("divide this sentence into"),
-    createToken("divide this sentence into shingles"),
-    createToken("this"),
-    createToken("this sentence"),
-    createToken("this sentence into"),
-    createToken("this sentence into shingles"),
-    createToken("sentence"),
-    createToken("sentence into"),
-    createToken("sentence into shingles"),
-    createToken("into"),
-    createToken("into shingles"),
-    createToken("shingles"),
+  public static final String[] SIX_GRAM_NO_POSITIONS_TOKENS = new String[]{
+    "please",
+    "please divide",
+    "please divide this",
+    "please divide this sentence",
+    "please divide this sentence into",
+    "please divide this sentence into shingles",
+    "divide",
+    "divide this",
+    "divide this sentence",
+    "divide this sentence into",
+    "divide this sentence into shingles",
+    "this",
+    "this sentence",
+    "this sentence into",
+    "this sentence into shingles",
+    "sentence",
+    "sentence into",
+    "sentence into shingles",
+    "into",
+    "into shingles",
+    "shingles",
   };
   public static final int[] SIX_GRAM_NO_POSITIONS_INCREMENTS = new int[]{
     1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
@@ -138,7 +141,7 @@ public class PositionFilterTest extends TestCase {
   }
 
   protected TokenStream filterTest(final TokenStream filter,
-                                   final Token[] tokensToCompare,
+                                   final String[] tokensToCompare,
                                    final int[] positionIncrements)
                                    throws IOException {
 
@@ -151,7 +154,7 @@ public class PositionFilterTest extends TestCase {
 
       if (null != nextToken) {
        final String termText = nextToken.term();
-        final String goldText = tokensToCompare[i].term();
+        final String goldText = tokensToCompare[i];
 
        assertEquals("Wrong termText", goldText, termText);
        assertEquals("Wrong positionIncrement for token \"" + termText + "\"",
@@ -163,12 +166,4 @@ public class PositionFilterTest extends TestCase {
     }
     return filter;
   }
-
-  private static Token createToken(String term) {
-    final Token token = new Token();
-    if (null != term) {
-      token.setTermBuffer(term);
-    }
-    return token;
-  }
 }
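Assembled from the hunks above, the converted helper stream reads roughly as follows. This is a sketch: in the real file TestTokenStream is an inner class of PositionFilterTest, and the imports and the reset() body shown here are assumptions rather than part of the diff.

import java.io.IOException;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;

public class TestTokenStream extends TokenStream {

  protected int index = 0;
  protected String[] testToken;
  protected TermAttribute termAtt;

  public TestTokenStream(String[] testToken) {
    super();
    this.testToken = testToken;
    // Register the term attribute this stream populates on each call.
    termAtt = (TermAttribute) addAttribute(TermAttribute.class);
  }

  public final boolean incrementToken() throws IOException {
    if (index < testToken.length) {
      // Expose the next test term through the attribute instead of returning a Token.
      termAtt.setTermBuffer(testToken[index++]);
      return true;
    } else {
      return false;
    }
  }

  public void reset() {
    index = 0; // assumed body; reset() appears only truncated in the hunk above
  }
}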
TokenTypeSinkTokenizerTest.java

@@ -26,6 +26,8 @@ import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.TokenFilter;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.WhitespaceTokenizer;
+import org.apache.lucene.analysis.tokenattributes.TermAttribute;
+import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
 
 public class TokenTypeSinkTokenizerTest extends TestCase {
 
@@ -61,17 +63,22 @@ public class TokenTypeSinkTokenizerTest extends TestCase {
   }
 
   private class WordTokenFilter extends TokenFilter {
+    private TermAttribute termAtt;
+    private TypeAttribute typeAtt;
+
     private WordTokenFilter(TokenStream input) {
       super(input);
+      termAtt = (TermAttribute) addAttribute(TermAttribute.class);
+      typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
     }
 
-    public Token next(final Token reusableToken) throws IOException {
-      assert reusableToken != null;
-      Token nextToken = input.next(reusableToken);
-      if (nextToken != null && nextToken.term().equals("dogs")) {
-        nextToken.setType("D");
+    public final boolean incrementToken() throws IOException {
+      if (!input.incrementToken()) return false;
+
+      if (termAtt.term().equals("dogs")) {
+        typeAtt.setType("D");
       }
-      return nextToken;
+      return true;
     }
   }
 }
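Likewise, a sketch of the converted filter from the hunk above. In the real test WordTokenFilter is a private inner class of TokenTypeSinkTokenizerTest, so it is shown here as a standalone package-private class with the imports it needs.

import java.io.IOException;

import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;

class WordTokenFilter extends TokenFilter {
  private TermAttribute termAtt;
  private TypeAttribute typeAtt;

  WordTokenFilter(TokenStream input) {
    super(input);
    // Attributes are shared with the wrapped stream, so these views track its current token.
    termAtt = (TermAttribute) addAttribute(TermAttribute.class);
    typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
  }

  public final boolean incrementToken() throws IOException {
    // Advance the wrapped stream; its attributes now describe the current token.
    if (!input.incrementToken()) return false;

    if (termAtt.term().equals("dogs")) {
      typeAtt.setType("D"); // retag the token type in place
    }
    return true;
  }
}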