mirror of https://github.com/apache/lucene.git
LUCENE-1460: Additional cleanup in two contrib junit tests.
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@799973 13f79535-47bb-0310-9956-ffa450edef68
parent 537aeb24e0
commit b91f993a0e
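Both tests are moved off the deprecated Token-based next(Token) API and onto the attribute-based incrementToken() API. For orientation, a minimal consumer-side sketch of that newer pattern follows; the class and method names here are illustrative, not part of this patch.

import java.io.IOException;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;

// Sketch only: the consumer side of the attribute-based API both tests now target.
public class AttributeApiSketch {
  public static void printTerms(TokenStream stream) throws IOException {
    // One shared TermAttribute is registered up front; incrementToken() refills it for
    // every token, replacing the old next(Token) calls that handed out Token objects.
    TermAttribute termAtt = (TermAttribute) stream.addAttribute(TermAttribute.class);
    while (stream.incrementToken()) {
      System.out.println(termAtt.term());
    }
  }
}

Because a single shared TermAttribute is refilled per token, the test fixtures below can switch from pre-built Token[] arrays to plain String[] arrays.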
PositionFilterTest.java

@@ -23,25 +23,28 @@ import junit.framework.TestCase;
 import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.shingle.ShingleFilter;
+import org.apache.lucene.analysis.tokenattributes.TermAttribute;
 
 public class PositionFilterTest extends TestCase {
 
   public class TestTokenStream extends TokenStream {
 
     protected int index = 0;
-    protected Token[] testToken;
+    protected String[] testToken;
+    protected TermAttribute termAtt;
 
-    public TestTokenStream(Token[] testToken) {
+    public TestTokenStream(String[] testToken) {
       super();
       this.testToken = testToken;
+      termAtt = (TermAttribute) addAttribute(TermAttribute.class);
     }
 
-    public Token next(final Token reusableToken) throws IOException {
-      assert reusableToken != null;
+    public final boolean incrementToken() throws IOException {
       if (index < testToken.length) {
-        return testToken[index++];
+        termAtt.setTermBuffer(testToken[index++]);
+        return true;
       } else {
-        return null;
+        return false;
       }
     }
     public void reset() {
@@ -52,13 +55,13 @@ public class PositionFilterTest extends TestCase {
   public static void main(String[] args) {
     junit.textui.TestRunner.run(PositionFilterTest.class);
   }
-  public static final Token[] TEST_TOKEN = new Token[]{
-    createToken("please"),
-    createToken("divide"),
-    createToken("this"),
-    createToken("sentence"),
-    createToken("into"),
-    createToken("shingles"),
+  public static final String[] TEST_TOKEN = new String[]{
+    "please",
+    "divide",
+    "this",
+    "sentence",
+    "into",
+    "shingles",
   };
   public static final int[] TEST_TOKEN_POSITION_INCREMENTS = new int[]{
     1, 0, 0, 0, 0, 0
@@ -67,28 +70,28 @@ public class PositionFilterTest extends TestCase {
     1, 5, 5, 5, 5, 5
   };
 
-  public static final Token[] SIX_GRAM_NO_POSITIONS_TOKENS = new Token[]{
-    createToken("please"),
-    createToken("please divide"),
-    createToken("please divide this"),
-    createToken("please divide this sentence"),
-    createToken("please divide this sentence into"),
-    createToken("please divide this sentence into shingles"),
-    createToken("divide"),
-    createToken("divide this"),
-    createToken("divide this sentence"),
-    createToken("divide this sentence into"),
-    createToken("divide this sentence into shingles"),
-    createToken("this"),
-    createToken("this sentence"),
-    createToken("this sentence into"),
-    createToken("this sentence into shingles"),
-    createToken("sentence"),
-    createToken("sentence into"),
-    createToken("sentence into shingles"),
-    createToken("into"),
-    createToken("into shingles"),
-    createToken("shingles"),
+  public static final String[] SIX_GRAM_NO_POSITIONS_TOKENS = new String[]{
+    "please",
+    "please divide",
+    "please divide this",
+    "please divide this sentence",
+    "please divide this sentence into",
+    "please divide this sentence into shingles",
+    "divide",
+    "divide this",
+    "divide this sentence",
+    "divide this sentence into",
+    "divide this sentence into shingles",
+    "this",
+    "this sentence",
+    "this sentence into",
+    "this sentence into shingles",
+    "sentence",
+    "sentence into",
+    "sentence into shingles",
+    "into",
+    "into shingles",
+    "shingles",
   };
   public static final int[] SIX_GRAM_NO_POSITIONS_INCREMENTS = new int[]{
     1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
@@ -138,7 +141,7 @@ public class PositionFilterTest extends TestCase {
   }
 
   protected TokenStream filterTest(final TokenStream filter,
-                                   final Token[] tokensToCompare,
+                                   final String[] tokensToCompare,
                                    final int[] positionIncrements)
       throws IOException {
 
@@ -151,7 +154,7 @@ public class PositionFilterTest extends TestCase {
 
       if (null != nextToken) {
        final String termText = nextToken.term();
-        final String goldText = tokensToCompare[i].term();
+        final String goldText = tokensToCompare[i];
 
        assertEquals("Wrong termText", goldText, termText);
        assertEquals("Wrong positionIncrement for token \"" + termText + "\"",
@@ -163,12 +166,4 @@ public class PositionFilterTest extends TestCase {
      }
     return filter;
   }
-
-  private static Token createToken(String term) {
-    final Token token = new Token();
-    if (null != term) {
-      token.setTermBuffer(term);
-    }
-    return token;
-  }
 }
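Sketch (not part of the patch) of driving the rewritten TestTokenStream through PositionFilter with the new API. It would live inside PositionFilterTest, assumes one extra import (org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute), and only prints terms; filterTest() above holds the real assertions.

  // Illustrative helper inside PositionFilterTest; not part of this patch.
  public void demoPositionFilter() throws IOException {
    // TestTokenStream now takes plain strings instead of pre-built Token instances.
    TokenStream filter = new PositionFilter(new TestTokenStream(TEST_TOKEN));
    TermAttribute term = (TermAttribute) filter.addAttribute(TermAttribute.class);
    PositionIncrementAttribute posIncr =
        (PositionIncrementAttribute) filter.addAttribute(PositionIncrementAttribute.class);
    while (filter.incrementToken()) {
      // Each token's text and the position increment PositionFilter assigned to it.
      System.out.println(term.term() + " +" + posIncr.getPositionIncrement());
    }
  }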
TokenTypeSinkTokenizerTest.java

@@ -26,6 +26,8 @@ import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.TokenFilter;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.WhitespaceTokenizer;
+import org.apache.lucene.analysis.tokenattributes.TermAttribute;
+import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
 
 public class TokenTypeSinkTokenizerTest extends TestCase {
 
@@ -61,17 +63,22 @@ public class TokenTypeSinkTokenizerTest extends TestCase {
   }
 
   private class WordTokenFilter extends TokenFilter {
+    private TermAttribute termAtt;
+    private TypeAttribute typeAtt;
+
     private WordTokenFilter(TokenStream input) {
       super(input);
+      termAtt = (TermAttribute) addAttribute(TermAttribute.class);
+      typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
     }
 
-    public Token next(final Token reusableToken) throws IOException {
-      assert reusableToken != null;
-      Token nextToken = input.next(reusableToken);
-      if (nextToken != null && nextToken.term().equals("dogs")) {
-        nextToken.setType("D");
+    public final boolean incrementToken() throws IOException {
+      if (!input.incrementToken()) return false;
+
+      if (termAtt.term().equals("dogs")) {
+        typeAtt.setType("D");
       }
-      return nextToken;
+      return true;
     }
   }
 }
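Sketch (not part of the patch) of exercising the rewritten WordTokenFilter. The sample sentence and the java.io.StringReader import are assumptions; WhitespaceTokenizer, TermAttribute, and TypeAttribute are already imported by the test, and since WordTokenFilter is a private inner class, a driver like this would sit inside TokenTypeSinkTokenizerTest.

  // Illustrative helper inside TokenTypeSinkTokenizerTest; not part of this patch.
  public void demoWordTokenFilter() throws IOException {
    TokenStream stream =
        new WordTokenFilter(new WhitespaceTokenizer(new StringReader("the dogs went running")));
    TermAttribute term = (TermAttribute) stream.addAttribute(TermAttribute.class);
    TypeAttribute type = (TypeAttribute) stream.addAttribute(TypeAttribute.class);
    while (stream.incrementToken()) {
      // Every token keeps its default type except "dogs", which WordTokenFilter retags as "D".
      System.out.println(term.term() + " / " + type.type());
    }
  }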