mirror of https://github.com/apache/lucene.git
Add a test verifying that the EdgeNGram filter preserves token payloads
This commit is contained in:
parent
987e2650b5
commit
61e4528306
|
@ -22,7 +22,10 @@ import java.io.StringReader;
|
|||
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
import org.apache.lucene.analysis.Tokenizer;
|
||||
import org.apache.lucene.analysis.payloads.PayloadHelper;
|
||||
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
|
||||
import org.apache.lucene.analysis.util.BaseTokenStreamFactoryTestCase;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
|
||||
/**
|
||||
* Simple tests to ensure the NGram filter factories are working.
|
||||
|
@ -123,6 +126,25 @@ public class TestNGramFilters extends BaseTokenStreamFactoryTestCase {
|
|||
assertTokenStreamContents(stream,
|
||||
new String[] { "t", "te" });
|
||||
}
|
||||
|
||||
public void testEdgeNGramFilterPayload() throws Exception {
|
||||
Reader reader = new StringReader("test|0.1");
|
||||
TokenStream stream = whitespaceMockTokenizer(reader);
|
||||
stream = tokenFilterFactory("DelimitedPayload", "encoder", "float").create(stream);
|
||||
stream = tokenFilterFactory("EdgeNGram", "minGramSize", "1", "maxGramSize", "2").create(stream);
|
||||
|
||||
stream.reset();
|
||||
while (stream.incrementToken()) {
|
||||
PayloadAttribute payAttr = stream.getAttribute(PayloadAttribute.class);
|
||||
assertNotNull(payAttr);
|
||||
BytesRef payData = payAttr.getPayload();
|
||||
assertNotNull(payData);
|
||||
float payFloat = PayloadHelper.decodeFloat(payData.bytes);
|
||||
assertEquals(0.1f, payFloat, 0.0f);
|
||||
}
|
||||
stream.end();
|
||||
stream.close();
|
||||
}
|
||||
|
||||
/** Test that bogus arguments result in exception */
|
||||
public void testBogusArguments() throws Exception {
|
||||
|
|
Loading…
Reference in New Issue