LUCENE-9007: MockSynonymFilter should add TypeAttribute (#23)

The MockSynonymFilter should add a TypeAttribute to the synonyms it
generates in order to make it a better stand-in for the real filter in tests.
Author: Christoph Büscher, 2021-03-19 03:00:09 +01:00 (committed by GitHub)
parent 28edbf8fc6
commit 7ed72972b8
2 changed files with 16 additions and 13 deletions

MockSynonymFilter.java

@@ -24,6 +24,7 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
 import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute;
+import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
 import org.apache.lucene.util.AttributeSource;
 
 /** adds synonym of "dog" for "dogs", and synonym of "cavy" for "guinea pig". */
@@ -32,6 +33,7 @@ public class MockSynonymFilter extends TokenFilter {
   PositionIncrementAttribute posIncAtt = addAttribute(PositionIncrementAttribute.class);
   OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
   PositionLengthAttribute posLenAtt = addAttribute(PositionLengthAttribute.class);
+  TypeAttribute typeAtt = addAttribute(TypeAttribute.class);
 
   List<AttributeSource> tokenQueue = new ArrayList<>();
   boolean endOfInput = false;
@@ -85,6 +87,7 @@ public class MockSynonymFilter extends TokenFilter {
     posIncAtt.setPositionIncrement(0);
     posLenAtt.setPositionLength(posLen);
     offsetAtt.setOffset(offsetAtt.startOffset(), endOffset);
+    typeAtt.setType("SYNONYM");
     tokenQueue.add(cloneAttributes());
   }
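For context only (not part of this commit), a minimal sketch of how a caller could observe the attribute set above. The Analyzer passed in is assumed to wrap MockTokenizer and MockSynonymFilter; the class and method names here are illustrative:

import java.io.IOException;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;

class TokenTypeDebug {
  // Prints each token with its type. With this change, synonyms injected by
  // MockSynonymFilter report "SYNONYM", while pass-through tokens keep the
  // default type "word".
  static void printTokenTypes(Analyzer analyzer, String text) throws IOException {
    try (TokenStream ts = analyzer.tokenStream("field", text)) {
      CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
      TypeAttribute typeAtt = ts.addAttribute(TypeAttribute.class);
      ts.reset();
      while (ts.incrementToken()) {
        System.out.println(termAtt + " -> " + typeAtt.type());
      }
      ts.end();
    }
  }
}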

TestMockSynonymFilter.java

@@ -38,7 +38,7 @@ public class TestMockSynonymFilter extends BaseTokenStreamTestCase {
         new String[] {"dogs", "dog"},
         new int[] {0, 0}, // start offset
         new int[] {4, 4}, // end offset
-        null,
+        new String[] {"word", "SYNONYM"},
         new int[] {1, 0}, // position increment
         new int[] {1, 1}, // position length
         true); // check that offsets are correct
@@ -49,7 +49,7 @@ public class TestMockSynonymFilter extends BaseTokenStreamTestCase {
         new String[] {"small", "dogs", "dog"},
         new int[] {0, 6, 6}, // start offset
         new int[] {5, 10, 10}, // end offset
-        null,
+        new String[] {"word", "word", "SYNONYM"},
         new int[] {1, 1, 0}, // position increment
         new int[] {1, 1, 1}, // position length
         true); // check that offsets are correct
@@ -60,7 +60,7 @@ public class TestMockSynonymFilter extends BaseTokenStreamTestCase {
         new String[] {"dogs", "dog", "running"},
         new int[] {0, 0, 5}, // start offset
         new int[] {4, 4, 12}, // end offset
-        null,
+        new String[] {"word", "SYNONYM", "word"},
         new int[] {1, 0, 1}, // position increment
         new int[] {1, 1, 1}, // position length
         true); // check that offsets are correct
@@ -71,7 +71,7 @@ public class TestMockSynonymFilter extends BaseTokenStreamTestCase {
         new String[] {"small", "dogs", "dog", "running"},
         new int[] {0, 6, 6, 11}, // start offset
         new int[] {5, 10, 10, 18}, // end offset
-        null,
+        new String[] {"word", "word", "SYNONYM", "word"},
         new int[] {1, 1, 0, 1}, // position increment
         new int[] {1, 1, 1, 1}, // position length
         true); // check that offsets are correct
@@ -82,7 +82,7 @@ public class TestMockSynonymFilter extends BaseTokenStreamTestCase {
         new String[] {"guinea"},
         new int[] {0}, // start offset
         new int[] {6}, // end offset
-        null,
+        new String[] {"word"},
         new int[] {1}, // position increment
         new int[] {1}, // position length
         true); // check that offsets are correct
@@ -93,7 +93,7 @@ public class TestMockSynonymFilter extends BaseTokenStreamTestCase {
         new String[] {"pig"},
         new int[] {0}, // start offset
         new int[] {3}, // end offset
-        null,
+        new String[] {"word"},
         new int[] {1}, // position increment
         new int[] {1}, // position length
         true); // check that offsets are correct
@@ -104,7 +104,7 @@ public class TestMockSynonymFilter extends BaseTokenStreamTestCase {
         new String[] {"guinea", "cavy", "pig"},
         new int[] {0, 0, 7}, // start offset
         new int[] {6, 10, 10}, // end offset
-        null,
+        new String[] {"word", "SYNONYM", "word"},
         new int[] {1, 0, 1}, // position increment
         new int[] {1, 2, 1}, // position length
         true); // check that offsets are correct
@@ -115,7 +115,7 @@ public class TestMockSynonymFilter extends BaseTokenStreamTestCase {
         new String[] {"guinea", "dogs", "dog"},
         new int[] {0, 7, 7}, // start offset
         new int[] {6, 11, 11}, // end offset
-        null,
+        new String[] {"word", "word", "SYNONYM"},
         new int[] {1, 1, 0}, // position increment
         new int[] {1, 1, 1}, // position length
         true); // check that offsets are correct
@@ -126,7 +126,7 @@ public class TestMockSynonymFilter extends BaseTokenStreamTestCase {
         new String[] {"dogs", "dog", "guinea"},
         new int[] {0, 0, 5}, // start offset
         new int[] {4, 4, 11}, // end offset
-        null,
+        new String[] {"word", "SYNONYM", "word"},
         new int[] {1, 0, 1}, // position increment
         new int[] {1, 1, 1}, // position length
         true); // check that offsets are correct
@@ -137,7 +137,7 @@ public class TestMockSynonymFilter extends BaseTokenStreamTestCase {
         new String[] {"dogs", "dog", "guinea", "cavy", "pig"},
         new int[] {0, 0, 5, 5, 12}, // start offset
         new int[] {4, 4, 11, 15, 15}, // end offset
-        null,
+        new String[] {"word", "SYNONYM", "word", "SYNONYM", "word"},
         new int[] {1, 0, 1, 0, 1}, // position increment
         new int[] {1, 1, 1, 2, 1}, // position length
         true); // check that offsets are correct
@@ -148,7 +148,7 @@ public class TestMockSynonymFilter extends BaseTokenStreamTestCase {
         new String[] {"guinea", "cavy", "pig", "dogs", "dog"},
         new int[] {0, 0, 7, 11, 11}, // start offset
         new int[] {6, 10, 10, 15, 15}, // end offset
-        null,
+        new String[] {"word", "SYNONYM", "word", "word", "SYNONYM"},
         new int[] {1, 0, 1, 1, 0}, // position increment
         new int[] {1, 2, 1, 1, 1}, // position length
         true); // check that offsets are correct
@@ -159,7 +159,7 @@ public class TestMockSynonymFilter extends BaseTokenStreamTestCase {
         new String[] {"small", "dogs", "dog", "and", "guinea", "cavy", "pig", "running"},
         new int[] {0, 6, 6, 11, 15, 15, 22, 26}, // start offset
         new int[] {5, 10, 10, 14, 21, 25, 25, 33}, // end offset
-        null,
+        new String[] {"word", "word", "SYNONYM", "word", "word", "SYNONYM", "word", "word"},
         new int[] {1, 1, 0, 1, 1, 0, 1, 1}, // position increment
         new int[] {1, 1, 1, 1, 1, 2, 1, 1}, // position length
         true); // check that offsets are correct
@@ -170,7 +170,7 @@ public class TestMockSynonymFilter extends BaseTokenStreamTestCase {
         new String[] {"small", "guinea", "cavy", "pig", "and", "dogs", "dog", "running"},
         new int[] {0, 6, 6, 13, 17, 21, 21, 26}, // start offset
         new int[] {5, 12, 16, 16, 20, 25, 25, 33}, // end offset
-        null,
+        new String[] {"word", "word", "SYNONYM", "word", "word", "word", "SYNONYM", "word"},
         new int[] {1, 1, 0, 1, 1, 1, 0, 1}, // position increment
         new int[] {1, 1, 2, 1, 1, 1, 1, 1}, // position length
         true); // check that offsets are correct
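The hunks above show only the changed argument lists; for orientation, here is a hedged reconstruction of one complete assertion as it presumably appears in the test. The helper name assertAnalyzesTo and the analyzer variable are assumptions based on BaseTokenStreamTestCase conventions, not shown in this diff:

// Assumed shape of one assertion after this change; the previously passed
// `null` types argument is now an explicit array mixing "word" and "SYNONYM".
assertAnalyzesTo(
    analyzer,
    "dogs",
    new String[] {"dogs", "dog"},     // tokens
    new int[] {0, 0},                 // start offsets
    new int[] {4, 4},                 // end offsets
    new String[] {"word", "SYNONYM"}, // types (was null before this change)
    new int[] {1, 0},                 // position increments
    new int[] {1, 1},                 // position lengths
    true);                            // check that offsets are correct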