LUCENE-1855: Change AttributeSource API to use generics

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@820553 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Uwe Schindler 2009-10-01 07:53:43 +00:00
parent 7733f8ab32
commit ec90bc2202
158 changed files with 662 additions and 661 deletions

View File

@ -13,6 +13,11 @@ API Changes
protected; add expert ctor to directly specify reader, subReaders
and docStarts. (John Wang, Tim Smith via Mike McCandless)
* LUCENE-1855: Convert TokenStream/AttributeSource API to Generics.
Now addAttribute()/getAttribute() return an instance of the requested
attribute interface and no cast is needed anymore. (Uwe Schindler,
Michael Busch, Robert Muir, Adriano Crestani)
Bug fixes
New features

View File

@ -22,9 +22,6 @@
<description>
Additional Analyzers
</description>
<property name="javac.source" value="1.4" />
<property name="javac.target" value="1.4" />
<property name="build.dir" location="../../../build/contrib/analyzers/common" />
<property name="dist.dir" location="../../../dist/contrib/analyzers/common" />

View File

@ -36,7 +36,7 @@ public final class ArabicNormalizationFilter extends TokenFilter {
public ArabicNormalizationFilter(TokenStream input) {
super(input);
normalizer = new ArabicNormalizer();
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
public boolean incrementToken() throws IOException {

View File

@ -36,7 +36,7 @@ public final class ArabicStemFilter extends TokenFilter {
public ArabicStemFilter(TokenStream input) {
super(input);
stemmer = new ArabicStemmer();
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
public boolean incrementToken() throws IOException {

View File

@ -40,7 +40,7 @@ public final class BrazilianStemFilter extends TokenFilter {
public BrazilianStemFilter(TokenStream in) {
super(in);
stemmer = new BrazilianStemmer();
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
public BrazilianStemFilter(TokenStream in, Set exclusiontable) {

View File

@ -127,9 +127,9 @@ public final class CJKTokenizer extends Tokenizer {
}
private void init() {
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
termAtt = addAttribute(TermAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
typeAtt = addAttribute(TypeAttribute.class);
}
//~ Methods ----------------------------------------------------------------

View File

@ -66,7 +66,7 @@ public final class ChineseFilter extends TokenFilter {
stopTable = new HashMap(STOP_WORDS.length);
for (int i = 0; i < STOP_WORDS.length; i++)
stopTable.put(STOP_WORDS[i], STOP_WORDS[i]);
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
public boolean incrementToken() throws IOException {

View File

@ -74,8 +74,8 @@ public final class ChineseTokenizer extends Tokenizer {
}
private void init() {
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
termAtt = addAttribute(TermAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
}
private int offset = 0, bufferIndex=0, dataLen=0;

View File

@ -106,12 +106,12 @@ public abstract class CompoundWordTokenFilterBase extends TokenFilter {
addAllLowerCase(this.dictionary, dictionary);
}
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
flagsAtt = (FlagsAttribute) addAttribute(FlagsAttribute.class);
posIncAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
payloadAtt = (PayloadAttribute) addAttribute(PayloadAttribute.class);
termAtt = addAttribute(TermAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
flagsAtt = addAttribute(FlagsAttribute.class);
posIncAtt = addAttribute(PositionIncrementAttribute.class);
typeAtt = addAttribute(TypeAttribute.class);
payloadAtt = addAttribute(PayloadAttribute.class);
}
/**

View File

@ -48,7 +48,7 @@ public final class GermanStemFilter extends TokenFilter
{
super(in);
stemmer = new GermanStemmer();
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
/**

View File

@ -42,7 +42,7 @@ public final class GreekLowerCaseFilter extends TokenFilter
{
super(in);
this.charset = charset;
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
public GreekLowerCaseFilter(TokenStream in)

View File

@ -37,7 +37,7 @@ public final class PersianNormalizationFilter extends TokenFilter {
public PersianNormalizationFilter(TokenStream input) {
super(input);
normalizer = new PersianNormalizer();
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
public boolean incrementToken() throws IOException {

View File

@ -57,7 +57,7 @@ public class ElisionFilter extends TokenFilter {
super(input);
this.articles = new HashSet(Arrays.asList(new String[] { "l", "m", "t",
"qu", "n", "s", "j" }));
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
/**
@ -66,7 +66,7 @@ public class ElisionFilter extends TokenFilter {
public ElisionFilter(TokenStream input, Set articles) {
super(input);
setArticles(articles);
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
/**
@ -75,7 +75,7 @@ public class ElisionFilter extends TokenFilter {
public ElisionFilter(TokenStream input, String[] articles) {
super(input);
setArticles(new HashSet(Arrays.asList(articles)));
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
/**

View File

@ -47,7 +47,7 @@ public final class FrenchStemFilter extends TokenFilter {
public FrenchStemFilter( TokenStream in ) {
super(in);
stemmer = new FrenchStemmer();
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}

View File

@ -64,19 +64,19 @@ public class PrefixAwareTokenFilter extends TokenStream {
this.prefix = prefix;
prefixExhausted = false;
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
payloadAtt = (PayloadAttribute) addAttribute(PayloadAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
flagsAtt = (FlagsAttribute) addAttribute(FlagsAttribute.class);
termAtt = addAttribute(TermAttribute.class);
posIncrAtt = addAttribute(PositionIncrementAttribute.class);
payloadAtt = addAttribute(PayloadAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
typeAtt = addAttribute(TypeAttribute.class);
flagsAtt = addAttribute(FlagsAttribute.class);
p_termAtt = (TermAttribute) prefix.addAttribute(TermAttribute.class);
p_posIncrAtt = (PositionIncrementAttribute) prefix.addAttribute(PositionIncrementAttribute.class);
p_payloadAtt = (PayloadAttribute) prefix.addAttribute(PayloadAttribute.class);
p_offsetAtt = (OffsetAttribute) prefix.addAttribute(OffsetAttribute.class);
p_typeAtt = (TypeAttribute) prefix.addAttribute(TypeAttribute.class);
p_flagsAtt = (FlagsAttribute) prefix.addAttribute(FlagsAttribute.class);
p_termAtt = prefix.addAttribute(TermAttribute.class);
p_posIncrAtt = prefix.addAttribute(PositionIncrementAttribute.class);
p_payloadAtt = prefix.addAttribute(PayloadAttribute.class);
p_offsetAtt = prefix.addAttribute(OffsetAttribute.class);
p_typeAtt = prefix.addAttribute(TypeAttribute.class);
p_flagsAtt = prefix.addAttribute(FlagsAttribute.class);
}
private Token previousPrefixToken = new Token();

View File

@ -77,8 +77,8 @@ public class EdgeNGramTokenFilter extends TokenFilter {
protected EdgeNGramTokenFilter(TokenStream input) {
super(input);
this.termAtt = (TermAttribute) addAttribute(TermAttribute.class);
this.offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
this.termAtt = addAttribute(TermAttribute.class);
this.offsetAtt = addAttribute(OffsetAttribute.class);
}
/**
@ -107,8 +107,8 @@ public class EdgeNGramTokenFilter extends TokenFilter {
this.minGram = minGram;
this.maxGram = maxGram;
this.side = side;
this.termAtt = (TermAttribute) addAttribute(TermAttribute.class);
this.offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
this.termAtt = addAttribute(TermAttribute.class);
this.offsetAtt = addAttribute(OffsetAttribute.class);
}
/**

View File

@ -175,8 +175,8 @@ public class EdgeNGramTokenizer extends Tokenizer {
this.maxGram = maxGram;
this.side = side;
this.termAtt = (TermAttribute) addAttribute(TermAttribute.class);
this.offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
this.termAtt = addAttribute(TermAttribute.class);
this.offsetAtt = addAttribute(OffsetAttribute.class);
}

View File

@ -59,8 +59,8 @@ public class NGramTokenFilter extends TokenFilter {
this.minGram = minGram;
this.maxGram = maxGram;
this.termAtt = (TermAttribute) addAttribute(TermAttribute.class);
this.offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
this.termAtt = addAttribute(TermAttribute.class);
this.offsetAtt = addAttribute(OffsetAttribute.class);
}
/**

View File

@ -96,8 +96,8 @@ public class NGramTokenizer extends Tokenizer {
this.minGram = minGram;
this.maxGram = maxGram;
this.termAtt = (TermAttribute) addAttribute(TermAttribute.class);
this.offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
this.termAtt = addAttribute(TermAttribute.class);
this.offsetAtt = addAttribute(OffsetAttribute.class);
}
/** Returns the next token in the stream, or null at EOS. */

View File

@ -47,7 +47,7 @@ public final class DutchStemFilter extends TokenFilter {
public DutchStemFilter(TokenStream _in) {
super(_in);
stemmer = new DutchStemmer();
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
/**

View File

@ -53,8 +53,8 @@ public final class DelimitedPayloadTokenFilter extends TokenFilter {
public DelimitedPayloadTokenFilter(TokenStream input, char delimiter, PayloadEncoder encoder) {
super(input);
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
payAtt = (PayloadAttribute) addAttribute(PayloadAttribute.class);
termAtt = addAttribute(TermAttribute.class);
payAtt = addAttribute(PayloadAttribute.class);
this.delimiter = delimiter;
this.encoder = encoder;
}

View File

@ -44,8 +44,8 @@ public class NumericPayloadTokenFilter extends TokenFilter {
//Need to encode the payload
thePayload = new Payload(PayloadHelper.encodeFloat(payload));
this.typeMatch = typeMatch;
payloadAtt = (PayloadAttribute) addAttribute(PayloadAttribute.class);
typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
payloadAtt = addAttribute(PayloadAttribute.class);
typeAtt = addAttribute(TypeAttribute.class);
}
public final boolean incrementToken() throws IOException {

View File

@ -39,8 +39,8 @@ public class TokenOffsetPayloadTokenFilter extends TokenFilter {
public TokenOffsetPayloadTokenFilter(TokenStream input) {
super(input);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
payAtt = (PayloadAttribute) addAttribute(PayloadAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
payAtt = addAttribute(PayloadAttribute.class);
}
public final boolean incrementToken() throws IOException {

View File

@ -39,8 +39,8 @@ public class TypeAsPayloadTokenFilter extends TokenFilter {
public TypeAsPayloadTokenFilter(TokenStream input) {
super(input);
payloadAtt = (PayloadAttribute) addAttribute(PayloadAttribute.class);
typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
payloadAtt = addAttribute(PayloadAttribute.class);
typeAtt = addAttribute(TypeAttribute.class);
}

View File

@ -46,7 +46,7 @@ public class PositionFilter extends TokenFilter {
*/
public PositionFilter(final TokenStream input) {
super(input);
posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
posIncrAtt = addAttribute(PositionIncrementAttribute.class);
}
/**

View File

@ -87,7 +87,7 @@ public final class ReverseStringFilter extends TokenFilter {
public ReverseStringFilter(TokenStream in, char marker) {
super(in);
this.marker = marker;
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
public boolean incrementToken() throws IOException {

View File

@ -45,7 +45,7 @@ public final class RussianLowerCaseFilter extends TokenFilter
{
super(in);
this.charset = charset;
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
public RussianLowerCaseFilter(TokenStream in)

View File

@ -50,7 +50,7 @@ public final class RussianStemFilter extends TokenFilter
{
super(in);
stemmer = new RussianStemmer(charset);
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
public RussianStemFilter(TokenStream in)

View File

@ -84,10 +84,10 @@ public class ShingleFilter extends TokenFilter {
public ShingleFilter(TokenStream input, int maxShingleSize) {
super(input);
setMaxShingleSize(maxShingleSize);
this.termAtt = (TermAttribute) addAttribute(TermAttribute.class);
this.offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
this.posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
this.typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
this.termAtt = addAttribute(TermAttribute.class);
this.offsetAtt = addAttribute(OffsetAttribute.class);
this.posIncrAtt = addAttribute(PositionIncrementAttribute.class);
this.typeAtt = addAttribute(TypeAttribute.class);
}
/**

View File

@ -228,22 +228,22 @@ public class ShingleMatrixFilter extends TokenStream {
this.ignoringSinglePrefixOrSuffixShingle = ignoringSinglePrefixOrSuffixShingle;
this.settingsCodec = settingsCodec;
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
payloadAtt = (PayloadAttribute) addAttribute(PayloadAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
flagsAtt = (FlagsAttribute) addAttribute(FlagsAttribute.class);
termAtt = addAttribute(TermAttribute.class);
posIncrAtt = addAttribute(PositionIncrementAttribute.class);
payloadAtt = addAttribute(PayloadAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
typeAtt = addAttribute(TypeAttribute.class);
flagsAtt = addAttribute(FlagsAttribute.class);
// set the input to be an empty token stream, we already have the data.
this.input = new EmptyTokenStream();
in_termAtt = (TermAttribute) input.addAttribute(TermAttribute.class);
in_posIncrAtt = (PositionIncrementAttribute) input.addAttribute(PositionIncrementAttribute.class);
in_payloadAtt = (PayloadAttribute) input.addAttribute(PayloadAttribute.class);
in_offsetAtt = (OffsetAttribute) input.addAttribute(OffsetAttribute.class);
in_typeAtt = (TypeAttribute) input.addAttribute(TypeAttribute.class);
in_flagsAtt = (FlagsAttribute) input.addAttribute(FlagsAttribute.class);
in_termAtt = input.addAttribute(TermAttribute.class);
in_posIncrAtt = input.addAttribute(PositionIncrementAttribute.class);
in_payloadAtt = input.addAttribute(PayloadAttribute.class);
in_offsetAtt = input.addAttribute(OffsetAttribute.class);
in_typeAtt = input.addAttribute(TypeAttribute.class);
in_flagsAtt = input.addAttribute(FlagsAttribute.class);
}
/**
@ -310,19 +310,19 @@ public class ShingleMatrixFilter extends TokenStream {
this.spacerCharacter = spacerCharacter;
this.ignoringSinglePrefixOrSuffixShingle = ignoringSinglePrefixOrSuffixShingle;
this.settingsCodec = settingsCodec;
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
payloadAtt = (PayloadAttribute) addAttribute(PayloadAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
flagsAtt = (FlagsAttribute) addAttribute(FlagsAttribute.class);
termAtt = addAttribute(TermAttribute.class);
posIncrAtt = addAttribute(PositionIncrementAttribute.class);
payloadAtt = addAttribute(PayloadAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
typeAtt = addAttribute(TypeAttribute.class);
flagsAtt = addAttribute(FlagsAttribute.class);
in_termAtt = (TermAttribute) input.addAttribute(TermAttribute.class);
in_posIncrAtt = (PositionIncrementAttribute) input.addAttribute(PositionIncrementAttribute.class);
in_payloadAtt = (PayloadAttribute) input.addAttribute(PayloadAttribute.class);
in_offsetAtt = (OffsetAttribute) input.addAttribute(OffsetAttribute.class);
in_typeAtt = (TypeAttribute) input.addAttribute(TypeAttribute.class);
in_flagsAtt = (FlagsAttribute) input.addAttribute(FlagsAttribute.class);
in_termAtt = input.addAttribute(TermAttribute.class);
in_posIncrAtt = input.addAttribute(PositionIncrementAttribute.class);
in_payloadAtt = input.addAttribute(PayloadAttribute.class);
in_offsetAtt = input.addAttribute(OffsetAttribute.class);
in_typeAtt = input.addAttribute(TypeAttribute.class);
in_flagsAtt = input.addAttribute(FlagsAttribute.class);
}
// internal filter instance variables

View File

@ -51,7 +51,7 @@ public class DateRecognizerSinkFilter extends SinkFilter {
public boolean accept(AttributeSource source) {
if (termAtt == null) {
termAtt = (TermAttribute) source.addAttribute(TermAttribute.class);
termAtt = source.addAttribute(TermAttribute.class);
}
try {
Date date = dateFormat.parse(termAtt.term());//We don't care about the date, just that we can parse it as a date

View File

@ -31,7 +31,7 @@ public class TokenTypeSinkFilter extends SinkFilter {
public boolean accept(AttributeSource source) {
if (typeAtt == null) {
typeAtt = (TypeAttribute) source.addAttribute(TypeAttribute.class);
typeAtt = source.addAttribute(TypeAttribute.class);
}
//check to see if this is a Category

View File

@ -44,8 +44,8 @@ public class ThaiWordFilter extends TokenFilter {
public ThaiWordFilter(TokenStream input) {
super(input);
breaker = BreakIterator.getWordInstance(new Locale("th"));
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
termAtt = addAttribute(TermAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
}
public final boolean incrementToken() throws IOException {

View File

@ -48,9 +48,9 @@ public class TestCJKTokenizer extends BaseTokenStreamTestCase {
public void checkCJKToken(final String str, final TestToken[] out_tokens) throws IOException {
CJKTokenizer tokenizer = new CJKTokenizer(new StringReader(str));
TermAttribute termAtt = (TermAttribute) tokenizer.getAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = (OffsetAttribute) tokenizer.getAttribute(OffsetAttribute.class);
TypeAttribute typeAtt = (TypeAttribute) tokenizer.getAttribute(TypeAttribute.class);
TermAttribute termAtt = tokenizer.getAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = tokenizer.getAttribute(OffsetAttribute.class);
TypeAttribute typeAtt = tokenizer.getAttribute(TypeAttribute.class);
for (int i = 0; i < out_tokens.length; i++) {
assertTrue(tokenizer.incrementToken());
assertEquals(termAtt.term(), out_tokens[i].termText);
@ -63,9 +63,9 @@ public class TestCJKTokenizer extends BaseTokenStreamTestCase {
public void checkCJKTokenReusable(final Analyzer a, final String str, final TestToken[] out_tokens) throws IOException {
TokenStream ts = a.reusableTokenStream("dummy", new StringReader(str));
TermAttribute termAtt = (TermAttribute) ts.getAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = (OffsetAttribute) ts.getAttribute(OffsetAttribute.class);
TypeAttribute typeAtt = (TypeAttribute) ts.getAttribute(TypeAttribute.class);
TermAttribute termAtt = ts.getAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = ts.getAttribute(OffsetAttribute.class);
TypeAttribute typeAtt = ts.getAttribute(TypeAttribute.class);
for (int i = 0; i < out_tokens.length; i++) {
assertTrue(ts.incrementToken());
assertEquals(termAtt.term(), out_tokens[i].termText);
@ -220,7 +220,7 @@ public class TestCJKTokenizer extends BaseTokenStreamTestCase {
public void testTokenStream() throws Exception {
Analyzer analyzer = new CJKAnalyzer();
TokenStream ts = analyzer.tokenStream("dummy", new StringReader("\u4e00\u4e01\u4e02"));
TermAttribute termAtt = (TermAttribute) ts.getAttribute(TermAttribute.class);
TermAttribute termAtt = ts.getAttribute(TermAttribute.class);
assertTrue(ts.incrementToken());
assertEquals("\u4e00\u4e01", termAtt.term());
assertTrue(ts.incrementToken());

View File

@ -38,7 +38,7 @@ public class TestChineseTokenizer extends BaseTokenStreamTestCase
int correctStartOffset = 0;
int correctEndOffset = 1;
OffsetAttribute offsetAtt = (OffsetAttribute) tokenizer.getAttribute(OffsetAttribute.class);
OffsetAttribute offsetAtt = tokenizer.getAttribute(OffsetAttribute.class);
while (tokenizer.incrementToken()) {
assertEquals(correctStartOffset, offsetAtt.startOffset());
assertEquals(correctEndOffset, offsetAtt.endOffset());

View File

@ -173,7 +173,7 @@ public class TestCompoundWordTokenFilter extends BaseTokenStreamTestCase {
CompoundWordTokenFilterBase.DEFAULT_MIN_SUBWORD_SIZE,
CompoundWordTokenFilterBase.DEFAULT_MAX_SUBWORD_SIZE, false);
TermAttribute termAtt = (TermAttribute) tf.getAttribute(TermAttribute.class);
TermAttribute termAtt = tf.getAttribute(TermAttribute.class);
assertTrue(tf.incrementToken());
assertEquals("Rindfleischüberwachungsgesetz", termAtt.term());
assertTrue(tf.incrementToken());

View File

@ -50,7 +50,7 @@ public class TestElision extends BaseTokenStreamTestCase {
private List filtre(TokenFilter filter) throws IOException {
List tas = new ArrayList();
TermAttribute termAtt = (TermAttribute) filter.getAttribute(TermAttribute.class);
TermAttribute termAtt = filter.getAttribute(TermAttribute.class);
while (filter.incrementToken()) {
tas.add(termAtt.term());
}

View File

@ -45,8 +45,8 @@ public class TestPrefixAndSuffixAwareTokenFilter extends BaseTokenStreamTestCase
private void assertNext(TokenStream ts, String text, int startOffset, int endOffset) throws IOException {
TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = (OffsetAttribute) ts.addAttribute(OffsetAttribute.class);
TermAttribute termAtt = ts.addAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);
assertTrue(ts.incrementToken());
assertEquals(text, termAtt.term());

View File

@ -54,8 +54,8 @@ public class TestPrefixAwareTokenFilter extends BaseTokenStreamTestCase {
private void assertNext(TokenStream ts, String text, int startOffset, int endOffset) throws IOException {
TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = (OffsetAttribute) ts.addAttribute(OffsetAttribute.class);
TermAttribute termAtt = ts.addAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);
assertTrue(ts.incrementToken());
assertEquals(text, termAtt.term());

View File

@ -35,8 +35,8 @@ public class DelimitedPayloadTokenFilterTest extends LuceneTestCase {
public void testPayloads() throws Exception {
String test = "The quick|JJ red|JJ fox|NN jumped|VB over the lazy|JJ brown|JJ dogs|NN";
DelimitedPayloadTokenFilter filter = new DelimitedPayloadTokenFilter(new WhitespaceTokenizer(new StringReader(test)));
TermAttribute termAtt = (TermAttribute) filter.getAttribute(TermAttribute.class);
PayloadAttribute payAtt = (PayloadAttribute) filter.getAttribute(PayloadAttribute.class);
TermAttribute termAtt = filter.getAttribute(TermAttribute.class);
PayloadAttribute payAtt = filter.getAttribute(PayloadAttribute.class);
assertTermEquals("The", filter, termAtt, payAtt, null);
assertTermEquals("quick", filter, termAtt, payAtt, "JJ".getBytes("UTF-8"));
assertTermEquals("red", filter, termAtt, payAtt, "JJ".getBytes("UTF-8"));
@ -71,8 +71,8 @@ public class DelimitedPayloadTokenFilterTest extends LuceneTestCase {
public void testFloatEncoding() throws Exception {
String test = "The quick|1.0 red|2.0 fox|3.5 jumped|0.5 over the lazy|5 brown|99.3 dogs|83.7";
DelimitedPayloadTokenFilter filter = new DelimitedPayloadTokenFilter(new WhitespaceTokenizer(new StringReader(test)), '|', new FloatEncoder());
TermAttribute termAtt = (TermAttribute) filter.getAttribute(TermAttribute.class);
PayloadAttribute payAtt = (PayloadAttribute) filter.getAttribute(PayloadAttribute.class);
TermAttribute termAtt = filter.getAttribute(TermAttribute.class);
PayloadAttribute payAtt = filter.getAttribute(PayloadAttribute.class);
assertTermEquals("The", filter, termAtt, payAtt, null);
assertTermEquals("quick", filter, termAtt, payAtt, PayloadHelper.encodeFloat(1.0f));
assertTermEquals("red", filter, termAtt, payAtt, PayloadHelper.encodeFloat(2.0f));
@ -89,8 +89,8 @@ public class DelimitedPayloadTokenFilterTest extends LuceneTestCase {
public void testIntEncoding() throws Exception {
String test = "The quick|1 red|2 fox|3 jumped over the lazy|5 brown|99 dogs|83";
DelimitedPayloadTokenFilter filter = new DelimitedPayloadTokenFilter(new WhitespaceTokenizer(new StringReader(test)), '|', new IntegerEncoder());
TermAttribute termAtt = (TermAttribute) filter.getAttribute(TermAttribute.class);
PayloadAttribute payAtt = (PayloadAttribute) filter.getAttribute(PayloadAttribute.class);
TermAttribute termAtt = filter.getAttribute(TermAttribute.class);
PayloadAttribute payAtt = filter.getAttribute(PayloadAttribute.class);
assertTermEquals("The", filter, termAtt, payAtt, null);
assertTermEquals("quick", filter, termAtt, payAtt, PayloadHelper.encodeInt(1));
assertTermEquals("red", filter, termAtt, payAtt, PayloadHelper.encodeInt(2));
@ -105,8 +105,8 @@ public class DelimitedPayloadTokenFilterTest extends LuceneTestCase {
}
void assertTermEquals(String expected, TokenStream stream, byte[] expectPay) throws Exception {
TermAttribute termAtt = (TermAttribute) stream.getAttribute(TermAttribute.class);
PayloadAttribute payloadAtt = (PayloadAttribute) stream.getAttribute(PayloadAttribute.class);
TermAttribute termAtt = stream.getAttribute(TermAttribute.class);
PayloadAttribute payloadAtt = stream.getAttribute(PayloadAttribute.class);
assertTrue(stream.incrementToken());
assertEquals(expected, termAtt.term());
Payload payload = payloadAtt.getPayload();

View File

@ -39,9 +39,9 @@ public class NumericPayloadTokenFilterTest extends BaseTokenStreamTestCase {
NumericPayloadTokenFilter nptf = new NumericPayloadTokenFilter(new WordTokenFilter(new WhitespaceTokenizer(new StringReader(test))), 3, "D");
boolean seenDogs = false;
TermAttribute termAtt = (TermAttribute) nptf.getAttribute(TermAttribute.class);
TypeAttribute typeAtt = (TypeAttribute) nptf.getAttribute(TypeAttribute.class);
PayloadAttribute payloadAtt = (PayloadAttribute) nptf.getAttribute(PayloadAttribute.class);
TermAttribute termAtt = nptf.getAttribute(TermAttribute.class);
TypeAttribute typeAtt = nptf.getAttribute(TypeAttribute.class);
PayloadAttribute payloadAtt = nptf.getAttribute(PayloadAttribute.class);
while (nptf.incrementToken()) {
if (termAtt.term().equals("dogs")) {
seenDogs = true;
@ -65,8 +65,8 @@ public class NumericPayloadTokenFilterTest extends BaseTokenStreamTestCase {
private WordTokenFilter(TokenStream input) {
super(input);
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
termAtt = addAttribute(TermAttribute.class);
typeAtt = addAttribute(TypeAttribute.class);
}
public boolean incrementToken() throws IOException {

View File

@ -37,8 +37,8 @@ public class TokenOffsetPayloadTokenFilterTest extends BaseTokenStreamTestCase {
TokenOffsetPayloadTokenFilter nptf = new TokenOffsetPayloadTokenFilter(new WhitespaceTokenizer(new StringReader(test)));
int count = 0;
PayloadAttribute payloadAtt = (PayloadAttribute) nptf.getAttribute(PayloadAttribute.class);
OffsetAttribute offsetAtt = (OffsetAttribute) nptf.getAttribute(OffsetAttribute.class);
PayloadAttribute payloadAtt = nptf.getAttribute(PayloadAttribute.class);
OffsetAttribute offsetAtt = nptf.getAttribute(OffsetAttribute.class);
while (nptf.incrementToken()) {
Payload pay = payloadAtt.getPayload();

View File

@ -39,9 +39,9 @@ public class TypeAsPayloadTokenFilterTest extends BaseTokenStreamTestCase {
TypeAsPayloadTokenFilter nptf = new TypeAsPayloadTokenFilter(new WordTokenFilter(new WhitespaceTokenizer(new StringReader(test))));
int count = 0;
TermAttribute termAtt = (TermAttribute) nptf.getAttribute(TermAttribute.class);
TypeAttribute typeAtt = (TypeAttribute) nptf.getAttribute(TypeAttribute.class);
PayloadAttribute payloadAtt = (PayloadAttribute) nptf.getAttribute(PayloadAttribute.class);
TermAttribute termAtt = nptf.getAttribute(TermAttribute.class);
TypeAttribute typeAtt = nptf.getAttribute(TypeAttribute.class);
PayloadAttribute payloadAtt = nptf.getAttribute(PayloadAttribute.class);
while (nptf.incrementToken()) {
assertTrue(typeAtt.type() + " is not null and it should be", typeAtt.type().equals(String.valueOf(Character.toUpperCase(termAtt.termBuffer()[0]))));
@ -61,8 +61,8 @@ public class TypeAsPayloadTokenFilterTest extends BaseTokenStreamTestCase {
private WordTokenFilter(TokenStream input) {
super(input);
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
termAtt = addAttribute(TermAttribute.class);
typeAtt = addAttribute(TypeAttribute.class);
}
public boolean incrementToken() throws IOException {

View File

@ -35,7 +35,7 @@ public class PositionFilterTest extends BaseTokenStreamTestCase {
public TestTokenStream(String[] testToken) {
super();
this.testToken = testToken;
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
public final boolean incrementToken() throws IOException {

View File

@ -193,7 +193,7 @@ public class QueryAutoStopWordAnalyzerTest extends BaseTokenStreamTestCase {
QueryAutoStopWordAnalyzer a = new QueryAutoStopWordAnalyzer(new WhitespaceAnalyzer());
a.addStopWords(reader, 10);
TokenStream ts = a.tokenStream("repetitiveField", new StringReader("this boring"));
TermAttribute termAtt = (TermAttribute) ts.getAttribute(TermAttribute.class);
TermAttribute termAtt = ts.getAttribute(TermAttribute.class);
assertTrue(ts.incrementToken());
assertEquals("this", termAtt.term());
assertFalse(ts.incrementToken());

View File

@ -29,7 +29,7 @@ public class TestReverseStringFilter extends BaseTokenStreamTestCase {
TokenStream stream = new WhitespaceTokenizer(
new StringReader("Do have a nice day")); // 1-4 length string
ReverseStringFilter filter = new ReverseStringFilter(stream);
TermAttribute text = (TermAttribute) filter.getAttribute(TermAttribute.class);
TermAttribute text = filter.getAttribute(TermAttribute.class);
assertTrue(filter.incrementToken());
assertEquals("oD", text.term());
assertTrue(filter.incrementToken());
@ -47,7 +47,7 @@ public class TestReverseStringFilter extends BaseTokenStreamTestCase {
TokenStream stream = new WhitespaceTokenizer(new StringReader(
"Do have a nice day")); // 1-4 length string
ReverseStringFilter filter = new ReverseStringFilter(stream, '\u0001');
TermAttribute text = (TermAttribute) filter
TermAttribute text = filter
.getAttribute(TermAttribute.class);
assertTrue(filter.incrementToken());
assertEquals("\u0001oD", text.term());

View File

@ -77,8 +77,8 @@ public class TestRussianAnalyzer extends BaseTokenStreamTestCase
new RussianLetterTokenizer(
sampleUnicode);
TermAttribute text = (TermAttribute) in.getAttribute(TermAttribute.class);
TermAttribute sampleText = (TermAttribute) sample.getAttribute(TermAttribute.class);
TermAttribute text = in.getAttribute(TermAttribute.class);
TermAttribute sampleText = sample.getAttribute(TermAttribute.class);
for (;;)
{
@ -113,8 +113,8 @@ public class TestRussianAnalyzer extends BaseTokenStreamTestCase
sampleKOI8,
RussianCharsets.KOI8);
TermAttribute text = (TermAttribute) in.getAttribute(TermAttribute.class);
TermAttribute sampleText = (TermAttribute) sample.getAttribute(TermAttribute.class);
TermAttribute text = in.getAttribute(TermAttribute.class);
TermAttribute sampleText = sample.getAttribute(TermAttribute.class);
for (;;)
{
@ -147,8 +147,8 @@ public class TestRussianAnalyzer extends BaseTokenStreamTestCase
sample1251,
RussianCharsets.CP1251);
TermAttribute text = (TermAttribute) in.getAttribute(TermAttribute.class);
TermAttribute sampleText = (TermAttribute) sample.getAttribute(TermAttribute.class);
TermAttribute text = in.getAttribute(TermAttribute.class);
TermAttribute sampleText = sample.getAttribute(TermAttribute.class);
for (;;)
{
@ -174,7 +174,7 @@ public class TestRussianAnalyzer extends BaseTokenStreamTestCase
RussianAnalyzer ra = new RussianAnalyzer();
TokenStream stream = ra.tokenStream("", reader);
TermAttribute termText = (TermAttribute) stream.getAttribute(TermAttribute.class);
TermAttribute termText = stream.getAttribute(TermAttribute.class);
try {
assertTrue(stream.incrementToken());
assertEquals("text", termText.term());

View File

@ -157,8 +157,8 @@ public class ShingleAnalyzerWrapperTest extends BaseTokenStreamTestCase {
new StringReader("this sentence"));
int j = -1;
PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) ts.addAttribute(PositionIncrementAttribute.class);
TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
PositionIncrementAttribute posIncrAtt = ts.addAttribute(PositionIncrementAttribute.class);
TermAttribute termAtt = ts.addAttribute(TermAttribute.class);
while (ts.incrementToken()) {
j += posIncrAtt.getPositionIncrement();
@ -185,7 +185,7 @@ public class ShingleAnalyzerWrapperTest extends BaseTokenStreamTestCase {
TokenStream ts = analyzer.tokenStream("content",
new StringReader("test sentence"));
TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
TermAttribute termAtt = ts.addAttribute(TermAttribute.class);
while (ts.incrementToken()) {
String termText = termAtt.term();

View File

@ -42,10 +42,10 @@ public class ShingleFilterTest extends BaseTokenStreamTestCase {
public TestTokenStream(Token[] testToken) {
super();
this.testToken = testToken;
this.termAtt = (TermAttribute) addAttribute(TermAttribute.class);
this.offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
this.posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
this.typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
this.termAtt = addAttribute(TermAttribute.class);
this.offsetAtt = addAttribute(OffsetAttribute.class);
this.posIncrAtt = addAttribute(PositionIncrementAttribute.class);
this.typeAtt = addAttribute(TypeAttribute.class);
}
public final boolean incrementToken() throws IOException {
@ -299,10 +299,10 @@ public class ShingleFilterTest extends BaseTokenStreamTestCase {
ShingleFilter filter = new ShingleFilter(new TestTokenStream(tokensToShingle), maxSize);
filter.setOutputUnigrams(outputUnigrams);
TermAttribute termAtt = (TermAttribute) filter.addAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = (OffsetAttribute) filter.addAttribute(OffsetAttribute.class);
PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) filter.addAttribute(PositionIncrementAttribute.class);
TypeAttribute typeAtt = (TypeAttribute) filter.addAttribute(TypeAttribute.class);
TermAttribute termAtt = filter.addAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = filter.addAttribute(OffsetAttribute.class);
PositionIncrementAttribute posIncrAtt = filter.addAttribute(PositionIncrementAttribute.class);
TypeAttribute typeAtt = filter.addAttribute(TypeAttribute.class);
int i = 0;
while (filter.incrementToken()) {

View File

@ -454,16 +454,16 @@ public class TestShingleMatrixFilter extends BaseTokenStreamTestCase {
// assert-methods start here
private void assertNext(TokenStream ts, String text) throws IOException {
TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
TermAttribute termAtt = ts.addAttribute(TermAttribute.class);
assertTrue(ts.incrementToken());
assertEquals(text, termAtt.term());
}
private void assertNext(TokenStream ts, String text, int positionIncrement, float boost) throws IOException {
TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) ts.addAttribute(PositionIncrementAttribute.class);
PayloadAttribute payloadAtt = (PayloadAttribute) ts.addAttribute(PayloadAttribute.class);
TermAttribute termAtt = ts.addAttribute(TermAttribute.class);
PositionIncrementAttribute posIncrAtt = ts.addAttribute(PositionIncrementAttribute.class);
PayloadAttribute payloadAtt = ts.addAttribute(PayloadAttribute.class);
assertTrue(ts.incrementToken());
assertEquals(text, termAtt.term());
@ -472,10 +472,10 @@ public class TestShingleMatrixFilter extends BaseTokenStreamTestCase {
}
private void assertNext(TokenStream ts, String text, int positionIncrement, float boost, int startOffset, int endOffset) throws IOException {
TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) ts.addAttribute(PositionIncrementAttribute.class);
PayloadAttribute payloadAtt = (PayloadAttribute) ts.addAttribute(PayloadAttribute.class);
OffsetAttribute offsetAtt = (OffsetAttribute) ts.addAttribute(OffsetAttribute.class);
TermAttribute termAtt = ts.addAttribute(TermAttribute.class);
PositionIncrementAttribute posIncrAtt = ts.addAttribute(PositionIncrementAttribute.class);
PayloadAttribute payloadAtt = ts.addAttribute(PayloadAttribute.class);
OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);
assertTrue(ts.incrementToken());
assertEquals(text, termAtt.term());
@ -486,8 +486,8 @@ public class TestShingleMatrixFilter extends BaseTokenStreamTestCase {
}
private void assertNext(TokenStream ts, String text, int startOffset, int endOffset) throws IOException {
TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = (OffsetAttribute) ts.addAttribute(OffsetAttribute.class);
TermAttribute termAtt = ts.addAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);
assertTrue(ts.incrementToken());
assertEquals(text, termAtt.term());
@ -515,12 +515,12 @@ public class TestShingleMatrixFilter extends BaseTokenStreamTestCase {
public TokenListStream(Collection tokens) {
this.tokens = tokens;
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
payloadAtt = (PayloadAttribute) addAttribute(PayloadAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
flagsAtt = (FlagsAttribute) addAttribute(FlagsAttribute.class);
termAtt = addAttribute(TermAttribute.class);
posIncrAtt = addAttribute(PositionIncrementAttribute.class);
payloadAtt = addAttribute(PayloadAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
typeAtt = addAttribute(TypeAttribute.class);
flagsAtt = addAttribute(FlagsAttribute.class);
}
private Iterator iterator;

View File

@ -44,8 +44,8 @@ public class TokenTypeSinkTokenizerTest extends BaseTokenStreamTestCase {
boolean seenDogs = false;
TermAttribute termAtt = (TermAttribute) ttf.addAttribute(TermAttribute.class);
TypeAttribute typeAtt = (TypeAttribute) ttf.addAttribute(TypeAttribute.class);
TermAttribute termAtt = ttf.addAttribute(TermAttribute.class);
TypeAttribute typeAtt = ttf.addAttribute(TypeAttribute.class);
ttf.reset();
while (ttf.incrementToken()) {
if (termAtt.term().equals("dogs")) {
@ -72,8 +72,8 @@ public class TokenTypeSinkTokenizerTest extends BaseTokenStreamTestCase {
private WordTokenFilter(TokenStream input) {
super(input);
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
termAtt = addAttribute(TermAttribute.class);
typeAtt = addAttribute(TypeAttribute.class);
}
public final boolean incrementToken() throws IOException {

View File

@ -22,9 +22,6 @@
<description>
Smart Chinese Analyzer
</description>
<property name="javac.source" value="1.4" />
<property name="javac.target" value="1.4" />
<property name="build.dir" location="../../../build/contrib/analyzers/smartcn" />
<property name="dist.dir" location="../../../dist/contrib/analyzers/smartcn" />

View File

@ -68,9 +68,9 @@ public final class SentenceTokenizer extends Tokenizer {
}
private void init() {
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
termAtt = addAttribute(TermAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
typeAtt = addAttribute(TypeAttribute.class);
}
public boolean incrementToken() throws IOException {

View File

@ -56,9 +56,9 @@ public final class WordTokenFilter extends TokenFilter {
public WordTokenFilter(TokenStream in) {
super(in);
this.wordSegmenter = new WordSegmenter();
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
termAtt = addAttribute(TermAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
typeAtt = addAttribute(TypeAttribute.class);
}
public boolean incrementToken() throws IOException {

View File

@ -85,7 +85,7 @@ public final class CollationKeyFilter extends TokenFilter {
public CollationKeyFilter(TokenStream input, Collator collator) {
super(input);
this.collator = collator;
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
public boolean incrementToken() throws IOException {

View File

@ -83,7 +83,7 @@ public final class ICUCollationKeyFilter extends TokenFilter {
public ICUCollationKeyFilter(TokenStream input, Collator collator) {
super(input);
this.collator = collator;
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
public boolean incrementToken() throws IOException {

View File

@ -195,8 +195,8 @@ public abstract class AbstractTestCase extends TestCase {
ch = 0;
}
TermAttribute termAtt = (TermAttribute) addAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
TermAttribute termAtt = addAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
public boolean incrementToken() throws IOException {
if( !getNextPartialSnippet() )
return false;

View File

@ -217,8 +217,8 @@ public class Highlighter
ArrayList docFrags = new ArrayList();
StringBuffer newText=new StringBuffer();
TermAttribute termAtt = (TermAttribute) tokenStream.addAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = (OffsetAttribute) tokenStream.addAttribute(OffsetAttribute.class);
TermAttribute termAtt = tokenStream.addAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = tokenStream.addAttribute(OffsetAttribute.class);
tokenStream.addAttribute(PositionIncrementAttribute.class);
tokenStream.reset();

View File

@ -175,8 +175,8 @@ public class QueryScorer implements Scorer {
*/
public TokenStream init(TokenStream tokenStream) throws IOException {
position = -1;
termAtt = (TermAttribute) tokenStream.addAttribute(TermAttribute.class);
posIncAtt = (PositionIncrementAttribute) tokenStream.addAttribute(PositionIncrementAttribute.class);
termAtt = tokenStream.addAttribute(TermAttribute.class);
posIncAtt = tokenStream.addAttribute(PositionIncrementAttribute.class);
if(!skipInitExtractor) {
if(fieldWeightedSpanTerms != null) {
fieldWeightedSpanTerms.clear();

View File

@ -95,7 +95,7 @@ public class QueryTermScorer implements Scorer {
* @see org.apache.lucene.search.highlight.Scorer#init(org.apache.lucene.analysis.TokenStream)
*/
public TokenStream init(TokenStream tokenStream) {
termAtt = (TermAttribute) tokenStream.addAttribute(TermAttribute.class);
termAtt = tokenStream.addAttribute(TermAttribute.class);
return null;
}

View File

@ -47,7 +47,7 @@ public class SimpleFragmenter implements Fragmenter {
* @see org.apache.lucene.search.highlight.Fragmenter#start(java.lang.String, org.apache.lucene.analysis.TokenStream)
*/
public void start(String originalText, TokenStream stream) {
offsetAtt = (OffsetAttribute) stream.addAttribute(OffsetAttribute.class);
offsetAtt = stream.addAttribute(OffsetAttribute.class);
currentNumFrags = 1;
}

View File

@ -101,8 +101,8 @@ public class SimpleSpanFragmenter implements Fragmenter {
position = -1;
currentNumFrags = 1;
textSize = originalText.length();
termAtt = (TermAttribute) tokenStream.addAttribute(TermAttribute.class);
posIncAtt = (PositionIncrementAttribute) tokenStream.addAttribute(PositionIncrementAttribute.class);
offsetAtt = (OffsetAttribute) tokenStream.addAttribute(OffsetAttribute.class);
termAtt = tokenStream.addAttribute(TermAttribute.class);
posIncAtt = tokenStream.addAttribute(PositionIncrementAttribute.class);
offsetAtt = tokenStream.addAttribute(OffsetAttribute.class);
}
}

View File

@ -41,8 +41,8 @@ public class TokenGroup {
private TermAttribute termAtt;
public TokenGroup(TokenStream tokenStream) {
offsetAtt = (OffsetAttribute) tokenStream.addAttribute(OffsetAttribute.class);
termAtt = (TermAttribute) tokenStream.addAttribute(TermAttribute.class);
offsetAtt = tokenStream.addAttribute(OffsetAttribute.class);
termAtt = tokenStream.addAttribute(TermAttribute.class);
}
void addToken(float score) {

View File

@ -147,8 +147,8 @@ public class TokenSources
StoredTokenStream(Token tokens[]) {
this.tokens = tokens;
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
termAtt = addAttribute(TermAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
}
public boolean incrementToken() throws IOException {

View File

@ -1316,9 +1316,9 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
private PositionIncrementAttribute posIncrAtt;
private OffsetAttribute offsetAtt;
{
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
termAtt = addAttribute(TermAttribute.class);
posIncrAtt = addAttribute(PositionIncrementAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
lst = new ArrayList();
Token t;
t = createToken("hi", 0, 2);
@ -1363,9 +1363,9 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
private PositionIncrementAttribute posIncrAtt;
private OffsetAttribute offsetAtt;
{
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
termAtt = addAttribute(TermAttribute.class);
posIncrAtt = addAttribute(PositionIncrementAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
lst = new ArrayList();
Token t;
t = createToken("hispeed", 0, 8);
@ -1686,13 +1686,13 @@ class SynonymTokenizer extends TokenStream {
public SynonymTokenizer(TokenStream realStream, Map synonyms) {
this.realStream = realStream;
this.synonyms = synonyms;
realTermAtt = (TermAttribute) realStream.addAttribute(TermAttribute.class);
realPosIncrAtt = (PositionIncrementAttribute) realStream.addAttribute(PositionIncrementAttribute.class);
realOffsetAtt = (OffsetAttribute) realStream.addAttribute(OffsetAttribute.class);
realTermAtt = realStream.addAttribute(TermAttribute.class);
realPosIncrAtt = realStream.addAttribute(PositionIncrementAttribute.class);
realOffsetAtt = realStream.addAttribute(OffsetAttribute.class);
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
termAtt = addAttribute(TermAttribute.class);
posIncrAtt = addAttribute(PositionIncrementAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
}
public boolean incrementToken() throws IOException {

View File

@ -282,8 +282,8 @@ class LuceneMethods {
int position = 0;
// Tokenize field and add to postingTable
TokenStream stream = analyzer.tokenStream(fieldName, reader);
TermAttribute termAtt = (TermAttribute) stream.addAttribute(TermAttribute.class);
PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) stream.addAttribute(PositionIncrementAttribute.class);
TermAttribute termAtt = stream.addAttribute(TermAttribute.class);
PositionIncrementAttribute posIncrAtt = stream.addAttribute(PositionIncrementAttribute.class);
try {
while (stream.incrementToken()) {

View File

@ -75,10 +75,10 @@ public class AnalyzerUtil {
public TokenStream tokenStream(final String fieldName, Reader reader) {
return new TokenFilter(child.tokenStream(fieldName, reader)) {
private int position = -1;
private TermAttribute termAtt = (TermAttribute) addAttribute(TermAttribute.class);
private PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
private OffsetAttribute offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
private TypeAttribute typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
private TermAttribute termAtt = addAttribute(TermAttribute.class);
private PositionIncrementAttribute posIncrAtt = addAttribute(PositionIncrementAttribute.class);
private OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
private TypeAttribute typeAtt = addAttribute(TypeAttribute.class);
public boolean incrementToken() throws IOException {
boolean hasNext = input.incrementToken();
@ -307,7 +307,7 @@ public class AnalyzerUtil {
// compute frequencies of distinct terms
HashMap map = new HashMap();
TokenStream stream = analyzer.tokenStream("", new StringReader(text));
TermAttribute termAtt = (TermAttribute) stream.addAttribute(TermAttribute.class);
TermAttribute termAtt = stream.addAttribute(TermAttribute.class);
try {
while (stream.incrementToken()) {
MutableInteger freq = (MutableInteger) map.get(termAtt.term());

View File

@ -276,8 +276,8 @@ public class MemoryIndex implements Serializable {
return new TokenStream() {
private Iterator iter = keywords.iterator();
private int start = 0;
private TermAttribute termAtt = (TermAttribute) addAttribute(TermAttribute.class);
private OffsetAttribute offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
private TermAttribute termAtt = addAttribute(TermAttribute.class);
private OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
public boolean incrementToken() {
if (!iter.hasNext()) return false;
@ -338,9 +338,9 @@ public class MemoryIndex implements Serializable {
int numOverlapTokens = 0;
int pos = -1;
TermAttribute termAtt = (TermAttribute) stream.addAttribute(TermAttribute.class);
PositionIncrementAttribute posIncrAttribute = (PositionIncrementAttribute) stream.addAttribute(PositionIncrementAttribute.class);
OffsetAttribute offsetAtt = (OffsetAttribute) stream.addAttribute(OffsetAttribute.class);
TermAttribute termAtt = stream.addAttribute(TermAttribute.class);
PositionIncrementAttribute posIncrAttribute = stream.addAttribute(PositionIncrementAttribute.class);
OffsetAttribute offsetAtt = stream.addAttribute(OffsetAttribute.class);
stream.reset();
while (stream.incrementToken()) {

View File

@ -332,8 +332,8 @@ public class PatternAnalyzer extends Analyzer {
private Matcher matcher;
private int pos = 0;
private static final Locale locale = Locale.getDefault();
private TermAttribute termAtt = (TermAttribute) addAttribute(TermAttribute.class);
private OffsetAttribute offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
private TermAttribute termAtt = addAttribute(TermAttribute.class);
private OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
public PatternTokenizer(String str, Pattern pattern, boolean toLowerCase) {
this.str = str;
@ -390,8 +390,8 @@ public class PatternAnalyzer extends Analyzer {
private final boolean toLowerCase;
private final Set stopWords;
private static final Locale locale = Locale.getDefault();
private TermAttribute termAtt = (TermAttribute) addAttribute(TermAttribute.class);
private OffsetAttribute offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
private TermAttribute termAtt = addAttribute(TermAttribute.class);
private OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
public FastStringTokenizer(String str, boolean isLetter, boolean toLowerCase, Set stopWords) {
this.str = str;

View File

@ -72,9 +72,9 @@ public class SynonymTokenFilter extends TokenFilter {
this.synonyms = synonyms;
this.maxSynonyms = maxSynonyms;
this.termAtt = (TermAttribute) addAttribute(TermAttribute.class);
this.typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
this.posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
this.termAtt = addAttribute(TermAttribute.class);
this.typeAtt = addAttribute(TypeAttribute.class);
this.posIncrAtt = addAttribute(PositionIncrementAttribute.class);
}
/** Returns the next token in the stream, or null at EOS. */

View File

@ -106,7 +106,7 @@ public class AnalyzingQueryParser extends org.apache.lucene.queryParser.QueryPar
// get Analyzer from superclass and tokenize the term
TokenStream source = getAnalyzer().tokenStream(field, new StringReader(termStr));
TermAttribute termAtt = (TermAttribute) source.addAttribute(TermAttribute.class);
TermAttribute termAtt = source.addAttribute(TermAttribute.class);
int countTokens = 0;
while (true) {
@ -188,7 +188,7 @@ public class AnalyzingQueryParser extends org.apache.lucene.queryParser.QueryPar
// get Analyzer from superclass and tokenize the term
TokenStream source = getAnalyzer().tokenStream(field, new StringReader(termStr));
List tlist = new ArrayList();
TermAttribute termAtt = (TermAttribute) source.addAttribute(TermAttribute.class);
TermAttribute termAtt = source.addAttribute(TermAttribute.class);
while (true) {
try {
@ -234,7 +234,7 @@ public class AnalyzingQueryParser extends org.apache.lucene.queryParser.QueryPar
throws ParseException {
// get Analyzer from superclass and tokenize the term
TokenStream source = getAnalyzer().tokenStream(field, new StringReader(termStr));
TermAttribute termAtt = (TermAttribute) source.addAttribute(TermAttribute.class);
TermAttribute termAtt = source.addAttribute(TermAttribute.class);
String nextToken = null;
boolean multipleTokens = false;
@ -269,7 +269,7 @@ public class AnalyzingQueryParser extends org.apache.lucene.queryParser.QueryPar
throws ParseException {
// get Analyzer from superclass and tokenize the terms
TokenStream source = getAnalyzer().tokenStream(field, new StringReader(part1));
TermAttribute termAtt = (TermAttribute) source.addAttribute(TermAttribute.class);
TermAttribute termAtt = source.addAttribute(TermAttribute.class);
boolean multipleTokens = false;
// part1
@ -293,7 +293,7 @@ public class AnalyzingQueryParser extends org.apache.lucene.queryParser.QueryPar
// part2
source = getAnalyzer().tokenStream(field, new StringReader(part2));
termAtt = (TermAttribute) source.addAttribute(TermAttribute.class);
termAtt = source.addAttribute(TermAttribute.class);
try {
if (source.incrementToken()) {

View File

@ -67,8 +67,8 @@ public class TestPrecedenceQueryParser extends LocalizedTestCase {
boolean inPhrase = false;
int savedStart = 0, savedEnd = 0;
TermAttribute termAtt = (TermAttribute) addAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
TermAttribute termAtt = addAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
public boolean incrementToken() throws IOException {
if (inPhrase) {

View File

@ -182,7 +182,7 @@ public class FuzzyLikeThisQuery extends Query
{
if(f.queryString==null) return;
TokenStream ts=analyzer.tokenStream(f.fieldName,new StringReader(f.queryString));
TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
TermAttribute termAtt = ts.addAttribute(TermAttribute.class);
int corpusNumDocs=reader.numDocs();
Term internSavingTemplateTerm =new Term(f.fieldName); //optimization to avoid constructing new Term() objects

View File

@ -829,7 +829,7 @@ public final class MoreLikeThis {
TokenStream ts = analyzer.tokenStream(fieldName, r);
int tokenCount=0;
// for every token
TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
TermAttribute termAtt = ts.addAttribute(TermAttribute.class);
while (ts.incrementToken()) {
String word = termAtt.term();

View File

@ -86,7 +86,7 @@ public final class SimilarityQueries
throws IOException
{
TokenStream ts = a.tokenStream( field, new StringReader( body));
TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
TermAttribute termAtt = ts.addAttribute(TermAttribute.class);
BooleanQuery tmp = new BooleanQuery();
Set already = new HashSet(); // ignore dups

View File

@ -55,11 +55,11 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.util.Parameter;
/**
* This class performs the query parsing using the new query parser implementation, but
* keeps the old {@link QueryParser} API. <br/>
* This class performs the query parsing using the new query parser
* implementation, but keeps the old {@link QueryParser} API. <br/>
* <br/>
* This class should be used when the new query parser features are and
* the old {@link QueryParser} API are needed at the same time. <br/>
* This class should be used when the new query parser features are and the old
* {@link QueryParser} API are needed at the same time. <br/>
*
* @deprecated this class will be removed soon, it's a temporary class to be
* used along the transition from the old query parser to the new
@ -79,12 +79,14 @@ public class QueryParserWrapper {
}
static public final Operator OR = new Operator("OR");
static public final Operator AND = new Operator("AND");
}
// the nested class:
/** Alternative form of QueryParser.Operator.AND */
public static final Operator AND_OPERATOR = Operator.AND;
/** Alternative form of QueryParser.Operator.OR */
public static final Operator OR_OPERATOR = Operator.OR;
@ -111,6 +113,7 @@ public class QueryParserWrapper {
private SyntaxParser syntaxParser = new StandardSyntaxParser();
private StandardQueryConfigHandler config;
private StandardQueryParser qpHelper;
private QueryNodeProcessor processorPipeline;
@ -121,9 +124,9 @@ public class QueryParserWrapper {
public QueryParserWrapper(String defaultField, Analyzer analyzer) {
this.defaultField = defaultField;
this.qpHelper = new StandardQueryParser();
this.config = (StandardQueryConfigHandler) qpHelper.getQueryConfigHandler();
this.qpHelper.setAnalyzer(analyzer);
@ -135,7 +138,7 @@ public class QueryParserWrapper {
StandardQueryParser getQueryParserHelper() {
return qpHelper;
}
public String getField() {
return this.defaultField;
}
@ -144,8 +147,9 @@ public class QueryParserWrapper {
if (this.config != null
&& this.config.hasAttribute(AnalyzerAttribute.class)) {
return ((AnalyzerAttribute) this.config
.getAttribute(AnalyzerAttribute.class)).getAnalyzer();
return this.config.getAttribute(AnalyzerAttribute.class).getAnalyzer();
}
return null;
@ -153,11 +157,10 @@ public class QueryParserWrapper {
}
/**
* Sets the {@link StandardQueryBuilder} used to generate a {@link Query} object
* from the parsed and processed query node tree.
* Sets the {@link StandardQueryBuilder} used to generate a {@link Query}
* object from the parsed and processed query node tree.
*
* @param builder
* the builder
* @param builder the builder
*/
public void setQueryBuilder(StandardQueryBuilder builder) {
this.builder = builder;
@ -168,8 +171,7 @@ public class QueryParserWrapper {
* generated by the
* {@link org.apache.lucene.queryParser.standard.parser.StandardSyntaxParser}.
*
* @param processor
* the processor
* @param processor the processor
*/
public void setQueryProcessor(QueryNodeProcessor processor) {
this.processorPipeline = processor;
@ -181,8 +183,7 @@ public class QueryParserWrapper {
* Sets the {@link QueryConfigHandler} used by the {@link QueryNodeProcessor}
* set to this object.
*
* @param queryConfig
* the query config handler
* @param queryConfig the query config handler
*/
public void setQueryConfig(StandardQueryConfigHandler queryConfig) {
this.config = queryConfig;
@ -221,9 +222,10 @@ public class QueryParserWrapper {
if (this.config != null
&& this.config.hasAttribute(AllowLeadingWildcardAttribute.class)) {
return ((AllowLeadingWildcardAttribute) this.config
.getAttribute(AllowLeadingWildcardAttribute.class))
return this.config.getAttribute(AllowLeadingWildcardAttribute.class)
.isAllowLeadingWildcard();
}
return false;
@ -231,11 +233,13 @@ public class QueryParserWrapper {
}
public MultiTermQuery.RewriteMethod getMultiTermRewriteMethod() {
if (this.config != null
&& this.config.hasAttribute(MultiTermRewriteMethodAttribute.class)) {
return ((MultiTermRewriteMethodAttribute) this.config
.getAttribute(MultiTermRewriteMethodAttribute.class))
return this.config.getAttribute(MultiTermRewriteMethodAttribute.class)
.getMultiTermRewriteMethod();
}
return MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT;
@ -250,8 +254,10 @@ public class QueryParserWrapper {
if (fieldConfig != null) {
if (this.config.hasAttribute(DateResolutionAttribute.class)) {
return ((DateResolutionAttribute) this.config
.getAttribute(DateResolutionAttribute.class)).getDateResolution();
return this.config.getAttribute(DateResolutionAttribute.class)
.getDateResolution();
}
}
@ -266,9 +272,10 @@ public class QueryParserWrapper {
if (this.config != null
&& this.config.hasAttribute(PositionIncrementsAttribute.class)) {
return ((PositionIncrementsAttribute) this.config
.getAttribute(PositionIncrementsAttribute.class))
return this.config.getAttribute(PositionIncrementsAttribute.class)
.isPositionIncrementsEnabled();
}
return false;
@ -286,8 +293,7 @@ public class QueryParserWrapper {
public Locale getLocale() {
if (this.config != null && this.config.hasAttribute(LocaleAttribute.class)) {
return ((LocaleAttribute) this.config.getAttribute(LocaleAttribute.class))
.getLocale();
return this.config.getAttribute(LocaleAttribute.class).getLocale();
}
return Locale.getDefault();
@ -298,9 +304,10 @@ public class QueryParserWrapper {
if (this.config != null
&& this.config.hasAttribute(LowercaseExpandedTermsAttribute.class)) {
return ((LowercaseExpandedTermsAttribute) this.config
.getAttribute(LowercaseExpandedTermsAttribute.class))
return this.config.getAttribute(LowercaseExpandedTermsAttribute.class)
.isLowercaseExpandedTerms();
}
return true;
@ -311,9 +318,10 @@ public class QueryParserWrapper {
if (this.config != null
&& this.config.hasAttribute(AllowLeadingWildcardAttribute.class)) {
return ((DefaultPhraseSlopAttribute) this.config
.getAttribute(DefaultPhraseSlopAttribute.class))
return this.config.getAttribute(DefaultPhraseSlopAttribute.class)
.getDefaultPhraseSlop();
}
return 0;
@ -324,8 +332,10 @@ public class QueryParserWrapper {
if (this.config != null
&& this.config.hasAttribute(RangeCollatorAttribute.class)) {
return ((RangeCollatorAttribute) this.config
.getAttribute(RangeCollatorAttribute.class)).getRangeCollator();
return this.config.getAttribute(RangeCollatorAttribute.class)
.getRangeCollator();
}
return null;
@ -357,7 +367,7 @@ public class QueryParserWrapper {
this.qpHelper.setAllowLeadingWildcard(allowLeadingWildcard);
}
public void setMultiTermRewriteMethod(MultiTermQuery.RewriteMethod method) {
public void setMultiTermRewriteMethod(MultiTermQuery.RewriteMethod method) {
this.qpHelper.setMultiTermRewriteMethod(method);
}
@ -365,8 +375,8 @@ public class QueryParserWrapper {
this.qpHelper.setDateResolution(dateResolution);
}
private Map<CharSequence, DateTools.Resolution> dateRes = new HashMap<CharSequence, DateTools.Resolution>();
private Map<CharSequence, DateTools.Resolution> dateRes = new HashMap<CharSequence, DateTools.Resolution>();
public void setDateResolution(String fieldName, Resolution dateResolution) {
dateRes.put(fieldName, dateResolution);
this.qpHelper.setDateResolution(dateRes);
@ -385,8 +395,8 @@ public class QueryParserWrapper {
if (this.config != null
&& this.config.hasAttribute(DefaultOperatorAttribute.class)) {
return (((DefaultOperatorAttribute) this.config
.getAttribute(DefaultOperatorAttribute.class)).getOperator() == org.apache.lucene.queryParser.standard.config.DefaultOperatorAttribute.Operator.AND) ? AND_OPERATOR
return (this.config.getAttribute(DefaultOperatorAttribute.class)
.getOperator() == org.apache.lucene.queryParser.standard.config.DefaultOperatorAttribute.Operator.AND) ? AND_OPERATOR
: OR_OPERATOR;
}
@ -449,8 +459,7 @@ public class QueryParserWrapper {
}
/**
* @exception ParseException
* throw in overridden method to disallow
* @exception ParseException throw in overridden method to disallow
*/
protected Query getFieldQuery(String field, String queryText)
throws ParseException {
@ -458,7 +467,7 @@ public class QueryParserWrapper {
}
@SuppressWarnings("unchecked")
protected Query getBooleanQuery(List clauses, boolean disableCoord)
protected Query getBooleanQuery(List clauses, boolean disableCoord)
throws ParseException {
throw new UnsupportedOperationException();
}
@ -468,8 +477,7 @@ protected Query getBooleanQuery(List clauses, boolean disableCoord)
* This method may be overridden, for example, to return a SpanNearQuery
* instead of a PhraseQuery.
*
* @exception ParseException
* throw in overridden method to disallow
* @exception ParseException throw in overridden method to disallow
*/
protected Query getFieldQuery(String field, String queryText, int slop)
throws ParseException {
@ -477,8 +485,7 @@ protected Query getBooleanQuery(List clauses, boolean disableCoord)
}
/**
* @exception ParseException
* throw in overridden method to disallow
* @exception ParseException throw in overridden method to disallow
*/
protected Query getRangeQuery(String field, String part1, String part2,
boolean inclusive) throws ParseException {

View File

@ -182,7 +182,7 @@ public class StandardQueryParser extends QueryParserHelper {
* or {@link Operator#OR}.
*/
public Operator getDefaultOperator() {
DefaultOperatorAttribute attr = (DefaultOperatorAttribute) getQueryConfigHandler().getAttribute(DefaultOperatorAttribute.class);
DefaultOperatorAttribute attr = getQueryConfigHandler().getAttribute(DefaultOperatorAttribute.class);
return attr.getOperator();
}
@ -199,7 +199,7 @@ public class StandardQueryParser extends QueryParserHelper {
* the collator to use when constructing {@link RangeQueryNode}s
*/
public void setRangeCollator(Collator collator) {
RangeCollatorAttribute attr = (RangeCollatorAttribute) getQueryConfigHandler().getAttribute(RangeCollatorAttribute.class);
RangeCollatorAttribute attr = getQueryConfigHandler().getAttribute(RangeCollatorAttribute.class);
attr.setDateResolution(collator);
}
@ -208,7 +208,7 @@ public class StandardQueryParser extends QueryParserHelper {
* RangeQuerys.
*/
public Collator getRangeCollator() {
RangeCollatorAttribute attr = (RangeCollatorAttribute) getQueryConfigHandler().getAttribute(RangeCollatorAttribute.class);
RangeCollatorAttribute attr = getQueryConfigHandler().getAttribute(RangeCollatorAttribute.class);
return attr.getRangeCollator();
}
@ -221,7 +221,7 @@ public class StandardQueryParser extends QueryParserHelper {
* above mentioned query is parsed as <code>capital AND of AND Hungary</code>
*/
public void setDefaultOperator(Operator operator) {
DefaultOperatorAttribute attr = (DefaultOperatorAttribute) getQueryConfigHandler().getAttribute(DefaultOperatorAttribute.class);
DefaultOperatorAttribute attr = getQueryConfigHandler().getAttribute(DefaultOperatorAttribute.class);
attr.setOperator(operator);
}
@ -235,7 +235,7 @@ public class StandardQueryParser extends QueryParserHelper {
* Default: false.
*/
public void setLowercaseExpandedTerms(boolean lowercaseExpandedTerms) {
LowercaseExpandedTermsAttribute attr= (LowercaseExpandedTermsAttribute) getQueryConfigHandler().getAttribute(LowercaseExpandedTermsAttribute.class);
LowercaseExpandedTermsAttribute attr = getQueryConfigHandler().getAttribute(LowercaseExpandedTermsAttribute.class);
attr.setLowercaseExpandedTerms(lowercaseExpandedTerms);
}
@ -243,7 +243,7 @@ public class StandardQueryParser extends QueryParserHelper {
* @see #setLowercaseExpandedTerms(boolean)
*/
public boolean getLowercaseExpandedTerms() {
LowercaseExpandedTermsAttribute attr = (LowercaseExpandedTermsAttribute) getQueryConfigHandler().getAttribute(LowercaseExpandedTermsAttribute.class);
LowercaseExpandedTermsAttribute attr = getQueryConfigHandler().getAttribute(LowercaseExpandedTermsAttribute.class);
return attr.isLowercaseExpandedTerms();
}
@ -257,7 +257,7 @@ public class StandardQueryParser extends QueryParserHelper {
* Default: false.
*/
public void setAllowLeadingWildcard(boolean allowLeadingWildcard) {
AllowLeadingWildcardAttribute attr = (AllowLeadingWildcardAttribute) getQueryConfigHandler().getAttribute(AllowLeadingWildcardAttribute.class);
AllowLeadingWildcardAttribute attr = getQueryConfigHandler().getAttribute(AllowLeadingWildcardAttribute.class);
attr.setAllowLeadingWildcard(allowLeadingWildcard);
}
@ -271,7 +271,7 @@ public class StandardQueryParser extends QueryParserHelper {
* Default: false.
*/
public void setEnablePositionIncrements(boolean enabled) {
PositionIncrementsAttribute attr = (PositionIncrementsAttribute) getQueryConfigHandler().getAttribute(PositionIncrementsAttribute.class);
PositionIncrementsAttribute attr = getQueryConfigHandler().getAttribute(PositionIncrementsAttribute.class);
attr.setPositionIncrementsEnabled(enabled);
}
@ -279,7 +279,7 @@ public class StandardQueryParser extends QueryParserHelper {
* @see #setEnablePositionIncrements(boolean)
*/
public boolean getEnablePositionIncrements() {
PositionIncrementsAttribute attr = (PositionIncrementsAttribute) getQueryConfigHandler().getAttribute(PositionIncrementsAttribute.class);
PositionIncrementsAttribute attr = getQueryConfigHandler().getAttribute(PositionIncrementsAttribute.class);
return attr.isPositionIncrementsEnabled();
}
@ -294,7 +294,7 @@ public class StandardQueryParser extends QueryParserHelper {
* not relevant then use this change the rewrite method.
*/
public void setMultiTermRewriteMethod(MultiTermQuery.RewriteMethod method) {
MultiTermRewriteMethodAttribute attr = (MultiTermRewriteMethodAttribute) getQueryConfigHandler().getAttribute(MultiTermRewriteMethodAttribute.class);
MultiTermRewriteMethodAttribute attr = getQueryConfigHandler().getAttribute(MultiTermRewriteMethodAttribute.class);
attr.setMultiTermRewriteMethod(method);
}
@ -302,7 +302,7 @@ public class StandardQueryParser extends QueryParserHelper {
* @see #setMultiTermRewriteMethod(org.apache.lucene.search.MultiTermQuery.RewriteMethod)
*/
public MultiTermQuery.RewriteMethod getMultiTermRewriteMethod() {
MultiTermRewriteMethodAttribute attr =(MultiTermRewriteMethodAttribute) getQueryConfigHandler().getAttribute(MultiTermRewriteMethodAttribute.class);
MultiTermRewriteMethodAttribute attr = getQueryConfigHandler().getAttribute(MultiTermRewriteMethodAttribute.class);
return attr.getMultiTermRewriteMethod();
}
@ -312,7 +312,7 @@ public class StandardQueryParser extends QueryParserHelper {
fields = new CharSequence[0];
}
MultiFieldAttribute attr = (MultiFieldAttribute) getQueryConfigHandler().addAttribute(MultiFieldAttribute.class);
MultiFieldAttribute attr = getQueryConfigHandler().addAttribute(MultiFieldAttribute.class);
attr.setFields(fields);
}
@ -324,7 +324,7 @@ public class StandardQueryParser extends QueryParserHelper {
* The fuzzyPrefixLength to set.
*/
public void setFuzzyPrefixLength(int fuzzyPrefixLength) {
FuzzyAttribute attr = (FuzzyAttribute) getQueryConfigHandler().addAttribute(FuzzyAttribute.class);
FuzzyAttribute attr = getQueryConfigHandler().addAttribute(FuzzyAttribute.class);
attr.setPrefixLength(fuzzyPrefixLength);
}
@ -332,7 +332,7 @@ public class StandardQueryParser extends QueryParserHelper {
* Set locale used by date range parsing.
*/
public void setLocale(Locale locale) {
LocaleAttribute attr = (LocaleAttribute) getQueryConfigHandler().addAttribute(LocaleAttribute.class);
LocaleAttribute attr = getQueryConfigHandler().addAttribute(LocaleAttribute.class);
attr.setLocale(locale);
}
@ -340,7 +340,7 @@ public class StandardQueryParser extends QueryParserHelper {
* Returns current locale, allowing access by subclasses.
*/
public Locale getLocale() {
LocaleAttribute attr = (LocaleAttribute) getQueryConfigHandler().addAttribute(LocaleAttribute.class);
LocaleAttribute attr = getQueryConfigHandler().addAttribute(LocaleAttribute.class);
return attr.getLocale();
}
@ -349,12 +349,12 @@ public class StandardQueryParser extends QueryParserHelper {
* required. Default value is zero.
*/
public void setDefaultPhraseSlop(int defaultPhraseSlop) {
DefaultPhraseSlopAttribute attr = (DefaultPhraseSlopAttribute) getQueryConfigHandler().addAttribute(DefaultPhraseSlopAttribute.class);
DefaultPhraseSlopAttribute attr = getQueryConfigHandler().addAttribute(DefaultPhraseSlopAttribute.class);
attr.setDefaultPhraseSlop(defaultPhraseSlop);
}
public void setAnalyzer(Analyzer analyzer) {
AnalyzerAttribute attr= (AnalyzerAttribute) getQueryConfigHandler().getAttribute(AnalyzerAttribute.class);
AnalyzerAttribute attr = getQueryConfigHandler().getAttribute(AnalyzerAttribute.class);
attr.setAnalyzer(analyzer);
}
@ -362,7 +362,7 @@ public class StandardQueryParser extends QueryParserHelper {
QueryConfigHandler config = this.getQueryConfigHandler();
if ( config.hasAttribute(AnalyzerAttribute.class)) {
AnalyzerAttribute attr= (AnalyzerAttribute) config.getAttribute(AnalyzerAttribute.class);
AnalyzerAttribute attr = config.getAttribute(AnalyzerAttribute.class);
return attr.getAnalyzer();
}
@ -373,7 +373,7 @@ public class StandardQueryParser extends QueryParserHelper {
* @see #setAllowLeadingWildcard(boolean)
*/
public boolean getAllowLeadingWildcard() {
AllowLeadingWildcardAttribute attr = (AllowLeadingWildcardAttribute) getQueryConfigHandler().addAttribute(AllowLeadingWildcardAttribute.class);
AllowLeadingWildcardAttribute attr = getQueryConfigHandler().addAttribute(AllowLeadingWildcardAttribute.class);
return attr.isAllowLeadingWildcard();
}
@ -381,7 +381,7 @@ public class StandardQueryParser extends QueryParserHelper {
* Get the minimal similarity for fuzzy queries.
*/
public float getFuzzyMinSim() {
FuzzyAttribute attr = (FuzzyAttribute) getQueryConfigHandler().addAttribute(FuzzyAttribute.class);
FuzzyAttribute attr = getQueryConfigHandler().addAttribute(FuzzyAttribute.class);
return attr.getFuzzyMinSimilarity();
}
@ -391,7 +391,7 @@ public class StandardQueryParser extends QueryParserHelper {
* @return Returns the fuzzyPrefixLength.
*/
public int getFuzzyPrefixLength() {
FuzzyAttribute attr = (FuzzyAttribute) getQueryConfigHandler().addAttribute(FuzzyAttribute.class);
FuzzyAttribute attr = getQueryConfigHandler().addAttribute(FuzzyAttribute.class);
return attr.getPrefixLength();
}
@ -399,7 +399,7 @@ public class StandardQueryParser extends QueryParserHelper {
* Gets the default slop for phrases.
*/
public int getPhraseSlop() {
DefaultPhraseSlopAttribute attr = (DefaultPhraseSlopAttribute) getQueryConfigHandler().addAttribute(DefaultPhraseSlopAttribute.class);
DefaultPhraseSlopAttribute attr = getQueryConfigHandler().addAttribute(DefaultPhraseSlopAttribute.class);
return attr.getDefaultPhraseSlop();
}
@ -408,22 +408,22 @@ public class StandardQueryParser extends QueryParserHelper {
* {@link FuzzyQuery#defaultMinSimilarity}.
*/
public void setFuzzyMinSim(float fuzzyMinSim) {
FuzzyAttribute attr = (FuzzyAttribute) getQueryConfigHandler().addAttribute(FuzzyAttribute.class);
FuzzyAttribute attr = getQueryConfigHandler().addAttribute(FuzzyAttribute.class);
attr.setFuzzyMinSimilarity(fuzzyMinSim);
}
public void setFieldsBoost(Map<CharSequence, Float> boosts) {
FieldBoostMapAttribute attr = (FieldBoostMapAttribute) getQueryConfigHandler().addAttribute(FieldBoostMapAttribute.class);
FieldBoostMapAttribute attr = getQueryConfigHandler().addAttribute(FieldBoostMapAttribute.class);
attr.setFieldBoostMap(boosts);
}
public void setDateResolution(DateTools.Resolution dateResolution) {
DateResolutionAttribute attr = (DateResolutionAttribute) getQueryConfigHandler().addAttribute(DateResolutionAttribute.class);
DateResolutionAttribute attr = getQueryConfigHandler().addAttribute(DateResolutionAttribute.class);
attr.setDateResolution(dateResolution);
}
public void setDateResolution(Map<CharSequence, DateTools.Resolution> dateRes) {
FieldDateResolutionMapAttribute attr = (FieldDateResolutionMapAttribute) getQueryConfigHandler().addAttribute(FieldDateResolutionMapAttribute.class);
FieldDateResolutionMapAttribute attr = getQueryConfigHandler().addAttribute(FieldDateResolutionMapAttribute.class);
attr.setFieldDateResolutionMap(dateRes);
}

View File

@ -44,8 +44,8 @@ public class FieldBoostMapFCListener implements FieldConfigListener {
public void buildFieldConfig(FieldConfig fieldConfig) {
if (this.config.hasAttribute(FieldBoostMapAttribute.class)) {
FieldBoostMapAttribute fieldBoostMapAttr = (FieldBoostMapAttribute) this.config.getAttribute(FieldBoostMapAttribute.class);
BoostAttribute boostAttr = (BoostAttribute) fieldConfig.addAttribute(BoostAttribute.class);
FieldBoostMapAttribute fieldBoostMapAttr = this.config.getAttribute(FieldBoostMapAttribute.class);
BoostAttribute boostAttr = fieldConfig.addAttribute(BoostAttribute.class);
Float boost = fieldBoostMapAttr.getFieldBoostMap().get(fieldConfig.getFieldName());

View File

@ -24,9 +24,10 @@ import org.apache.lucene.queryParser.core.config.QueryConfigHandler;
/**
* This listener listens for every field configuration request and assign a
* {@link DateResolutionAttribute} to the equivalent {@link FieldConfig} based on a
* defined map: fieldName -> DateTools.Resolution stored in {@link FieldDateResolutionMapAttribute}
* in the {@link DateResolutionAttribute}.
* {@link DateResolutionAttribute} to the equivalent {@link FieldConfig} based
* on a defined map: fieldName -> DateTools.Resolution stored in
* {@link FieldDateResolutionMapAttribute} in the
* {@link DateResolutionAttribute}.
*
* @see DateResolutionAttribute
* @see FieldDateResolutionMapAttribute
@ -38,27 +39,27 @@ public class FieldDateResolutionFCListener implements FieldConfigListener {
private static final long serialVersionUID = -5929802948798314067L;
private QueryConfigHandler config = null;
public FieldDateResolutionFCListener(QueryConfigHandler config) {
this.config = config;
}
public void buildFieldConfig(FieldConfig fieldConfig) {
DateResolutionAttribute fieldDateResAttr = (DateResolutionAttribute) fieldConfig
DateResolutionAttribute fieldDateResAttr = fieldConfig
.addAttribute(DateResolutionAttribute.class);
DateTools.Resolution dateRes = null;
if (this.config.hasAttribute(FieldDateResolutionMapAttribute.class)) {
FieldDateResolutionMapAttribute dateResMapAttr = (FieldDateResolutionMapAttribute) this.config
FieldDateResolutionMapAttribute dateResMapAttr = this.config
.addAttribute(FieldDateResolutionMapAttribute.class);
dateRes = dateResMapAttr.getFieldDateResolutionMap().get(
fieldConfig.getFieldName().toString());
}
if (dateRes == null) {
if (this.config.hasAttribute(DateResolutionAttribute.class)) {
DateResolutionAttribute dateResAttr = (DateResolutionAttribute) this.config
DateResolutionAttribute dateResAttr = this.config
.addAttribute(DateResolutionAttribute.class);
dateRes = dateResAttr.getDateResolution();

View File

@ -49,7 +49,7 @@ public class AllowLeadingWildcardProcessor extends QueryNodeProcessorImpl {
if (getQueryConfigHandler().hasAttribute(AllowLeadingWildcardAttribute.class)) {
AllowLeadingWildcardAttribute alwAttr= (AllowLeadingWildcardAttribute) getQueryConfigHandler().getAttribute(AllowLeadingWildcardAttribute.class);
AllowLeadingWildcardAttribute alwAttr= getQueryConfigHandler().getAttribute(AllowLeadingWildcardAttribute.class);
if (!alwAttr.isAllowLeadingWildcard()) {
return super.process(queryTree);
}

View File

@ -80,17 +80,16 @@ public class AnalyzerQueryNodeProcessor extends QueryNodeProcessorImpl {
if (getQueryConfigHandler().hasAttribute(AnalyzerAttribute.class)) {
this.analyzer = ((AnalyzerAttribute) getQueryConfigHandler()
.getAttribute(AnalyzerAttribute.class)).getAnalyzer();
this.analyzer = getQueryConfigHandler().getAttribute(
AnalyzerAttribute.class).getAnalyzer();
this.positionIncrementsEnabled = false;
if (getQueryConfigHandler().hasAttribute(
PositionIncrementsAttribute.class)) {
if (((PositionIncrementsAttribute) getQueryConfigHandler()
.getAttribute(PositionIncrementsAttribute.class))
.isPositionIncrementsEnabled()) {
if (getQueryConfigHandler().getAttribute(
PositionIncrementsAttribute.class).isPositionIncrementsEnabled()) {
this.positionIncrementsEnabled = true;
@ -130,8 +129,7 @@ public class AnalyzerQueryNodeProcessor extends QueryNodeProcessorImpl {
boolean severalTokensAtSamePosition = false;
if (buffer.hasAttribute(PositionIncrementAttribute.class)) {
posIncrAtt = (PositionIncrementAttribute) buffer
.getAttribute(PositionIncrementAttribute.class);
posIncrAtt = buffer.getAttribute(PositionIncrementAttribute.class);
}
try {
@ -167,8 +165,7 @@ public class AnalyzerQueryNodeProcessor extends QueryNodeProcessorImpl {
return new NoTokenFoundQueryNode();
}
TermAttribute termAtt = (TermAttribute) buffer
.getAttribute(TermAttribute.class);
TermAttribute termAtt = buffer.getAttribute(TermAttribute.class);
if (numTokens == 0) {
return new NoTokenFoundQueryNode();
@ -209,7 +206,8 @@ public class AnalyzerQueryNodeProcessor extends QueryNodeProcessorImpl {
}
return new GroupQueryNode(new StandardBooleanQueryNode(children, true));
return new GroupQueryNode(
new StandardBooleanQueryNode(children, true));
} else {
// phrase query:

View File

@ -52,7 +52,7 @@ public class BoostQueryNodeProcessor extends QueryNodeProcessorImpl {
FieldConfig fieldConfig = config.getFieldConfig(fieldNode.getField());
if (fieldConfig != null && fieldConfig.hasAttribute(BoostAttribute.class)) {
BoostAttribute boostAttr = (BoostAttribute) fieldConfig.getAttribute(BoostAttribute.class);
BoostAttribute boostAttr = fieldConfig.getAttribute(BoostAttribute.class);
return new BoostQueryNode(node, boostAttr.getBoost());

View File

@ -55,9 +55,8 @@ public class DefaultPhraseSlopQueryNodeProcessor extends QueryNodeProcessorImpl
if (queryConfig != null) {
if (queryConfig.hasAttribute(DefaultPhraseSlopAttribute.class)) {
this.defaultPhraseSlop = ((DefaultPhraseSlopAttribute) queryConfig
.getAttribute(DefaultPhraseSlopAttribute.class))
.getDefaultPhraseSlop();
this.defaultPhraseSlop = queryConfig.getAttribute(
DefaultPhraseSlopAttribute.class).getDefaultPhraseSlop();
return super.process(queryTree);

View File

@ -55,8 +55,7 @@ public class FuzzyQueryNodeProcessor extends QueryNodeProcessorImpl {
QueryConfigHandler config = getQueryConfigHandler();
if (config != null && config.hasAttribute(FuzzyAttribute.class)) {
FuzzyAttribute fuzzyAttr = (FuzzyAttribute) config
.getAttribute(FuzzyAttribute.class);
FuzzyAttribute fuzzyAttr = config.getAttribute(FuzzyAttribute.class);
fuzzyNode.setPrefixLength(fuzzyAttr.getPrefixLength());
if (fuzzyNode.getSimilarity() < 0) {

View File

@ -70,8 +70,8 @@ public class GroupQueryNodeProcessor implements QueryNodeProcessor {
"DefaultOperatorAttribute should be set on the QueryConfigHandler");
}
usingAnd = Operator.AND == ((DefaultOperatorAttribute) getQueryConfigHandler()
.getAttribute(DefaultOperatorAttribute.class)).getOperator();
this.usingAnd = Operator.AND == getQueryConfigHandler()
.getAttribute(DefaultOperatorAttribute.class).getOperator();
if (queryTree instanceof GroupQueryNode) {
queryTree = ((GroupQueryNode) queryTree).getChild();

View File

@ -52,10 +52,11 @@ public class LowercaseExpandedTermsQueryNodeProcessor extends
if (getQueryConfigHandler().hasAttribute(
LowercaseExpandedTermsAttribute.class)) {
if (((LowercaseExpandedTermsAttribute) getQueryConfigHandler()
.getAttribute(LowercaseExpandedTermsAttribute.class))
.isLowercaseExpandedTerms()) {
if (getQueryConfigHandler().getAttribute(
LowercaseExpandedTermsAttribute.class).isLowercaseExpandedTerms()) {
return super.process(queryTree);
}
}
@ -69,7 +70,7 @@ public class LowercaseExpandedTermsQueryNodeProcessor extends
if (node instanceof WildcardQueryNode || node instanceof FuzzyQueryNode
|| node instanceof ParametricQueryNode) {
FieldQueryNode fieldNode = (FieldQueryNode) node;
FieldQueryNode fieldNode = (FieldQueryNode) node;
fieldNode.setText(UnescapedCharSequence.toLowerCase(fieldNode.getText()));
}

View File

@ -81,8 +81,8 @@ public class MultiFieldQueryNodeProcessor extends QueryNodeProcessorImpl {
"MultiFieldAttribute should be set on the QueryConfigHandler");
}
CharSequence[] fields = ((MultiFieldAttribute) getQueryConfigHandler()
.getAttribute(MultiFieldAttribute.class)).getFields();
CharSequence[] fields = getQueryConfigHandler().getAttribute(
MultiFieldAttribute.class).getFields();
if (fields != null && fields.length > 0) {
fieldNode.setField(fields[0]);

View File

@ -27,31 +27,37 @@ import org.apache.lucene.queryParser.standard.nodes.WildcardQueryNode;
import org.apache.lucene.search.MultiTermQuery;
/**
* This processor instates the default {@link
* org.apache.lucene.search.MultiTermQuery.RewriteMethod}, {@link
* MultiTermQuery#CONSTANT_SCORE_AUTO_REWRITE_DEFAULT}, for
* multi-term query nodes.
* This processor instates the default
* {@link org.apache.lucene.search.MultiTermQuery.RewriteMethod},
* {@link MultiTermQuery#CONSTANT_SCORE_AUTO_REWRITE_DEFAULT}, for multi-term
* query nodes.
*/
public class MultiTermRewriteMethodProcessor extends QueryNodeProcessorImpl {
public class MultiTermRewriteMethodProcessor extends QueryNodeProcessorImpl {
protected QueryNode postProcessNode(QueryNode node) {
// set setMultiTermRewriteMethod for WildcardQueryNode and PrefixWildcardQueryNode
if (node instanceof WildcardQueryNode || node instanceof ParametricRangeQueryNode) {
if (!getQueryConfigHandler().hasAttribute(MultiTermRewriteMethodAttribute.class)) {
// This should not happen, this attribute is created in the StandardQueryConfigHandler
throw new IllegalArgumentException("MultiTermRewriteMethodAttribute should be set on the QueryConfigHandler");
// set setMultiTermRewriteMethod for WildcardQueryNode and
// PrefixWildcardQueryNode
if (node instanceof WildcardQueryNode
|| node instanceof ParametricRangeQueryNode) {
if (!getQueryConfigHandler().hasAttribute(
MultiTermRewriteMethodAttribute.class)) {
// This should not happen, this attribute is created in the
// StandardQueryConfigHandler
throw new IllegalArgumentException(
"MultiTermRewriteMethodAttribute should be set on the QueryConfigHandler");
}
//read the attribute value and use a TAG to take the value to the Builder
MultiTermQuery.RewriteMethod rewriteMethod = ((MultiTermRewriteMethodAttribute) getQueryConfigHandler()
.getAttribute(MultiTermRewriteMethodAttribute.class))
// read the attribute value and use a TAG to take the value to the Builder
MultiTermQuery.RewriteMethod rewriteMethod = getQueryConfigHandler()
.getAttribute(MultiTermRewriteMethodAttribute.class)
.getMultiTermRewriteMethod();
node.setTag(MultiTermRewriteMethodAttribute.TAG_ID, rewriteMethod);
}
return node;
}

View File

@ -84,13 +84,17 @@ public class ParametricRangeQueryNodeProcessor extends QueryNodeProcessorImpl {
boolean inclusive = false;
if (getQueryConfigHandler().hasAttribute(RangeCollatorAttribute.class)) {
collator = ((RangeCollatorAttribute) getQueryConfigHandler()
.getAttribute(RangeCollatorAttribute.class)).getRangeCollator();
collator = getQueryConfigHandler().getAttribute(
RangeCollatorAttribute.class).getRangeCollator();
}
if (getQueryConfigHandler().hasAttribute(LocaleAttribute.class)) {
locale = ((LocaleAttribute) getQueryConfigHandler().getAttribute(
LocaleAttribute.class)).getLocale();
locale = getQueryConfigHandler().getAttribute(LocaleAttribute.class)
.getLocale();
}
FieldConfig fieldConfig = getQueryConfigHandler().getFieldConfig(
@ -99,8 +103,10 @@ public class ParametricRangeQueryNodeProcessor extends QueryNodeProcessorImpl {
if (fieldConfig != null) {
if (fieldConfig.hasAttribute(DateResolutionAttribute.class)) {
dateRes = ((DateResolutionAttribute) fieldConfig
.getAttribute(DateResolutionAttribute.class)).getDateResolution();
dateRes = fieldConfig.getAttribute(DateResolutionAttribute.class)
.getDateResolution();
}
}

View File

@ -116,11 +116,14 @@ public class TestSpanQueryParser extends TestCase {
this.spansQueryTreeBuilder = new SpansQueryTreeBuilder();
// set up the processor pipeline
this.spanProcessorPipeline.setQueryConfigHandler(this.spanQueryConfigHandler);
this.spanProcessorPipeline
.setQueryConfigHandler(this.spanQueryConfigHandler);
this.spanProcessorPipeline.addProcessor(new WildcardQueryNodeProcessor());
this.spanProcessorPipeline.addProcessor(new SpansValidatorQueryNodeProcessor());
this.spanProcessorPipeline.addProcessor(new UniqueFieldQueryNodeProcessor());
this.spanProcessorPipeline
.addProcessor(new SpansValidatorQueryNodeProcessor());
this.spanProcessorPipeline
.addProcessor(new UniqueFieldQueryNodeProcessor());
}
@ -130,7 +133,7 @@ public class TestSpanQueryParser extends TestCase {
public SpanQuery getSpanQuery(CharSequence uniqueField, CharSequence query)
throws QueryNodeException {
UniqueFieldAttribute uniqueFieldAtt = (UniqueFieldAttribute) this.spanQueryConfigHandler
UniqueFieldAttribute uniqueFieldAtt = this.spanQueryConfigHandler
.getAttribute(UniqueFieldAttribute.class);
uniqueFieldAtt.setUniqueField(uniqueField);

View File

@ -119,7 +119,7 @@ public class TestSpanQueryParserSimpleSample extends TestCase {
// create a config handler with a attribute used in
// UniqueFieldQueryNodeProcessor
QueryConfigHandler spanQueryConfigHandler = new SpansQueryConfigHandler();
UniqueFieldAttribute uniqueFieldAtt = (UniqueFieldAttribute) spanQueryConfigHandler
UniqueFieldAttribute uniqueFieldAtt = spanQueryConfigHandler
.getAttribute(UniqueFieldAttribute.class);
uniqueFieldAtt.setUniqueField("index");

View File

@ -62,8 +62,8 @@ public class UniqueFieldQueryNodeProcessor extends QueryNodeProcessorImpl {
"UniqueFieldAttribute should be defined in the config handler!");
}
CharSequence uniqueField = ((UniqueFieldAttribute) queryConfig
.getAttribute(UniqueFieldAttribute.class)).getUniqueField();
CharSequence uniqueField = queryConfig.getAttribute(
UniqueFieldAttribute.class).getUniqueField();
fieldNode.setField(uniqueField);

View File

@ -174,10 +174,10 @@ public class TestMultiAnalyzerQPHelper extends LuceneTestCase {
public TestFilter(TokenStream in) {
super(in);
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
termAtt = addAttribute(TermAttribute.class);
posIncrAtt = addAttribute(PositionIncrementAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
typeAtt = addAttribute(TypeAttribute.class);
}
@ -278,8 +278,8 @@ public class TestMultiAnalyzerQPHelper extends LuceneTestCase {
public TestPosIncrementFilter(TokenStream in) {
super(in);
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
termAtt = addAttribute(TermAttribute.class);
posIncrAtt = addAttribute(PositionIncrementAttribute.class);
}
private Token token = new Token();

View File

@ -168,10 +168,10 @@ public class TestMultiAnalyzerWrapper extends LuceneTestCase {
public TestFilter(TokenStream in) {
super(in);
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
termAtt = addAttribute(TermAttribute.class);
posIncrAtt = addAttribute(PositionIncrementAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
typeAtt = addAttribute(TypeAttribute.class);
}
@ -272,8 +272,8 @@ public class TestMultiAnalyzerWrapper extends LuceneTestCase {
public TestPosIncrementFilter(TokenStream in) {
super(in);
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
termAtt = addAttribute(TermAttribute.class);
posIncrAtt = addAttribute(PositionIncrementAttribute.class);
}
private Token token = new Token();

View File

@ -106,8 +106,8 @@ public class TestQPHelper extends LocalizedTestCase {
*/
public QPTestFilter(TokenStream in) {
super(in);
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
termAtt = addAttribute(TermAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
}
boolean inPhrase = false;
@ -1189,8 +1189,8 @@ public class TestQPHelper extends LocalizedTestCase {
if (upto == 4) {
return false;
}
PositionIncrementAttribute posIncr = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
TermAttribute term = (TermAttribute) addAttribute(TermAttribute.class);
PositionIncrementAttribute posIncr = addAttribute(PositionIncrementAttribute.class);
TermAttribute term = addAttribute(TermAttribute.class);
if (upto == 0) {
posIncr.setPositionIncrement(1);
term.setTermBuffer("a");

View File

@ -101,8 +101,8 @@ public class TestQueryParserWrapper extends LocalizedTestCase {
*/
public QPTestFilter(TokenStream in) {
super(in);
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
termAtt = addAttribute(TermAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
}
boolean inPhrase = false;

View File

@ -39,7 +39,7 @@ public class SnowballFilter extends TokenFilter {
public SnowballFilter(TokenStream input, SnowballProgram stemmer) {
super(input);
this.stemmer = stemmer;
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
/**
@ -60,7 +60,7 @@ public class SnowballFilter extends TokenFilter {
} catch (Exception e) {
throw new RuntimeException(e.toString());
}
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
termAtt = addAttribute(TermAttribute.class);
}
/** Returns the next input Token, after being stemmed */

View File

@ -69,12 +69,12 @@ public class TestSnowball extends BaseTokenStreamTestCase {
public void testFilterTokens() throws Exception {
SnowballFilter filter = new SnowballFilter(new TestTokenStream(), "English");
TermAttribute termAtt = (TermAttribute) filter.getAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = (OffsetAttribute) filter.getAttribute(OffsetAttribute.class);
TypeAttribute typeAtt = (TypeAttribute) filter.getAttribute(TypeAttribute.class);
PayloadAttribute payloadAtt = (PayloadAttribute) filter.getAttribute(PayloadAttribute.class);
PositionIncrementAttribute posIncAtt = (PositionIncrementAttribute) filter.getAttribute(PositionIncrementAttribute.class);
FlagsAttribute flagsAtt = (FlagsAttribute) filter.getAttribute(FlagsAttribute.class);
TermAttribute termAtt = filter.getAttribute(TermAttribute.class);
OffsetAttribute offsetAtt = filter.getAttribute(OffsetAttribute.class);
TypeAttribute typeAtt = filter.getAttribute(TypeAttribute.class);
PayloadAttribute payloadAtt = filter.getAttribute(PayloadAttribute.class);
PositionIncrementAttribute posIncAtt = filter.getAttribute(PositionIncrementAttribute.class);
FlagsAttribute flagsAtt = filter.getAttribute(FlagsAttribute.class);
filter.incrementToken();
@ -97,12 +97,12 @@ public class TestSnowball extends BaseTokenStreamTestCase {
TestTokenStream() {
super();
termAtt = (TermAttribute) addAttribute(TermAttribute.class);
offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
payloadAtt = (PayloadAttribute) addAttribute(PayloadAttribute.class);
posIncAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
flagsAtt = (FlagsAttribute) addAttribute(FlagsAttribute.class);
termAtt = addAttribute(TermAttribute.class);
offsetAtt = addAttribute(OffsetAttribute.class);
typeAtt = addAttribute(TypeAttribute.class);
payloadAtt = addAttribute(PayloadAttribute.class);
posIncAtt = addAttribute(PositionIncrementAttribute.class);
flagsAtt = addAttribute(FlagsAttribute.class);
}
public boolean incrementToken() {

View File

@ -181,11 +181,11 @@ public class WikipediaTokenizer extends Tokenizer {
private void init(int tokenOutput, Set untokenizedTypes) {
this.tokenOutput = tokenOutput;
this.untokenizedTypes = untokenizedTypes;
this.offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);
this.typeAtt = (TypeAttribute) addAttribute(TypeAttribute.class);
this.posIncrAtt = (PositionIncrementAttribute) addAttribute(PositionIncrementAttribute.class);
this.termAtt = (TermAttribute) addAttribute(TermAttribute.class);
this.flagsAtt = (FlagsAttribute) addAttribute(FlagsAttribute.class);
this.offsetAtt = addAttribute(OffsetAttribute.class);
this.typeAtt = addAttribute(TypeAttribute.class);
this.posIncrAtt = addAttribute(PositionIncrementAttribute.class);
this.termAtt = addAttribute(TermAttribute.class);
this.flagsAtt = addAttribute(FlagsAttribute.class);
}
/** @deprecated Will be removed in Lucene 3.0. This method is final, as it should

View File

@ -128,8 +128,8 @@ public class WikipediaTokenizerTest extends BaseTokenStreamTestCase {
int numBoldItalics = 0;
int numCategory = 0;
int numCitation = 0;
TermAttribute termAtt = (TermAttribute) tf.addAttribute(TermAttribute.class);
TypeAttribute typeAtt = (TypeAttribute) tf.addAttribute(TypeAttribute.class);
TermAttribute termAtt = tf.addAttribute(TermAttribute.class);
TypeAttribute typeAtt = tf.addAttribute(TypeAttribute.class);
while (tf.incrementToken()) {
String tokText = termAtt.term();
@ -164,8 +164,8 @@ public class WikipediaTokenizerTest extends BaseTokenStreamTestCase {
}
private void checkLinkPhrases(WikipediaTokenizer tf) throws IOException {
TermAttribute termAtt = (TermAttribute) tf.addAttribute(TermAttribute.class);
PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) tf.addAttribute(PositionIncrementAttribute.class);
TermAttribute termAtt = tf.addAttribute(TermAttribute.class);
PositionIncrementAttribute posIncrAtt = tf.addAttribute(PositionIncrementAttribute.class);
assertTrue(tf.incrementToken());
assertTrue(termAtt.term() + " is not equal to " + "click", termAtt.term().equals("click") == true);
@ -229,8 +229,8 @@ public class WikipediaTokenizerTest extends BaseTokenStreamTestCase {
public void testLinks() throws Exception {
String test = "[http://lucene.apache.org/java/docs/index.html#news here] [http://lucene.apache.org/java/docs/index.html?b=c here] [https://lucene.apache.org/java/docs/index.html?b=c here]";
WikipediaTokenizer tf = new WikipediaTokenizer(new StringReader(test));
TermAttribute termAtt = (TermAttribute) tf.addAttribute(TermAttribute.class);
TypeAttribute typeAtt = (TypeAttribute) tf.addAttribute(TypeAttribute.class);
TermAttribute termAtt = tf.addAttribute(TermAttribute.class);
TypeAttribute typeAtt = tf.addAttribute(TypeAttribute.class);
assertTrue(tf.incrementToken());
assertTrue(termAtt.term() + " is not equal to " + "http://lucene.apache.org/java/docs/index.html#news",
@ -262,9 +262,9 @@ public class WikipediaTokenizerTest extends BaseTokenStreamTestCase {
checkLinkPhrases(tf);
String test = "[[Category:a b c d]] [[Category:e f g]] [[link here]] [[link there]] ''italics here'' something ''more italics'' [[Category:h i j]]";
tf = new WikipediaTokenizer(new StringReader(test), WikipediaTokenizer.UNTOKENIZED_ONLY, untoks);
TermAttribute termAtt = (TermAttribute) tf.addAttribute(TermAttribute.class);
PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) tf.addAttribute(PositionIncrementAttribute.class);
OffsetAttribute offsetAtt = (OffsetAttribute) tf.addAttribute(OffsetAttribute.class);
TermAttribute termAtt = tf.addAttribute(TermAttribute.class);
PositionIncrementAttribute posIncrAtt = tf.addAttribute(PositionIncrementAttribute.class);
OffsetAttribute offsetAtt = tf.addAttribute(OffsetAttribute.class);
assertTrue(tf.incrementToken());
assertTrue(termAtt.term() + " is not equal to " + "a b c d",
@ -338,11 +338,11 @@ public class WikipediaTokenizerTest extends BaseTokenStreamTestCase {
String test = "[[Category:a b c d]] [[Category:e f g]] [[link here]] [[link there]] ''italics here'' something ''more italics'' [[Category:h i j]]";
//should output all the indivual tokens plus the untokenized tokens as well. Untokenized tokens
WikipediaTokenizer tf = new WikipediaTokenizer(new StringReader(test), WikipediaTokenizer.BOTH, untoks);
TermAttribute termAtt = (TermAttribute) tf.addAttribute(TermAttribute.class);
TypeAttribute typeAtt = (TypeAttribute) tf.addAttribute(TypeAttribute.class);
PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) tf.addAttribute(PositionIncrementAttribute.class);
OffsetAttribute offsetAtt = (OffsetAttribute) tf.addAttribute(OffsetAttribute.class);
FlagsAttribute flagsAtt = (FlagsAttribute) tf.addAttribute(FlagsAttribute.class);
TermAttribute termAtt = tf.addAttribute(TermAttribute.class);
TypeAttribute typeAtt = tf.addAttribute(TypeAttribute.class);
PositionIncrementAttribute posIncrAtt = tf.addAttribute(PositionIncrementAttribute.class);
OffsetAttribute offsetAtt = tf.addAttribute(OffsetAttribute.class);
FlagsAttribute flagsAtt = tf.addAttribute(FlagsAttribute.class);
assertTrue(tf.incrementToken());
assertTrue(termAtt.term() + " is not equal to " + "a b c d",

Some files were not shown because too many files have changed in this diff Show More