mirror of https://github.com/apache/lucene.git
LUCENE-1775: Change contrib tee/sink filters to use new TokenStream API.
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@800606 13f79535-47bb-0310-9956-ffa450edef68
parent 6a47cbb5f9
commit c91651e4f2
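The commit replaces the deprecated TeeTokenFilter/SinkTokenizer pair with TeeSinkTokenFilter: a SinkFilter decides which attribute states get captured, and a SinkTokenStream replays them. A minimal sketch of the new wiring, distilled from the updated tests in this commit (the input text and class name are illustrative):

    import java.io.IOException;
    import java.io.StringReader;

    import org.apache.lucene.analysis.TeeSinkTokenFilter;
    import org.apache.lucene.analysis.WhitespaceTokenizer;
    import org.apache.lucene.util.AttributeSource;

    public class TeeSinkSketch {
      public static void main(String[] args) throws IOException {
        TeeSinkTokenFilter tee = new TeeSinkTokenFilter(
            new WhitespaceTokenizer(new StringReader("The quick red fox")));
        // the filter is consulted for each token's attribute state
        TeeSinkTokenFilter.SinkTokenStream sink =
            tee.newSinkTokenStream(new TeeSinkTokenFilter.SinkFilter() {
              public boolean accept(AttributeSource source) {
                return true; // real filters inspect the source's attributes here
              }
            });
        while (tee.incrementToken()) {
          // consuming the tee feeds every accepted state into the sink
        }
        sink.reset();
        while (sink.incrementToken()) {
          // replays only the states the filter accepted
        }
      }
    }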
@@ -0,0 +1,70 @@
+package org.apache.lucene.analysis.sinks;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.apache.lucene.analysis.TeeSinkTokenFilter.SinkFilter;
+import org.apache.lucene.analysis.tokenattributes.TermAttribute;
+import org.apache.lucene.util.AttributeSource;
+
+/**
+ * Attempts to parse the {@link org.apache.lucene.analysis.Token#termBuffer()} as a Date using a {@link java.text.DateFormat}.
+ * If the term parses as a Date, the token is added to the sink.
+ **/
+public class DateRecognizerSinkFilter extends SinkFilter {
+  public static final String DATE_TYPE = "date";
+
+  protected DateFormat dateFormat;
+  protected TermAttribute termAtt;
+
+  /**
+   * Uses {@link java.text.SimpleDateFormat#getDateInstance()} as the {@link java.text.DateFormat} object.
+   */
+  public DateRecognizerSinkFilter() {
+    this(SimpleDateFormat.getDateInstance());
+  }
+
+  public DateRecognizerSinkFilter(DateFormat dateFormat) {
+    this.dateFormat = dateFormat;
+  }
+
+  public boolean accept(AttributeSource source) {
+    if (termAtt == null) {
+      termAtt = (TermAttribute) source.getAttribute(TermAttribute.class);
+    }
+    if (termAtt != null) {
+      try {
+        Date date = dateFormat.parse(termAtt.term()); // we don't care about the Date itself, just that the term parses as one
+        if (date != null) {
+          return true;
+        }
+      } catch (ParseException e) {
+        // not a date; fall through and reject the token
+      }
+    }
+    return false;
+  }
+}
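A brief usage sketch for the new filter (not part of this commit; it mirrors the updated test further down, with the same illustrative date pattern — imports and wiring as in the sketch above):

    DateRecognizerSinkFilter sinkFilter =
        new DateRecognizerSinkFilter(new SimpleDateFormat("MM/dd/yyyy"));
    TeeSinkTokenFilter tee = new TeeSinkTokenFilter(
        new WhitespaceTokenizer(new StringReader("sold on 7/11/2006")));
    TeeSinkTokenFilter.SinkTokenStream dates = tee.newSinkTokenStream(sinkFilter);
    // after draining tee, dates replays only the tokens that parsed as dates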
@@ -17,6 +17,7 @@ package org.apache.lucene.analysis.sinks;
  */
 
 import org.apache.lucene.analysis.SinkTokenizer;
+import org.apache.lucene.analysis.TeeSinkTokenFilter;
 import org.apache.lucene.analysis.Token;
 
 import java.text.DateFormat;
@@ -32,7 +33,7 @@ import java.util.Date;
  * <p/>
  * Also marks the sink token with {@link org.apache.lucene.analysis.Token#type()} equal to {@link #DATE_TYPE}
  *
- *
+ * @deprecated Use {@link DateRecognizerSinkFilter} and {@link TeeSinkTokenFilter} instead.
  **/
 public class DateRecognizerSinkTokenizer extends SinkTokenizer {
   public static final String DATE_TYPE = "date";
@@ -0,0 +1,54 @@
+package org.apache.lucene.analysis.sinks;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+
+import org.apache.lucene.analysis.TeeSinkTokenFilter.SinkFilter;
+import org.apache.lucene.util.AttributeSource;
+
+/**
+ * Counts the tokens as they go by and accepts into the sink those whose position falls between lower and upper, exclusive of upper.
+ **/
+public class TokenRangeSinkFilter extends SinkFilter {
+  private int lower;
+  private int upper;
+  private int count;
+
+  public TokenRangeSinkFilter(int lower, int upper) {
+    this.lower = lower;
+    this.upper = upper;
+  }
+
+  public boolean accept(AttributeSource source) {
+    try {
+      if (count >= lower && count < upper) {
+        return true;
+      }
+      return false;
+    } finally {
+      count++; // always advance the position counter, accepted or not
+    }
+  }
+
+  public void reset() throws IOException {
+    count = 0;
+  }
+}
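Note the try/finally in accept(): the counter advances for every token, accepted or not, so the sink captures exactly the tokens at positions lower through upper-1. A brief sketch (illustrative, mirroring the updated test below; wiring as in the first sketch):

    TokenRangeSinkFilter sinkFilter = new TokenRangeSinkFilter(2, 4); // keeps the 3rd and 4th tokens
    TeeSinkTokenFilter tee = new TeeSinkTokenFilter(
        new WhitespaceTokenizer(new StringReader("The quick red fox jumped")));
    TeeSinkTokenFilter.SinkTokenStream rangeToks = tee.newSinkTokenStream(sinkFilter);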
@@ -1,6 +1,7 @@
 package org.apache.lucene.analysis.sinks;
 
 import org.apache.lucene.analysis.SinkTokenizer;
+import org.apache.lucene.analysis.TeeSinkTokenFilter;
 import org.apache.lucene.analysis.Token;
 
 import java.io.IOException;
@@ -24,7 +25,7 @@ import java.io.IOException;
 
 /**
  * Counts the tokens as they go by and saves to the internal list those between the range of lower and upper, exclusive of upper
- *
+ * @deprecated Use {@link TokenRangeSinkFilter} and {@link TeeSinkTokenFilter} instead.
  **/
 public class TokenRangeSinkTokenizer extends SinkTokenizer {
   private int lower;
@@ -0,0 +1,45 @@
+package org.apache.lucene.analysis.sinks;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.analysis.TeeSinkTokenFilter.SinkFilter;
+import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
+import org.apache.lucene.util.AttributeSource;
+
+public class TokenTypeSinkFilter extends SinkFilter {
+  private String typeToMatch;
+  private TypeAttribute typeAtt;
+
+  public TokenTypeSinkFilter(String typeToMatch) {
+    this.typeToMatch = typeToMatch;
+  }
+
+  public boolean accept(AttributeSource source) {
+    if (typeAtt == null) {
+      typeAtt = (TypeAttribute) source.getAttribute(TypeAttribute.class);
+    }
+
+    // accept only tokens whose type matches the configured type
+    if (typeAtt != null && typeToMatch.equals(typeAtt.type())) {
+      return true;
+    }
+
+    return false;
+  }
+}
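A brief sketch (illustrative; "D" is the type that the test's private WordTokenFilter assigns to the token "dogs" in the updated test below):

    TokenTypeSinkFilter sinkFilter = new TokenTypeSinkFilter("D");
    TeeSinkTokenFilter ttf = new TeeSinkTokenFilter(new WordTokenFilter(
        new WhitespaceTokenizer(new StringReader("The quick red fox jumped over the lazy brown dogs"))));
    TeeSinkTokenFilter.SinkTokenStream sink = ttf.newSinkTokenStream(sinkFilter);
    // sink replays only tokens whose TypeAttribute equals "D"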
@@ -17,6 +17,7 @@ package org.apache.lucene.analysis.sinks;
  */
 
 import org.apache.lucene.analysis.SinkTokenizer;
+import org.apache.lucene.analysis.TeeSinkTokenFilter;
 import org.apache.lucene.analysis.Token;
 
 import java.util.List;
@@ -26,6 +27,7 @@ import java.util.List;
  * If the {@link org.apache.lucene.analysis.Token#type()} matches the passed in <code>typeToMatch</code> then
  * add it to the sink
  *
+ * @deprecated Use {@link TokenTypeSinkFilter} and {@link TeeSinkTokenFilter} instead.
  **/
 public class TokenTypeSinkTokenizer extends SinkTokenizer {
 
@@ -16,15 +16,16 @@ package org.apache.lucene.analysis.sinks;
  * limitations under the License.
  */
 
-import junit.framework.TestCase;
-import org.apache.lucene.analysis.TeeTokenFilter;
-import org.apache.lucene.analysis.WhitespaceTokenizer;
-import org.apache.lucene.analysis.Token;
-
-import java.io.StringReader;
 import java.io.IOException;
+import java.io.StringReader;
+import java.text.SimpleDateFormat;
+
+import junit.framework.TestCase;
+
+import org.apache.lucene.analysis.TeeSinkTokenFilter;
+import org.apache.lucene.analysis.WhitespaceTokenizer;
+import org.apache.lucene.analysis.TeeSinkTokenFilter.SinkTokenStream;
 
 public class DateRecognizerSinkTokenizerTest extends TestCase {
 
@@ -40,21 +41,24 @@ public class DateRecognizerSinkTokenizerTest extends TestCase {
   }
 
   public void test() throws IOException {
-    DateRecognizerSinkTokenizer sink = new DateRecognizerSinkTokenizer(new SimpleDateFormat("MM/dd/yyyy"));
+    DateRecognizerSinkFilter sinkFilter = new DateRecognizerSinkFilter(new SimpleDateFormat("MM/dd/yyyy"));
     String test = "The quick red fox jumped over the lazy brown dogs on 7/11/2006 The dogs finally reacted on 7/12/2006";
-    TeeTokenFilter tee = new TeeTokenFilter(new WhitespaceTokenizer(new StringReader(test)), sink);
+    TeeSinkTokenFilter tee = new TeeSinkTokenFilter(new WhitespaceTokenizer(new StringReader(test)));
+    SinkTokenStream sink = tee.newSinkTokenStream(sinkFilter);
     int count = 0;
-    final Token reusableToken = new Token();
-    for (Token nextToken = tee.next(reusableToken); nextToken != null; nextToken = tee.next(reusableToken)) {
-      assertTrue("nextToken is null and it shouldn't be", nextToken != null);
-      if (nextToken.termBuffer()[0] == '7'){
-        assertTrue(nextToken.type() + " is not equal to " + DateRecognizerSinkTokenizer.DATE_TYPE,
-                nextToken.type().equals(DateRecognizerSinkTokenizer.DATE_TYPE) == true);
-      }
+
+    tee.reset();
+    while (tee.incrementToken()) {
       count++;
     }
     assertTrue(count + " does not equal: " + 18, count == 18);
-    assertTrue("sink Size: " + sink.getTokens().size() + " is not: " + 2, sink.getTokens().size() == 2);
+
+    int sinkCount = 0;
+    sink.reset();
+    while (sink.incrementToken()) {
+      sinkCount++;
+    }
+    assertTrue("sink Size: " + sinkCount + " is not: " + 2, sinkCount == 2);
   }
 }
@@ -16,13 +16,14 @@ package org.apache.lucene.analysis.sinks;
  * limitations under the License.
  */
 
-import junit.framework.TestCase;
-import org.apache.lucene.analysis.TeeTokenFilter;
-import org.apache.lucene.analysis.WhitespaceTokenizer;
-import org.apache.lucene.analysis.Token;
-
-import java.io.StringReader;
 import java.io.IOException;
+import java.io.StringReader;
+
+import junit.framework.TestCase;
+
+import org.apache.lucene.analysis.TeeSinkTokenFilter;
+import org.apache.lucene.analysis.WhitespaceTokenizer;
+import org.apache.lucene.analysis.TeeSinkTokenFilter.SinkTokenStream;
 
 public class TokenRangeSinkTokenizerTest extends TestCase {
 
|
|||
}
|
||||
|
||||
public void test() throws IOException {
|
||||
TokenRangeSinkTokenizer rangeToks = new TokenRangeSinkTokenizer(2, 4);
|
||||
TokenRangeSinkFilter sinkFilter = new TokenRangeSinkFilter(2, 4);
|
||||
String test = "The quick red fox jumped over the lazy brown dogs";
|
||||
TeeTokenFilter tee = new TeeTokenFilter(new WhitespaceTokenizer(new StringReader(test)), rangeToks);
|
||||
TeeSinkTokenFilter tee = new TeeSinkTokenFilter(new WhitespaceTokenizer(new StringReader(test)));
|
||||
SinkTokenStream rangeToks = tee.newSinkTokenStream(sinkFilter);
|
||||
|
||||
int count = 0;
|
||||
final Token reusableToken = new Token();
|
||||
for (Token nextToken = tee.next(reusableToken); nextToken != null; nextToken = tee.next(reusableToken)) {
|
||||
assertTrue("nextToken is null and it shouldn't be", nextToken != null);
|
||||
tee.reset();
|
||||
while(tee.incrementToken()) {
|
||||
count++;
|
||||
}
|
||||
|
||||
int sinkCount = 0;
|
||||
rangeToks.reset();
|
||||
while (rangeToks.incrementToken()) {
|
||||
sinkCount++;
|
||||
}
|
||||
|
||||
assertTrue(count + " does not equal: " + 10, count == 10);
|
||||
assertTrue("rangeToks Size: " + rangeToks.getTokens().size() + " is not: " + 2, rangeToks.getTokens().size() == 2);
|
||||
assertTrue("rangeToks Size: " + sinkCount + " is not: " + 2, sinkCount == 2);
|
||||
}
|
||||
}
|
|
@@ -21,11 +21,11 @@ import java.io.StringReader;
 
 import junit.framework.TestCase;
 
-import org.apache.lucene.analysis.TeeTokenFilter;
-import org.apache.lucene.analysis.Token;
+import org.apache.lucene.analysis.TeeSinkTokenFilter;
 import org.apache.lucene.analysis.TokenFilter;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.WhitespaceTokenizer;
+import org.apache.lucene.analysis.TeeSinkTokenFilter.SinkTokenStream;
 import org.apache.lucene.analysis.tokenattributes.TermAttribute;
 import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
 
@@ -44,22 +44,34 @@ public class TokenTypeSinkTokenizerTest extends TestCase {
   }
 
   public void test() throws IOException {
-    TokenTypeSinkTokenizer sink = new TokenTypeSinkTokenizer("D");
+    TokenTypeSinkFilter sinkFilter = new TokenTypeSinkFilter("D");
     String test = "The quick red fox jumped over the lazy brown dogs";
 
-    TeeTokenFilter ttf = new TeeTokenFilter(new WordTokenFilter(new WhitespaceTokenizer(new StringReader(test))), sink);
+    TeeSinkTokenFilter ttf = new TeeSinkTokenFilter(new WordTokenFilter(new WhitespaceTokenizer(new StringReader(test))));
+    SinkTokenStream sink = ttf.newSinkTokenStream(sinkFilter);
+
     boolean seenDogs = false;
-    final Token reusableToken = new Token();
-    for (Token nextToken = ttf.next(reusableToken); nextToken != null; nextToken = ttf.next(reusableToken)) {
-      if (nextToken.term().equals("dogs")) {
+
+    TermAttribute termAtt = (TermAttribute) ttf.addAttribute(TermAttribute.class);
+    TypeAttribute typeAtt = (TypeAttribute) ttf.addAttribute(TypeAttribute.class);
+    ttf.reset();
+    while (ttf.incrementToken()) {
+      if (termAtt.term().equals("dogs")) {
         seenDogs = true;
-        assertTrue(nextToken.type() + " is not equal to " + "D", nextToken.type().equals("D") == true);
+        assertTrue(typeAtt.type() + " is not equal to " + "D", typeAtt.type().equals("D") == true);
       } else {
-        assertTrue(nextToken.type() + " is not null and it should be", nextToken.type().equals("word"));
+        assertTrue(typeAtt.type() + " is not null and it should be", typeAtt.type().equals("word"));
       }
     }
     assertTrue(seenDogs + " does not equal: " + true, seenDogs == true);
-    assertTrue("sink Size: " + sink.getTokens().size() + " is not: " + 1, sink.getTokens().size() == 1);
+
+    int sinkCount = 0;
+    sink.reset();
+    while (sink.incrementToken()) {
+      sinkCount++;
+    }
+
+    assertTrue("sink Size: " + sinkCount + " is not: " + 1, sinkCount == 1);
   }
 
   private class WordTokenFilter extends TokenFilter {
@@ -22,7 +22,6 @@ import java.lang.ref.WeakReference;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.Collections;
 
 import org.apache.lucene.util.AttributeImpl;
 import org.apache.lucene.util.AttributeSource;
@@ -163,12 +162,20 @@ public final class TeeSinkTokenFilter extends TokenFilter {
   /**
    * A filter that decides which {@link AttributeSource} states to store in the sink.
    */
-  public static interface SinkFilter {
+  public static abstract class SinkFilter {
     /**
      * Returns true, iff the current state of the passed-in {@link AttributeSource} shall be stored
      * in the sink.
      */
-    boolean accept(AttributeSource source);
+    public abstract boolean accept(AttributeSource source);
+
+    /**
+     * Called by {@link SinkTokenStream#reset()}. This method does nothing by default
+     * and can optionally be overridden.
+     */
+    public void reset() throws IOException {
+      // nothing to do; can be overridden
+    }
   }
 
   public static final class SinkTokenStream extends TokenStream {
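Turning SinkFilter from an interface into an abstract class lets it ship the default no-op reset() above, so stateless filters need not implement it, while stateful ones such as TokenRangeSinkFilter override it. A hypothetical stateful filter, as a sketch (not from this commit):

    class EveryOtherSinkFilter extends TeeSinkTokenFilter.SinkFilter {
      private int n;
      public boolean accept(AttributeSource source) {
        return (n++ % 2) == 0; // keep every other token's state
      }
      public void reset() throws IOException {
        n = 0; // invoked via SinkTokenStream.reset()
      }
    }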
@@ -248,7 +248,7 @@ public class TestTeeSinkTokenFilter extends LuceneTestCase {
   }
 }
 
-class ModuloSinkFilter implements TeeSinkTokenFilter.SinkFilter {
+class ModuloSinkFilter extends TeeSinkTokenFilter.SinkFilter {
   int count = 0;
   int modCount;
 