- Applied a patch from Grant Ingersoll

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@150189 13f79535-47bb-0310-9956-ffa450edef68
Otis Gospodnetic 2004-02-05 13:45:55 +00:00
parent f37a66bb2f
commit eb0c2853bb
1 changed file with 63 additions and 47 deletions

TestStopAnalyzer.java

@@ -1,60 +1,76 @@
 package org.apache.lucene.analysis;
 
 /**
  * Copyright 2004 The Apache Software Foundation
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
  *     http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 
 import junit.framework.TestCase;
 
 import java.io.StringReader;
-import java.util.ArrayList;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.PhraseQuery;
-import org.apache.lucene.search.Hits;
+import java.io.IOException;
+import java.util.Set;
+import java.util.HashSet;
 
 public class TestStopAnalyzer extends TestCase {
-  private StopAnalyzer stopAnalyzer = new StopAnalyzer();
-
-  public Token[] tokensFromAnalyzer(Analyzer analyzer, String text)
-                                                           throws Exception {
-    TokenStream stream =
-      analyzer.tokenStream("contents", new StringReader(text));
-    ArrayList tokenList = new ArrayList();
-    while (true) {
-      Token token = stream.next();
-      if (token == null) break;
-      tokenList.add(token);
-    }
-    return (Token[]) tokenList.toArray(new Token[0]);
-  }
-
-  public void testPhraseQuery() throws Exception {
-    RAMDirectory directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, stopAnalyzer, true);
-    Document doc = new Document();
-    doc.add(Field.Text("field", "the stop words are here"));
-    writer.addDocument(doc);
-    writer.close();
-
-    IndexSearcher searcher = new IndexSearcher(directory);
-
-    // valid exact phrase query
-    PhraseQuery query = new PhraseQuery();
-    query.add(new Term("field","stop"));
-    query.add(new Term("field","words"));
-    Hits hits = searcher.search(query);
-    assertEquals(1, hits.length());
-
-    // currently StopAnalyzer does not leave "holes", so this matches.
-    query = new PhraseQuery();
-    query.add(new Term("field", "words"));
-    query.add(new Term("field", "here"));
-    hits = searcher.search(query);
-    assertEquals(1, hits.length());
-
-    searcher.close();
-  }
+  private StopAnalyzer stop = new StopAnalyzer();
+  private Set inValidTokens = new HashSet();
+
+  public TestStopAnalyzer(String s) {
+    super(s);
+  }
+
+  protected void setUp() {
+    for (int i = 0; i < StopAnalyzer.ENGLISH_STOP_WORDS.length; i++) {
+      inValidTokens.add(StopAnalyzer.ENGLISH_STOP_WORDS[i]);
+    }
+  }
+
+  public void testDefaults() {
+    assertTrue(stop != null);
+    StringReader reader = new StringReader("This is a test of the english stop analyzer");
+    TokenStream stream = stop.tokenStream("test", reader);
+    assertTrue(stream != null);
+    Token token = null;
+    try {
+      while ((token = stream.next()) != null)
+      {
+        assertTrue(inValidTokens.contains(token.termText()) == false);
+      }
+    } catch (IOException e) {
+      assertTrue(false);
+    }
+  }
+
+  public void testStopList() {
+    Set stopWordsSet = new HashSet();
+    stopWordsSet.add("good");
+    stopWordsSet.add("test");
+    stopWordsSet.add("analyzer");
+    StopAnalyzer newStop = new StopAnalyzer((String[])stopWordsSet.toArray(new String[3]));
+    StringReader reader = new StringReader("This is a good test of the english stop analyzer");
+    TokenStream stream = newStop.tokenStream("test", reader);
+    assertTrue(stream != null);
+    Token token = null;
+    try {
+      while ((token = stream.next()) != null)
+      {
+        String text = token.termText();
+        assertTrue(stopWordsSet.contains(text) == false);
+      }
+    } catch (IOException e) {
+      assertTrue(false);
+    }
+  }
 }
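
For context, a minimal standalone sketch of the behavior the patched tests exercise, using only the API calls visible in the diff (the StopAnalyzer(String[]) constructor, Analyzer.tokenStream, TokenStream.next, and Token.termText); the class name and sample sentence here are illustrative, not part of the patch:

import java.io.StringReader;
import org.apache.lucene.analysis.StopAnalyzer;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;

public class StopAnalyzerSketch {
  public static void main(String[] args) throws Exception {
    // Custom stop list, mirroring testStopList() above.
    StopAnalyzer analyzer = new StopAnalyzer(new String[] {"good", "test", "analyzer"});
    TokenStream stream =
        analyzer.tokenStream("field", new StringReader("This is a good test"));
    // Stopped terms ("good", "test") are filtered out of the stream;
    // the remaining lowercased tokens ("this", "is", "a") are printed.
    for (Token token = stream.next(); token != null; token = stream.next()) {
      System.out.println(token.termText());
    }
  }
}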