rename Test to Tests, so it will be executed as part of the mvn tests as well, reformat a bit

Shay Banon 2012-08-01 16:20:37 +03:00
parent d13a7809d1
commit e88dbafe51
2 changed files with 21 additions and 19 deletions

@@ -19,8 +19,6 @@
 package org.elasticsearch.test.unit.index.analysis;
 
-import java.io.IOException;
-
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.elasticsearch.common.inject.Injector;
@@ -39,7 +37,10 @@ import org.elasticsearch.indices.analysis.IndicesAnalysisModule;
 import org.elasticsearch.indices.analysis.IndicesAnalysisService;
 import org.testng.Assert;
 
-public class AnalysisHelper {
+import java.io.IOException;
+
+public class AnalysisTestsHelper {
 
     public static AnalysisService createAnalysisServiceFromClassPath(String resource) {
         Settings settings = ImmutableSettings.settingsBuilder()
                 .loadFromClasspath(resource).build();
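
The hunk above cuts off before the helper's other method, assertSimpleTSOutput, which the renamed test class further down keeps calling. Judging only from the imports the helper retains (TokenStream, CharTermAttribute and org.testng.Assert), a minimal sketch of such an assertion helper could look like the following; the class name and body here are illustrative, not the actual contents of AnalysisTestsHelper.

import java.io.IOException;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.testng.Assert;

// Hypothetical stand-in for the real AnalysisTestsHelper, shown only to
// illustrate what assertSimpleTSOutput is expected to do.
public class AnalysisTestsHelperSketch {

    public static void assertSimpleTSOutput(TokenStream stream, String[] expected) throws IOException {
        // Track the term text of each token the stream emits.
        CharTermAttribute termAttr = stream.addAttribute(CharTermAttribute.class);
        stream.reset();
        int i = 0;
        while (stream.incrementToken()) {
            Assert.assertTrue(i < expected.length, "got more terms than expected");
            Assert.assertEquals(termAttr.toString(), expected[i++]);
        }
        // Every expected term must actually have been produced.
        Assert.assertEquals(i, expected.length, "not all expected terms were produced");
    }
}

The remaining hunks belong to the second changed file, the test class renamed to ShingleTokenFilterFactoryTests.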

@@ -19,12 +19,6 @@
 package org.elasticsearch.test.unit.index.analysis;
 
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.instanceOf;
-
-import java.io.IOException;
-import java.io.StringReader;
-
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.WhitespaceTokenizer;
 import org.apache.lucene.util.Version;
@@ -33,39 +27,46 @@ import org.elasticsearch.index.analysis.ShingleTokenFilterFactory;
 import org.elasticsearch.index.analysis.TokenFilterFactory;
 import org.testng.annotations.Test;
 
-public class ShingleTokenFilterFactoryTest {
+import java.io.IOException;
+import java.io.StringReader;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.instanceOf;
+
+public class ShingleTokenFilterFactoryTests {
 
     private static final String RESOURCE = "org/elasticsearch/test/unit/index/analysis/shingle_analysis.json";
 
     @Test
     public void testDefault() throws IOException {
-        AnalysisService analysisService = AnalysisHelper.createAnalysisServiceFromClassPath(RESOURCE);
+        AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(RESOURCE);
         TokenFilterFactory tokenFilter = analysisService.tokenFilter("shingle");
         String source = "the quick brown fox";
-        String[] expected = new String[] { "the", "the quick", "quick", "quick brown", "brown", "brown fox", "fox" };
+        String[] expected = new String[]{"the", "the quick", "quick", "quick brown", "brown", "brown fox", "fox"};
         Tokenizer tokenizer = new WhitespaceTokenizer(Version.LUCENE_36, new StringReader(source));
-        AnalysisHelper.assertSimpleTSOutput(tokenFilter.create(tokenizer), expected);
+        AnalysisTestsHelper.assertSimpleTSOutput(tokenFilter.create(tokenizer), expected);
     }
 
     @Test
     public void testInverseMapping() throws IOException {
-        AnalysisService analysisService = AnalysisHelper.createAnalysisServiceFromClassPath(RESOURCE);
+        AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(RESOURCE);
         TokenFilterFactory tokenFilter = analysisService.tokenFilter("shingle_inverse");
         assertThat(tokenFilter, instanceOf(ShingleTokenFilterFactory.class));
         String source = "the quick brown fox";
-        String[] expected = new String[] { "the_quick_brown", "quick_brown_fox" };
+        String[] expected = new String[]{"the_quick_brown", "quick_brown_fox"};
         Tokenizer tokenizer = new WhitespaceTokenizer(Version.LUCENE_36, new StringReader(source));
-        AnalysisHelper.assertSimpleTSOutput(tokenFilter.create(tokenizer), expected);
+        AnalysisTestsHelper.assertSimpleTSOutput(tokenFilter.create(tokenizer), expected);
     }
 
     @Test
     public void testInverseMappingNoShingles() throws IOException {
-        AnalysisService analysisService = AnalysisHelper.createAnalysisServiceFromClassPath(RESOURCE);
+        AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(RESOURCE);
         TokenFilterFactory tokenFilter = analysisService.tokenFilter("shingle_inverse");
         assertThat(tokenFilter, instanceOf(ShingleTokenFilterFactory.class));
         String source = "the quick";
-        String[] expected = new String[] { "the", "quick" };
+        String[] expected = new String[]{"the", "quick"};
        Tokenizer tokenizer = new WhitespaceTokenizer(Version.LUCENE_36, new StringReader(source));
-        AnalysisHelper.assertSimpleTSOutput(tokenFilter.create(tokenizer), expected);
+        AnalysisTestsHelper.assertSimpleTSOutput(tokenFilter.create(tokenizer), expected);
     }
 }
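
The shingle_analysis.json resource named by RESOURCE is not part of this commit, so the settings behind the "shingle" and "shingle_inverse" filters are not visible here. Going by the expected terms in testInverseMapping, "shingle_inverse" behaves like a Lucene ShingleFilter that builds three-token shingles joined with "_" and has unigram output switched off. The standalone sketch below reproduces that expectation with plain Lucene 3.6 classes; the filter settings are inferred from the assertions, not read from the resource file.

import java.io.IOException;
import java.io.StringReader;

import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.WhitespaceTokenizer;
import org.apache.lucene.analysis.shingle.ShingleFilter;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.util.Version;

public class ShingleInverseSketch {

    public static void main(String[] args) throws IOException {
        Tokenizer tokenizer = new WhitespaceTokenizer(Version.LUCENE_36, new StringReader("the quick brown fox"));

        // Inferred equivalent of the "shingle_inverse" filter: trigrams only,
        // joined with "_", without the original unigrams.
        ShingleFilter filter = new ShingleFilter(tokenizer, 3, 3);
        filter.setTokenSeparator("_");
        filter.setOutputUnigrams(false);

        CharTermAttribute term = filter.addAttribute(CharTermAttribute.class);
        filter.reset();
        while (filter.incrementToken()) {
            // Prints: the_quick_brown, quick_brown_fox
            System.out.println(term.toString());
        }
    }
}

The third test, which feeds only two tokens and still expects the plain unigrams back, suggests the factory also enables ShingleFilter's behaviour of emitting unigrams when no shingle can be built; that detail is left out of the sketch above.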