diff --git a/modules/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUFoldingFilter.java b/modules/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUFoldingFilter.java
index dc0f26462e8..3cb16cbab2c 100644
--- a/modules/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUFoldingFilter.java
+++ b/modules/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUFoldingFilter.java
@@ -21,7 +21,6 @@ import java.io.IOException;
 import java.io.Reader;
 
 import org.apache.lucene.analysis.*;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 /**
  * Tests ICUFoldingFilter
@@ -30,7 +29,7 @@ public class TestICUFoldingFilter extends BaseTokenStreamTestCase {
   Analyzer a = new Analyzer() {
     @Override
     public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader);
+      Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
       return new TokenStreamComponents(tokenizer, new ICUFoldingFilter(tokenizer));
     }
   };
diff --git a/modules/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUNormalizer2Filter.java b/modules/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUNormalizer2Filter.java
index e81c02d6a6a..267920debcd 100644
--- a/modules/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUNormalizer2Filter.java
+++ b/modules/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUNormalizer2Filter.java
@@ -21,7 +21,6 @@ import java.io.IOException;
 import java.io.Reader;
 
 import org.apache.lucene.analysis.*;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 import com.ibm.icu.text.Normalizer2;
 
@@ -32,7 +31,7 @@ public class TestICUNormalizer2Filter extends BaseTokenStreamTestCase {
   Analyzer a = new Analyzer() {
     @Override
     public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader);
+      Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
       return new TokenStreamComponents(tokenizer, new ICUNormalizer2Filter(tokenizer));
     }
   };
@@ -62,7 +61,7 @@ public class TestICUNormalizer2Filter extends BaseTokenStreamTestCase {
   Analyzer a = new Analyzer() {
     @Override
     public TokenStreamComponents createComponents(String fieldName, Reader reader) {
-      Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader);
+      Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
       return new TokenStreamComponents(tokenizer, new ICUNormalizer2Filter(
           tokenizer,
           /* specify nfc with decompose to get nfd */
diff --git a/modules/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUTransformFilter.java b/modules/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUTransformFilter.java
index 4d766155d91..c39a1a13fd4 100644
--- a/modules/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUTransformFilter.java
+++ b/modules/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUTransformFilter.java
@@ -23,9 +23,9 @@ import java.io.StringReader;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.BaseTokenStreamTestCase;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.core.KeywordTokenizer;
-import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 import org.apache.lucene.analysis.TokenStream;
 
 import com.ibm.icu.text.Transliterator;
@@ -94,7 +94,7 @@ public class TestICUTransformFilter extends BaseTokenStreamTestCase {
     Analyzer a = new Analyzer() {
       @Override
      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-        Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader);
+        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
         return new TokenStreamComponents(tokenizer, new ICUTransformFilter(tokenizer, transform));
       }
     };
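
For context, the replacement pattern shared by all three tests is sketched below. This is illustrative only, not part of the patch: it assumes the createComponents(String, Reader) signature used here, and the hypothetical class and method names are mine. MockTokenizer comes from Lucene's test framework; its WHITESPACE pattern splits on whitespace like the removed WhitespaceTokenizer, and the trailing false argument disables lowercasing.

import java.io.Reader;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.icu.ICUFoldingFilter;

// Hypothetical standalone version of the analyzers built in these tests.
public class MockTokenizerSketch {
  static Analyzer newWhitespaceAnalyzer() {
    return new Analyzer() {
      @Override
      public TokenStreamComponents createComponents(String fieldName, Reader reader) {
        // MockTokenizer.WHITESPACE mimics whitespace tokenization; `false` = no lowercasing.
        // Unlike WhitespaceTokenizer, MockTokenizer also asserts that the consumer
        // follows the TokenStream contract (reset/incrementToken/end/close).
        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
        // Wrap the tokenizer with the filter under test, as each patched test does.
        return new TokenStreamComponents(tokenizer, new ICUFoldingFilter(tokenizer));
      }
    };
  }
}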