fix CWD usage in analysis tests

This commit is contained in:
Robert Muir 2015-04-29 09:44:37 -04:00
parent 6bd69b74f1
commit c0587cb244
16 changed files with 90 additions and 33 deletions

View File

@@ -32,7 +32,6 @@ import java.nio.file.Path;
import java.util.ArrayList; import java.util.ArrayList;
import static org.elasticsearch.common.Strings.cleanPath; import static org.elasticsearch.common.Strings.cleanPath;
import static org.elasticsearch.common.settings.ImmutableSettings.Builder.EMPTY_SETTINGS;
/** /**
* The environment of where things exists. * The environment of where things exists.
@@ -69,16 +68,12 @@ public class Environment {
fileStores = allStores.toArray(new ESFileStore[allStores.size()]); fileStores = allStores.toArray(new ESFileStore[allStores.size()]);
} }
public Environment() {
this(EMPTY_SETTINGS);
}
public Environment(Settings settings) { public Environment(Settings settings) {
this.settings = settings; this.settings = settings;
if (settings.get("path.home") != null) { if (settings.get("path.home") != null) {
homeFile = PathUtils.get(cleanPath(settings.get("path.home"))); homeFile = PathUtils.get(cleanPath(settings.get("path.home")));
} else { } else {
homeFile = PathUtils.get(System.getProperty("user.dir")); throw new IllegalStateException("path.home is not configured");
} }
if (settings.get("path.conf") != null) { if (settings.get("path.conf") != null) {

View File

@@ -33,6 +33,7 @@ public class ASCIIFoldingTokenFilterFactoryTests extends ElasticsearchTokenStrea
@Test @Test
public void testDefault() throws IOException { public void testDefault() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.my_ascii_folding.type", "asciifolding") .put("index.analysis.filter.my_ascii_folding.type", "asciifolding")
.build()); .build());
TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_ascii_folding"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_ascii_folding");
@@ -46,6 +47,7 @@ public class ASCIIFoldingTokenFilterFactoryTests extends ElasticsearchTokenStrea
@Test @Test
public void testPreserveOriginal() throws IOException { public void testPreserveOriginal() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.my_ascii_folding.type", "asciifolding") .put("index.analysis.filter.my_ascii_folding.type", "asciifolding")
.put("index.analysis.filter.my_ascii_folding.preserve_original", true) .put("index.analysis.filter.my_ascii_folding.preserve_original", true)
.build()); .build());

View File

@@ -121,7 +121,10 @@ public class AnalysisModuleTests extends ElasticsearchTestCase {
} }
private void assertTokenFilter(String name, Class clazz) throws IOException { private void assertTokenFilter(String name, Class clazz) throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(ImmutableSettings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()); Settings settings = ImmutableSettings.settingsBuilder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir().toString()).build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter(name); TokenFilterFactory tokenFilter = analysisService.tokenFilter(name);
Tokenizer tokenizer = new WhitespaceTokenizer(); Tokenizer tokenizer = new WhitespaceTokenizer();
tokenizer.setReader(new StringReader("foo bar")); tokenizer.setReader(new StringReader("foo bar"));

View File

@@ -34,11 +34,15 @@ import org.elasticsearch.index.settings.IndexSettingsModule;
import org.elasticsearch.indices.analysis.IndicesAnalysisModule; import org.elasticsearch.indices.analysis.IndicesAnalysisModule;
import org.elasticsearch.indices.analysis.IndicesAnalysisService; import org.elasticsearch.indices.analysis.IndicesAnalysisService;
import java.nio.file.Path;
public class AnalysisTestsHelper { public class AnalysisTestsHelper {
public static AnalysisService createAnalysisServiceFromClassPath(String resource) { public static AnalysisService createAnalysisServiceFromClassPath(Path baseDir, String resource) {
Settings settings = ImmutableSettings.settingsBuilder() Settings settings = ImmutableSettings.settingsBuilder()
.loadFromClasspath(resource).build(); .loadFromClasspath(resource)
.put("path.home", baseDir.toString())
.build();
return createAnalysisServiceFromSettings(settings); return createAnalysisServiceFromSettings(settings);
} }

View File

@@ -45,6 +45,7 @@ public class AnalyzerBackwardsCompatTests extends ElasticsearchTokenStreamTestCa
builder.put(SETTING_VERSION_CREATED, version); builder.put(SETTING_VERSION_CREATED, version);
} }
builder.put("index.analysis.analyzer.foo.type", type); builder.put("index.analysis.analyzer.foo.type", type);
builder.put("path.home", createTempDir().toString());
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(builder.build()); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(builder.build());
NamedAnalyzer analyzer = analysisService.analyzer("foo"); NamedAnalyzer analyzer = analysisService.analyzer("foo");
if (version.onOrAfter(noStopwordVersion)) { if (version.onOrAfter(noStopwordVersion)) {

View File

@@ -33,7 +33,7 @@ public class CJKFilterFactoryTests extends ElasticsearchTokenStreamTestCase {
@Test @Test
public void testDefault() throws IOException { public void testDefault() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(RESOURCE); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("cjk_bigram"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("cjk_bigram");
String source = "多くの学生が試験に落ちた。"; String source = "多くの学生が試験に落ちた。";
String[] expected = new String[]{"多く", "くの", "の学", "学生", "生が", "が試", "試験", "験に", "に落", "落ち", "ちた" }; String[] expected = new String[]{"多く", "くの", "の学", "学生", "生が", "が試", "試験", "験に", "に落", "落ち", "ちた" };
@@ -44,7 +44,7 @@ public class CJKFilterFactoryTests extends ElasticsearchTokenStreamTestCase {
@Test @Test
public void testNoFlags() throws IOException { public void testNoFlags() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(RESOURCE); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("cjk_no_flags"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("cjk_no_flags");
String source = "多くの学生が試験に落ちた。"; String source = "多くの学生が試験に落ちた。";
String[] expected = new String[]{"多く", "くの", "の学", "学生", "生が", "が試", "試験", "験に", "に落", "落ち", "ちた" }; String[] expected = new String[]{"多く", "くの", "の学", "学生", "生が", "が試", "試験", "験に", "に落", "落ち", "ちた" };
@@ -55,7 +55,7 @@ public class CJKFilterFactoryTests extends ElasticsearchTokenStreamTestCase {
@Test @Test
public void testHanOnly() throws IOException { public void testHanOnly() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(RESOURCE); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("cjk_han_only"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("cjk_han_only");
String source = "多くの学生が試験に落ちた。"; String source = "多くの学生が試験に落ちた。";
String[] expected = new String[]{"", "", "", "学生", "", "試験", "", "", "", "" }; String[] expected = new String[]{"", "", "", "学生", "", "試験", "", "", "", "" };
@@ -66,7 +66,7 @@ public class CJKFilterFactoryTests extends ElasticsearchTokenStreamTestCase {
@Test @Test
public void testHanUnigramOnly() throws IOException { public void testHanUnigramOnly() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(RESOURCE); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("cjk_han_unigram_only"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("cjk_han_unigram_only");
String source = "多くの学生が試験に落ちた。"; String source = "多くの学生が試験に落ちた。";
String[] expected = new String[]{"", "", "", "", "学生", "", "", "", "試験", "", "", "", "", "" }; String[] expected = new String[]{"", "", "", "", "学生", "", "", "", "試験", "", "", "", "", "" };

View File

@@ -33,6 +33,7 @@ public class HunspellTokenFilterFactoryTests extends ElasticsearchTestCase {
@Test @Test
public void testDedup() throws IOException { public void testDedup() throws IOException {
Settings settings = settingsBuilder() Settings settings = settingsBuilder()
.put("path.home", createTempDir().toString())
.put("path.conf", getDataPath("/indices/analyze/conf_dir")) .put("path.conf", getDataPath("/indices/analyze/conf_dir"))
.put("index.analysis.filter.en_US.type", "hunspell") .put("index.analysis.filter.en_US.type", "hunspell")
.put("index.analysis.filter.en_US.locale", "en_US") .put("index.analysis.filter.en_US.locale", "en_US")
@@ -45,6 +46,7 @@ public class HunspellTokenFilterFactoryTests extends ElasticsearchTestCase {
assertThat(hunspellTokenFilter.dedup(), is(true)); assertThat(hunspellTokenFilter.dedup(), is(true));
settings = settingsBuilder() settings = settingsBuilder()
.put("path.home", createTempDir().toString())
.put("path.conf", getDataPath("/indices/analyze/conf_dir")) .put("path.conf", getDataPath("/indices/analyze/conf_dir"))
.put("index.analysis.filter.en_US.type", "hunspell") .put("index.analysis.filter.en_US.type", "hunspell")
.put("index.analysis.filter.en_US.dedup", false) .put("index.analysis.filter.en_US.dedup", false)

View File

@@ -40,7 +40,7 @@ public class KeepFilterFactoryTests extends ElasticsearchTokenStreamTestCase {
@Test @Test
public void testLoadWithoutSettings() { public void testLoadWithoutSettings() {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(RESOURCE); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("keep"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("keep");
Assert.assertNull(tokenFilter); Assert.assertNull(tokenFilter);
} }
@@ -48,6 +48,7 @@ public class KeepFilterFactoryTests extends ElasticsearchTokenStreamTestCase {
@Test @Test
public void testLoadOverConfiguredSettings() { public void testLoadOverConfiguredSettings() {
Settings settings = ImmutableSettings.settingsBuilder() Settings settings = ImmutableSettings.settingsBuilder()
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.broken_keep_filter.type", "keep") .put("index.analysis.filter.broken_keep_filter.type", "keep")
.put("index.analysis.filter.broken_keep_filter.keep_words_path", "does/not/exists.txt") .put("index.analysis.filter.broken_keep_filter.keep_words_path", "does/not/exists.txt")
.put("index.analysis.filter.broken_keep_filter.keep_words", "[\"Hello\", \"worlD\"]") .put("index.analysis.filter.broken_keep_filter.keep_words", "[\"Hello\", \"worlD\"]")
@@ -63,6 +64,7 @@ public class KeepFilterFactoryTests extends ElasticsearchTokenStreamTestCase {
@Test @Test
public void testKeepWordsPathSettings() { public void testKeepWordsPathSettings() {
Settings settings = ImmutableSettings.settingsBuilder() Settings settings = ImmutableSettings.settingsBuilder()
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.non_broken_keep_filter.type", "keep") .put("index.analysis.filter.non_broken_keep_filter.type", "keep")
.put("index.analysis.filter.non_broken_keep_filter.keep_words_path", "does/not/exists.txt") .put("index.analysis.filter.non_broken_keep_filter.keep_words_path", "does/not/exists.txt")
.build(); .build();
@@ -89,7 +91,7 @@ public class KeepFilterFactoryTests extends ElasticsearchTokenStreamTestCase {
@Test @Test
public void testCaseInsensitiveMapping() throws IOException { public void testCaseInsensitiveMapping() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(RESOURCE); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_keep_filter"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_keep_filter");
assertThat(tokenFilter, instanceOf(KeepWordFilterFactory.class)); assertThat(tokenFilter, instanceOf(KeepWordFilterFactory.class));
String source = "hello small world"; String source = "hello small world";
@@ -101,7 +103,7 @@ public class KeepFilterFactoryTests extends ElasticsearchTokenStreamTestCase {
@Test @Test
public void testCaseSensitiveMapping() throws IOException { public void testCaseSensitiveMapping() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(RESOURCE); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_case_sensitive_keep_filter"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_case_sensitive_keep_filter");
assertThat(tokenFilter, instanceOf(KeepWordFilterFactory.class)); assertThat(tokenFilter, instanceOf(KeepWordFilterFactory.class));
String source = "Hello small world"; String source = "Hello small world";

View File

@@ -36,6 +36,7 @@ public class KeepTypesFilterFactoryTests extends ElasticsearchTokenStreamTestCas
@Test @Test
public void testKeepTypes() throws IOException { public void testKeepTypes() throws IOException {
Settings settings = ImmutableSettings.settingsBuilder() Settings settings = ImmutableSettings.settingsBuilder()
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.keep_numbers.type", "keep_types") .put("index.analysis.filter.keep_numbers.type", "keep_types")
.putArray("index.analysis.filter.keep_numbers.types", new String[] {"<NUM>", "<SOMETHINGELSE>"}) .putArray("index.analysis.filter.keep_numbers.types", new String[] {"<NUM>", "<SOMETHINGELSE>"})
.build(); .build();

View File

@@ -33,7 +33,10 @@ public class LimitTokenCountFilterFactoryTests extends ElasticsearchTokenStreamT
@Test @Test
public void testDefault() throws IOException { public void testDefault() throws IOException {
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.limit_default.type", "limit").build(); Settings settings = ImmutableSettings.settingsBuilder()
.put("index.analysis.filter.limit_default.type", "limit")
.put("path.home", createTempDir().toString())
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
{ {
TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_default"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_default");
@@ -56,8 +59,11 @@ public class LimitTokenCountFilterFactoryTests extends ElasticsearchTokenStreamT
@Test @Test
public void testSettings() throws IOException { public void testSettings() throws IOException {
{ {
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.limit_1.type", "limit") Settings settings = ImmutableSettings.settingsBuilder()
.put("index.analysis.filter.limit_1.max_token_count", 3).put("index.analysis.filter.limit_1.consume_all_tokens", true) .put("index.analysis.filter.limit_1.type", "limit")
.put("index.analysis.filter.limit_1.max_token_count", 3)
.put("index.analysis.filter.limit_1.consume_all_tokens", true)
.put("path.home", createTempDir().toString())
.build(); .build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_1"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_1");
@@ -68,8 +74,11 @@ public class LimitTokenCountFilterFactoryTests extends ElasticsearchTokenStreamT
assertTokenStreamContents(tokenFilter.create(tokenizer), expected); assertTokenStreamContents(tokenFilter.create(tokenizer), expected);
} }
{ {
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.limit_1.type", "limit") Settings settings = ImmutableSettings.settingsBuilder()
.put("index.analysis.filter.limit_1.max_token_count", 3).put("index.analysis.filter.limit_1.consume_all_tokens", false) .put("index.analysis.filter.limit_1.type", "limit")
.put("index.analysis.filter.limit_1.max_token_count", 3)
.put("index.analysis.filter.limit_1.consume_all_tokens", false)
.put("path.home", createTempDir().toString())
.build(); .build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_1"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_1");
@@ -81,8 +90,11 @@ public class LimitTokenCountFilterFactoryTests extends ElasticsearchTokenStreamT
} }
{ {
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.limit_1.type", "limit") Settings settings = ImmutableSettings.settingsBuilder()
.put("index.analysis.filter.limit_1.max_token_count", 17).put("index.analysis.filter.limit_1.consume_all_tokens", true) .put("index.analysis.filter.limit_1.type", "limit")
.put("index.analysis.filter.limit_1.max_token_count", 17)
.put("index.analysis.filter.limit_1.consume_all_tokens", true)
.put("path.home", createTempDir().toString())
.build(); .build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_1"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_1");

View File

@@ -40,7 +40,7 @@ public class ShingleTokenFilterFactoryTests extends ElasticsearchTokenStreamTest
@Test @Test
public void testDefault() throws IOException { public void testDefault() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(RESOURCE); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("shingle"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("shingle");
String source = "the quick brown fox"; String source = "the quick brown fox";
String[] expected = new String[]{"the", "the quick", "quick", "quick brown", "brown", "brown fox", "fox"}; String[] expected = new String[]{"the", "the quick", "quick", "quick brown", "brown", "brown fox", "fox"};
@@ -51,7 +51,7 @@ public class ShingleTokenFilterFactoryTests extends ElasticsearchTokenStreamTest
@Test @Test
public void testInverseMapping() throws IOException { public void testInverseMapping() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(RESOURCE); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("shingle_inverse"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("shingle_inverse");
assertThat(tokenFilter, instanceOf(ShingleTokenFilterFactory.class)); assertThat(tokenFilter, instanceOf(ShingleTokenFilterFactory.class));
String source = "the quick brown fox"; String source = "the quick brown fox";
@@ -63,7 +63,7 @@ public class ShingleTokenFilterFactoryTests extends ElasticsearchTokenStreamTest
@Test @Test
public void testInverseMappingNoShingles() throws IOException { public void testInverseMappingNoShingles() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(RESOURCE); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("shingle_inverse"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("shingle_inverse");
assertThat(tokenFilter, instanceOf(ShingleTokenFilterFactory.class)); assertThat(tokenFilter, instanceOf(ShingleTokenFilterFactory.class));
String source = "the quick"; String source = "the quick";
@@ -75,7 +75,7 @@ public class ShingleTokenFilterFactoryTests extends ElasticsearchTokenStreamTest
@Test @Test
public void testFillerToken() throws IOException { public void testFillerToken() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(RESOURCE); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("shingle_filler"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("shingle_filler");
String source = "simon the sorcerer"; String source = "simon the sorcerer";
String[] expected = new String[]{"simon FILLER", "simon FILLER sorcerer", "FILLER sorcerer"}; String[] expected = new String[]{"simon FILLER", "simon FILLER sorcerer", "FILLER sorcerer"};

View File

@@ -54,6 +54,7 @@ public class StemmerTokenFilterFactoryTests extends ElasticsearchTokenStreamTest
.put("index.analysis.analyzer.my_english.tokenizer","whitespace") .put("index.analysis.analyzer.my_english.tokenizer","whitespace")
.put("index.analysis.analyzer.my_english.filter","my_english") .put("index.analysis.analyzer.my_english.filter","my_english")
.put(SETTING_VERSION_CREATED,v) .put(SETTING_VERSION_CREATED,v)
.put("path.home", createTempDir().toString())
.build(); .build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
@@ -87,6 +88,7 @@ public class StemmerTokenFilterFactoryTests extends ElasticsearchTokenStreamTest
.put("index.analysis.analyzer.my_porter2.tokenizer","whitespace") .put("index.analysis.analyzer.my_porter2.tokenizer","whitespace")
.put("index.analysis.analyzer.my_porter2.filter","my_porter2") .put("index.analysis.analyzer.my_porter2.filter","my_porter2")
.put(SETTING_VERSION_CREATED,v) .put(SETTING_VERSION_CREATED,v)
.put("path.home", createTempDir().toString())
.build(); .build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);

View File

@@ -49,6 +49,7 @@ public class StopTokenFilterTests extends ElasticsearchTokenStreamTestCase {
if (random().nextBoolean()) { if (random().nextBoolean()) {
builder.put("index.analysis.filter.my_stop.version", "5.0"); builder.put("index.analysis.filter.my_stop.version", "5.0");
} }
builder.put("path.home", createTempDir().toString());
Settings settings = builder.build(); Settings settings = builder.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
analysisService.tokenFilter("my_stop"); analysisService.tokenFilter("my_stop");
@@ -68,6 +69,7 @@ public class StopTokenFilterTests extends ElasticsearchTokenStreamTestCase {
} else { } else {
// don't specify // don't specify
} }
builder.put("path.home", createTempDir().toString());
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(builder.build()); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(builder.build());
TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop");
assertThat(tokenFilter, instanceOf(StopTokenFilterFactory.class)); assertThat(tokenFilter, instanceOf(StopTokenFilterFactory.class));
@@ -83,8 +85,11 @@ public class StopTokenFilterTests extends ElasticsearchTokenStreamTestCase {
@Test @Test
public void testDeprecatedPositionIncrementSettingWithVersions() throws IOException { public void testDeprecatedPositionIncrementSettingWithVersions() throws IOException {
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.my_stop.type", "stop") Settings settings = ImmutableSettings.settingsBuilder()
.put("index.analysis.filter.my_stop.enable_position_increments", false).put("index.analysis.filter.my_stop.version", "4.3") .put("index.analysis.filter.my_stop.type", "stop")
.put("index.analysis.filter.my_stop.enable_position_increments", false)
.put("index.analysis.filter.my_stop.version", "4.3")
.put("path.home", createTempDir().toString())
.build(); .build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop");
@@ -100,6 +105,7 @@ public class StopTokenFilterTests extends ElasticsearchTokenStreamTestCase {
Settings settings = ImmutableSettings.settingsBuilder() Settings settings = ImmutableSettings.settingsBuilder()
.put("index.analysis.filter.my_stop.type", "stop") .put("index.analysis.filter.my_stop.type", "stop")
.put("index.analysis.filter.my_stop.remove_trailing", false) .put("index.analysis.filter.my_stop.remove_trailing", false)
.put("path.home", createTempDir().toString())
.build(); .build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop");

View File

@@ -34,6 +34,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ElasticsearchTokenStre
@Test @Test
public void testDefault() throws IOException { public void testDefault() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.build()); .build());
TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter");
@@ -47,6 +48,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ElasticsearchTokenStre
@Test @Test
public void testCatenateWords() throws IOException { public void testCatenateWords() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.catenate_words", "true") .put("index.analysis.filter.my_word_delimiter.catenate_words", "true")
.put("index.analysis.filter.my_word_delimiter.generate_word_parts", "false") .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "false")
@@ -62,6 +64,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ElasticsearchTokenStre
@Test @Test
public void testCatenateNumbers() throws IOException { public void testCatenateNumbers() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.generate_number_parts", "false") .put("index.analysis.filter.my_word_delimiter.generate_number_parts", "false")
.put("index.analysis.filter.my_word_delimiter.catenate_numbers", "true") .put("index.analysis.filter.my_word_delimiter.catenate_numbers", "true")
@@ -77,6 +80,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ElasticsearchTokenStre
@Test @Test
public void testCatenateAll() throws IOException { public void testCatenateAll() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.generate_word_parts", "false") .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "false")
.put("index.analysis.filter.my_word_delimiter.generate_number_parts", "false") .put("index.analysis.filter.my_word_delimiter.generate_number_parts", "false")
@@ -93,6 +97,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ElasticsearchTokenStre
@Test @Test
public void testSplitOnCaseChange() throws IOException { public void testSplitOnCaseChange() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.split_on_case_change", "false") .put("index.analysis.filter.my_word_delimiter.split_on_case_change", "false")
.build()); .build());
@@ -107,6 +112,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ElasticsearchTokenStre
@Test @Test
public void testPreserveOriginal() throws IOException { public void testPreserveOriginal() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.preserve_original", "true") .put("index.analysis.filter.my_word_delimiter.preserve_original", "true")
.build()); .build());
@ -121,6 +127,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ElasticsearchTokenStre
@Test @Test
public void testStemEnglishPossessive() throws IOException { public void testStemEnglishPossessive() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.stem_english_possessive", "false") .put("index.analysis.filter.my_word_delimiter.stem_english_possessive", "false")
.build()); .build());
@ -136,6 +143,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ElasticsearchTokenStre
@Test @Test
public void testPartsAndCatenate() throws IOException { public void testPartsAndCatenate() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.catenate_words", "true") .put("index.analysis.filter.my_word_delimiter.catenate_words", "true")
.put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true") .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true")
@ -153,6 +161,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ElasticsearchTokenStre
@Test @Test
public void testDeprecatedPartsAndCatenate() throws IOException { public void testDeprecatedPartsAndCatenate() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.catenate_words", "true") .put("index.analysis.filter.my_word_delimiter.catenate_words", "true")
.put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true") .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true")

View File

@ -39,7 +39,10 @@ public class CommonGramsTokenFilterFactoryTests extends ElasticsearchTokenStream
@Test @Test
public void testDefault() throws IOException { public void testDefault() throws IOException {
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_default.type", "common_grams").build(); Settings settings = ImmutableSettings.settingsBuilder()
.put("index.analysis.filter.common_grams_default.type", "common_grams")
.put("path.home", createTempDir().toString())
.build();
try { try {
AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
@ -53,6 +56,7 @@ public class CommonGramsTokenFilterFactoryTests extends ElasticsearchTokenStream
{ {
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_default.type", "common_grams") Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_default.type", "common_grams")
.putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein") .putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
.put("path.home", createTempDir().toString())
.build(); .build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
@ -69,6 +73,7 @@ public class CommonGramsTokenFilterFactoryTests extends ElasticsearchTokenStream
{ {
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_default.type", "common_grams") Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_default.type", "common_grams")
.put("index.analysis.filter.common_grams_default.query_mode", false) .put("index.analysis.filter.common_grams_default.query_mode", false)
.put("path.home", createTempDir().toString())
.putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein") .putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
.build(); .build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
@ -88,6 +93,7 @@ public class CommonGramsTokenFilterFactoryTests extends ElasticsearchTokenStream
{ {
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_1.type", "common_grams") Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_1.type", "common_grams")
.put("index.analysis.filter.common_grams_1.ignore_case", true) .put("index.analysis.filter.common_grams_1.ignore_case", true)
.put("path.home", createTempDir().toString())
.putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are") .putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
.build(); .build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
@ -101,6 +107,7 @@ public class CommonGramsTokenFilterFactoryTests extends ElasticsearchTokenStream
{ {
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_2.type", "common_grams") Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_2.type", "common_grams")
.put("index.analysis.filter.common_grams_2.ignore_case", false) .put("index.analysis.filter.common_grams_2.ignore_case", false)
.put("path.home", createTempDir().toString())
.putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are") .putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
.build(); .build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
@ -114,6 +121,7 @@ public class CommonGramsTokenFilterFactoryTests extends ElasticsearchTokenStream
{ {
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_3.type", "common_grams") Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_3.type", "common_grams")
.putArray("index.analysis.filter.common_grams_3.common_words", "the", "or", "not", "a", "is", "an", "they", "are") .putArray("index.analysis.filter.common_grams_3.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
.put("path.home", createTempDir().toString())
.build(); .build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_3"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_3");
@ -127,7 +135,10 @@ public class CommonGramsTokenFilterFactoryTests extends ElasticsearchTokenStream
@Test @Test
public void testCommonGramsAnalysis() throws IOException { public void testCommonGramsAnalysis() throws IOException {
Settings settings = ImmutableSettings.settingsBuilder().loadFromClasspath("org/elasticsearch/index/analysis/commongrams/commongrams.json").build(); Settings settings = ImmutableSettings.settingsBuilder()
.loadFromClasspath("org/elasticsearch/index/analysis/commongrams/commongrams.json")
.put("path.home", createTempDir().toString())
.build();
{ {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
Analyzer analyzer = analysisService.analyzer("commongramsAnalyzer").analyzer(); Analyzer analyzer = analysisService.analyzer("commongramsAnalyzer").analyzer();
@ -151,6 +162,7 @@ public class CommonGramsTokenFilterFactoryTests extends ElasticsearchTokenStream
.put("index.analysis.filter.common_grams_1.query_mode", true) .put("index.analysis.filter.common_grams_1.query_mode", true)
.putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are") .putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
.put("index.analysis.filter.common_grams_1.ignore_case", true) .put("index.analysis.filter.common_grams_1.ignore_case", true)
.put("path.home", createTempDir().toString())
.build(); .build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_1"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_1");
@ -165,6 +177,7 @@ public class CommonGramsTokenFilterFactoryTests extends ElasticsearchTokenStream
.put("index.analysis.filter.common_grams_2.query_mode", true) .put("index.analysis.filter.common_grams_2.query_mode", true)
.putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are") .putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
.put("index.analysis.filter.common_grams_2.ignore_case", false) .put("index.analysis.filter.common_grams_2.ignore_case", false)
.put("path.home", createTempDir().toString())
.build(); .build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_2"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_2");
@ -178,6 +191,7 @@ public class CommonGramsTokenFilterFactoryTests extends ElasticsearchTokenStream
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_3.type", "common_grams") Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_3.type", "common_grams")
.put("index.analysis.filter.common_grams_3.query_mode", true) .put("index.analysis.filter.common_grams_3.query_mode", true)
.putArray("index.analysis.filter.common_grams_3.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are") .putArray("index.analysis.filter.common_grams_3.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
.put("path.home", createTempDir().toString())
.build(); .build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_3"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_3");
@ -191,6 +205,7 @@ public class CommonGramsTokenFilterFactoryTests extends ElasticsearchTokenStream
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_4.type", "common_grams") Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_4.type", "common_grams")
.put("index.analysis.filter.common_grams_4.query_mode", true) .put("index.analysis.filter.common_grams_4.query_mode", true)
.putArray("index.analysis.filter.common_grams_4.common_words", "the", "or", "not", "a", "is", "an", "they", "are") .putArray("index.analysis.filter.common_grams_4.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
.put("path.home", createTempDir().toString())
.build(); .build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_4"); TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_4");
@ -204,7 +219,10 @@ public class CommonGramsTokenFilterFactoryTests extends ElasticsearchTokenStream
@Test @Test
public void testQueryModeCommonGramsAnalysis() throws IOException { public void testQueryModeCommonGramsAnalysis() throws IOException {
Settings settings = ImmutableSettings.settingsBuilder().loadFromClasspath("org/elasticsearch/index/analysis/commongrams/commongrams_query_mode.json").build(); Settings settings = ImmutableSettings.settingsBuilder()
.loadFromClasspath("org/elasticsearch/index/analysis/commongrams/commongrams_query_mode.json")
.put("path.home", createTempDir().toString())
.build();
{ {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
Analyzer analyzer = analysisService.analyzer("commongramsAnalyzer").analyzer(); Analyzer analyzer = analysisService.analyzer("commongramsAnalyzer").analyzer();

View File

@ -59,9 +59,9 @@ public class SynonymsAnalysisTest extends ElasticsearchTestCase {
@Test @Test
public void testSynonymsAnalysis() throws IOException { public void testSynonymsAnalysis() throws IOException {
Settings settings = settingsBuilder(). Settings settings = settingsBuilder().
loadFromClasspath("org/elasticsearch/index/analysis/synonyms/synonyms.json") loadFromClasspath("org/elasticsearch/index/analysis/synonyms/synonyms.json")
.put("path.home", createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
Index index = new Index("test"); Index index = new Index("test");