Added tests for synonym parsers.

Lukas Vlcek 2011-10-19 17:57:35 +02:00 committed by Shay Banon
parent f7747d7ff9
commit 54273dd1ee
6 changed files with 179 additions and 12 deletions

Analysis.java

@@ -243,13 +243,13 @@ public class Analysis {
     }

     /**
-     * @return null If no settings set for "settingsPrefix + _path" then return null.
+     * @return null If no settings set for "settingsPrefix" then return <code>null</code>.
      *
      * @throws ElasticSearchIllegalArgumentException
      *          If the Reader can not be instantiated.
      */
-    public static Reader getFileReader(Environment env, Settings settings, String settingPrefix) {
-        String filePath = settings.get(settingPrefix + "_path", null);
+    public static Reader getReaderFromFile(Environment env, Settings settings, String settingPrefix) {
+        String filePath = settings.get(settingPrefix, null);
         if (filePath == null) {
             return null;
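
A note on the renamed helper: per the javadoc above, a missing setting yields null, while a file that cannot be opened raises ElasticSearchIllegalArgumentException. Below is a rough standalone sketch of just that contract, with java.util.Properties standing in for Elasticsearch Settings; it is an illustration only, not the commit's implementation, which also receives an Environment.

// Sketch of the documented contract only; Properties stands in for Settings.
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.Reader;
import java.util.Properties;

final class ReaderFromFileSketch {

    // Returns null when the setting is absent; throws if the file cannot be opened.
    static Reader getReaderFromFile(Properties settings, String settingPrefix) {
        String filePath = settings.getProperty(settingPrefix);
        if (filePath == null) {
            return null;
        }
        try {
            return new FileReader(filePath);
        } catch (FileNotFoundException e) {
            throw new IllegalArgumentException("failed to open " + filePath + " for setting " + settingPrefix, e);
        }
    }
}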

SynonymTokenFilterFactory.java

@@ -29,7 +29,6 @@ import org.apache.lucene.analysis.synonym.SolrSynonymParser;
 import org.apache.lucene.analysis.synonym.SynonymFilter;
 import org.apache.lucene.analysis.synonym.SynonymMap;
 import org.apache.lucene.analysis.synonym.WordnetSynonymParser;
-import org.apache.lucene.util.CharsRef;
 import org.elasticsearch.ElasticSearchIllegalArgumentException;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.inject.assistedinject.Assisted;
@@ -40,12 +39,8 @@ import org.elasticsearch.index.Index;
 import org.elasticsearch.index.settings.IndexSettings;
 import org.elasticsearch.indices.analysis.IndicesAnalysisService;

-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.LineNumberReader;
 import java.io.Reader;
-import java.text.ParseException;
-import java.util.ArrayList;
+import java.io.StringReader;
 import java.util.List;
 import java.util.Map;
@@ -59,10 +54,20 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
                                              @Assisted String name, @Assisted Settings settings) {
         super(index, indexSettings, name, settings);

-        Reader rulesReader = Analysis.getFileReader(env, settings, "synonyms");
-        if (rulesReader == null) {
+        Reader rulesReader = null;
+        if (settings.getAsArray("synonyms", null) != null) {
+            List<String> rules = Analysis.getWordList(env, settings, "synonyms");
+            StringBuilder sb = new StringBuilder();
+            for (String line : rules) {
+                sb.append(line).append(System.getProperty("line.separator"));
+            }
+            rulesReader = new StringReader(sb.toString());
+        } else if (settings.get("synonyms_path") != null) {
+            rulesReader = Analysis.getReaderFromFile(env, settings, "synonyms_path");
+        } else {
             throw new ElasticSearchIllegalArgumentException("synonym requires either `synonyms` or `synonyms_path` to be configured");
         }

         this.ignoreCase = settings.getAsBoolean("ignore_case", false);
         boolean expand = settings.getAsBoolean("expand", true);
@@ -89,13 +94,14 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
         try {
             SynonymMap.Builder parser = null;
-            if (settings.get("format","wordnet").equalsIgnoreCase("wordnet")) {
+            if ("wordnet".equalsIgnoreCase(settings.get("format"))) {
                 parser = new WordnetSynonymParser(true, expand, analyzer);
                 ((WordnetSynonymParser)parser).add(rulesReader);
             } else {
                 parser = new SolrSynonymParser(true, expand, analyzer);
                 ((SolrSynonymParser)parser).add(rulesReader);
             }
             synonymMap = parser.build();
         } catch (Exception e) {
             throw new ElasticSearchIllegalArgumentException("failed to build synonyms", e);

SynonymsAnalysisTest.java

@@ -0,0 +1,103 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.analysis.synonyms;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.lucene.all.AllEntries;
import org.elasticsearch.common.lucene.all.AllTokenStream;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.EnvironmentModule;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNameModule;
import org.elasticsearch.index.analysis.AnalysisModule;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.settings.IndexSettingsModule;
import org.elasticsearch.indices.analysis.IndicesAnalysisModule;
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
import org.hamcrest.MatcherAssert;
import org.testng.annotations.Test;

import java.io.IOException;

import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.hamcrest.Matchers.*;

/**
 * @author Lukas Vlcek
 */
public class SynonymsAnalysisTest {

    protected final ESLogger logger = Loggers.getLogger(getClass());

    private AnalysisService analysisService;

    @Test public void testSynonymsAnalysis() throws IOException {

        Settings settings = settingsBuilder().loadFromClasspath("org/elasticsearch/index/analysis/synonyms/synonyms.json").build();
        Index index = new Index("test");

        Injector parentInjector = new ModulesBuilder().add(
                new SettingsModule(settings),
                new EnvironmentModule(new Environment(settings)),
                new IndicesAnalysisModule())
                .createInjector();
        Injector injector = new ModulesBuilder().add(
                new IndexSettingsModule(index, settings),
                new IndexNameModule(index),
                new AnalysisModule(settings, parentInjector.getInstance(IndicesAnalysisService.class)))
                .createChildInjector(parentInjector);

        analysisService = injector.getInstance(AnalysisService.class);

        match("synonymAnalyzer", "kimchy is the dude abides", "shay is the elasticsearch man!");
        match("synonymAnalyzer_file", "kimchy is the dude abides", "shay is the elasticsearch man!");
        match("synonymAnalyzerWordnet", "abstain", "abstain refrain desist");
        match("synonymAnalyzerWordnet_file", "abstain", "abstain refrain desist");
    }

    private void match(String analyzerName, String source, String target) throws IOException {

        Analyzer analyzer = analysisService.analyzer(analyzerName).analyzer();

        AllEntries allEntries = new AllEntries();
        allEntries.addText("field", source, 1.0f);
        allEntries.reset();

        TokenStream stream = AllTokenStream.allTokenStream("_all", allEntries, analyzer);
        TermAttribute termAtt = stream.addAttribute(TermAttribute.class);
        StringBuilder sb = new StringBuilder();

        while (stream.incrementToken()) {
            sb.append(termAtt.term()).append(" ");
        }

        MatcherAssert.assertThat(target, equalTo(sb.toString().trim()));
    }
}

synonyms.json

@@ -0,0 +1,52 @@
{
    "index" : {
        "analysis" : {
            "analyzer" : {
                "synonymAnalyzer" : {
                    "tokenizer" : "standard",
                    "filter" : [ "synonym" ]
                },
                "synonymAnalyzer_file" : {
                    "tokenizer" : "standard",
                    "filter" : [ "synonym_file" ]
                },
                "synonymAnalyzerWordnet" : {
                    "tokenizer" : "standard",
                    "filter" : [ "synonymWordnet" ]
                },
                "synonymAnalyzerWordnet_file" : {
                    "tokenizer" : "standard",
                    "filter" : [ "synonymWordnet_file" ]
                }
            },
            "filter" : {
                "synonym" : {
                    "type" : "synonym",
                    "synonyms" : [
                        "kimchy => shay",
                        "dude => elasticsearch",
                        "abides => man!"
                    ]
                },
                "synonym_file" : {
                    "type" : "synonym",
                    "synonyms_path" : "org/elasticsearch/index/analysis/synonyms/synonyms.txt"
                },
                "synonymWordnet" : {
                    "type" : "synonym",
                    "format" : "wordnet",
                    "synonyms" : [
                        "s(100000001,1,'abstain',v,1,0).",
                        "s(100000001,2,'refrain',v,1,0).",
                        "s(100000001,3,'desist',v,1,0)."
                    ]
                },
                "synonymWordnet_file" : {
                    "type" : "synonym",
                    "format" : "wordnet",
                    "synonyms_path" : "org/elasticsearch/index/analysis/synonyms/synonyms_wordnet.txt"
                }
            }
        }
    }
}

synonyms.txt

@@ -0,0 +1,3 @@
kimchy => shay
dude => elasticsearch
abides => man!

synonyms_wordnet.txt

@@ -0,0 +1,3 @@
s(100000001,1,'abstain',v,1,0).
s(100000001,2,'refrain',v,1,0).
s(100000001,3,'desist',v,1,0).
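
For reference, a standalone sketch of how the two rule formats in these test files feed the Lucene parsers the factory wires up: SolrSynonymParser for the default format and WordnetSynonymParser when format is set to wordnet. The parser constructor, add, and build calls mirror the factory code above; the WhitespaceAnalyzer and Version.LUCENE_34 are illustrative assumptions from the Lucene 3.x era, not taken from this commit.

// Builds a SynonymMap from each rule format using the same parser calls as the factory.
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.analysis.synonym.SolrSynonymParser;
import org.apache.lucene.analysis.synonym.SynonymMap;
import org.apache.lucene.analysis.synonym.WordnetSynonymParser;
import org.apache.lucene.util.Version;

import java.io.StringReader;

public class SynonymParserSketch {

    public static void main(String[] args) throws Exception {
        WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer(Version.LUCENE_34);

        // Solr format (synonyms.txt): explicit "lhs => rhs" mappings, one rule per line.
        SolrSynonymParser solrParser = new SolrSynonymParser(true, true, analyzer);
        solrParser.add(new StringReader("kimchy => shay\ndude => elasticsearch\nabides => man!\n"));
        SynonymMap solrMap = solrParser.build();

        // WordNet prolog format (synonyms_wordnet.txt): entries sharing a synset id
        // (here 100000001) are grouped as synonyms of one another.
        WordnetSynonymParser wordnetParser = new WordnetSynonymParser(true, true, analyzer);
        wordnetParser.add(new StringReader(
                "s(100000001,1,'abstain',v,1,0).\n"
                + "s(100000001,2,'refrain',v,1,0).\n"
                + "s(100000001,3,'desist',v,1,0).\n"));
        SynonymMap wordnetMap = wordnetParser.build();

        System.out.println("solr map built: " + (solrMap != null) + ", wordnet map built: " + (wordnetMap != null));
    }
}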