Mapper: An analyzer mapper that allows controlling the index analyzer of a document based on a document field, closes #485.

kimchy 2010-11-07 18:18:24 +02:00
parent b45ade467e
commit 171fa4a7e8
16 changed files with 332 additions and 63 deletions
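In short, a mapping can now declare an `_analyzer` entry that points at a document field; at index time the value of that field selects, by name, the analyzer used as the document's default (fields with an explicit field-level analyzer keep it). A minimal usage sketch, distilled from the AnalyzerMapperTests added in this commit — the names `my_analyzer_field` and `body` are placeholders, not from the commit:

// The mapping wires _analyzer to a document field...
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("_analyzer").field("field", "my_analyzer_field").endObject()
        .startObject("properties")
        .startObject("my_analyzer_field").field("type", "string").endObject()
        .startObject("body").field("type", "string").endObject()
        .endObject()
        .endObject().endObject().string();

// ...and each document then picks its analyzer by name: here "whitespace" becomes
// the index-time default for every field without an explicit analyzer.
byte[] source = XContentFactory.jsonBuilder().startObject()
        .field("my_analyzer_field", "whitespace")
        .field("body", "text indexed with the whitespace analyzer")
        .endObject().copiedBytes();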

Project spelling dictionary (IDE word list)

@@ -114,6 +114,7 @@
 <w>rebalancing</w>
 <w>regex</w>
 <w>reparse</w>
+<w>reparsed</w>
 <w>retrans</w>
 <w>retval</w>
 <w>routings</w>

SimpleEngineBenchmark.java

@@ -164,11 +164,11 @@ public class SimpleEngineBenchmark {
             String sId = Integer.toString(id);
             Document doc = doc().add(field("_id", sId))
                     .add(field("content", contentItem)).build();
-            ParsedDocument pDoc = new ParsedDocument(sId, sId, "type", doc, TRANSLOG_PAYLOAD, false);
+            ParsedDocument pDoc = new ParsedDocument(sId, sId, "type", doc, Lucene.STANDARD_ANALYZER, TRANSLOG_PAYLOAD, false);
             if (create) {
-                engine.create(new Engine.Create(pDoc, Lucene.STANDARD_ANALYZER));
+                engine.create(new Engine.Create(pDoc));
             } else {
-                engine.index(new Engine.Index(new Term("_id", sId), pDoc, Lucene.STANDARD_ANALYZER));
+                engine.index(new Engine.Index(new Term("_id", sId), pDoc));
             }
         }
         engine.refresh(new Engine.Refresh(true));
@@ -278,11 +278,11 @@ public class SimpleEngineBenchmark {
             String sId = Integer.toString(id);
             Document doc = doc().add(field("_id", sId))
                     .add(field("content", content(id))).build();
-            ParsedDocument pDoc = new ParsedDocument(sId, sId, "type", doc, TRANSLOG_PAYLOAD, false);
+            ParsedDocument pDoc = new ParsedDocument(sId, sId, "type", doc, Lucene.STANDARD_ANALYZER, TRANSLOG_PAYLOAD, false);
             if (create) {
-                engine.create(new Engine.Create(pDoc, Lucene.STANDARD_ANALYZER));
+                engine.create(new Engine.Create(pDoc));
             } else {
-                engine.index(new Engine.Index(new Term("_id", sId), pDoc, Lucene.STANDARD_ANALYZER));
+                engine.index(new Engine.Index(new Term("_id", sId), pDoc));
             }
         }
     } catch (Exception e) {

FieldNameAnalyzer.java

@@ -42,6 +42,14 @@ public class FieldNameAnalyzer extends Analyzer {
         this.defaultAnalyzer = defaultAnalyzer;
     }
 
+    public ImmutableMap<String, Analyzer> analyzers() {
+        return analyzers;
+    }
+
+    public Analyzer defaultAnalyzer() {
+        return defaultAnalyzer;
+    }
+
     @Override public TokenStream tokenStream(String fieldName, Reader reader) {
         return getAnalyzer(fieldName).tokenStream(fieldName, reader);
     }

Engine.java

@@ -264,12 +264,10 @@ public interface Engine extends IndexShardComponent, CloseableComponent {
     static class Create implements Operation {
         private final ParsedDocument doc;
-        private final Analyzer analyzer;
         private boolean refresh;
 
-        public Create(ParsedDocument doc, Analyzer analyzer) {
+        public Create(ParsedDocument doc) {
             this.doc = doc;
-            this.analyzer = analyzer;
         }
 
         @Override public Type opType() {
@@ -293,7 +291,7 @@ public interface Engine extends IndexShardComponent, CloseableComponent {
         }
 
         public Analyzer analyzer() {
-            return this.analyzer;
+            return this.doc.analyzer();
         }
 
         public byte[] source() {
@@ -312,13 +310,11 @@ public interface Engine extends IndexShardComponent, CloseableComponent {
     static class Index implements Operation {
         private final Term uid;
         private final ParsedDocument doc;
-        private final Analyzer analyzer;
         private boolean refresh;
 
-        public Index(Term uid, ParsedDocument doc, Analyzer analyzer) {
+        public Index(Term uid, ParsedDocument doc) {
             this.uid = uid;
             this.doc = doc;
-            this.analyzer = analyzer;
         }
 
         @Override public Type opType() {
@@ -338,7 +334,7 @@ public interface Engine extends IndexShardComponent, CloseableComponent {
         }
 
         public Analyzer analyzer() {
-            return this.analyzer;
+            return this.doc.analyzer();
         }
 
         public String id() {

DocumentFieldMappers.java

@@ -151,6 +151,14 @@ public class DocumentFieldMappers implements Iterable<FieldMapper> {
         return this.indexAnalyzer;
     }
 
+    /**
+     * A smart analyzer used for indexing that takes into account specific analyzers configured
+     * per {@link FieldMapper}, with a custom default analyzer for fields without an explicit analyzer.
+     */
+    public Analyzer indexAnalyzer(Analyzer defaultAnalyzer) {
+        return new FieldNameAnalyzer(indexAnalyzer.analyzers(), defaultAnalyzer);
+    }
+
     /**
      * A smart analyzer used for searching that takes into account specific analyzers configured
      * per {@link FieldMapper}.
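This overload is what lets the `_analyzer` mapper swap in a per-document default while preserving analyzers configured explicitly on individual fields. A sketch of the effective per-field lookup the wrapping FieldNameAnalyzer performs — the helper below is illustrative, not part of the commit:

// Illustrative only: resolution order of the wrapping analyzer built above.
static Analyzer resolve(FieldNameAnalyzer wrapped, String field) {
    Analyzer explicit = wrapped.analyzers().get(field); // field-level analyzer wins, e.g. "field2" -> simple
    return explicit != null ? explicit : wrapped.defaultAnalyzer(); // otherwise the per-document default
}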

ParsedDocument.java

@@ -19,6 +19,7 @@
 package org.elasticsearch.index.mapper;
 
+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.document.Document;
 
 /**
@@ -34,16 +35,19 @@ public class ParsedDocument {
     private final Document document;
 
+    private final Analyzer analyzer;
+
     private final byte[] source;
 
     private boolean mappersAdded;
 
-    public ParsedDocument(String uid, String id, String type, Document document, byte[] source, boolean mappersAdded) {
+    public ParsedDocument(String uid, String id, String type, Document document, Analyzer analyzer, byte[] source, boolean mappersAdded) {
         this.uid = uid;
         this.id = id;
         this.type = type;
         this.document = document;
         this.source = source;
+        this.analyzer = analyzer;
         this.mappersAdded = mappersAdded;
     }
 
@@ -63,6 +67,10 @@ public class ParsedDocument {
         return this.document;
     }
 
+    public Analyzer analyzer() {
+        return this.analyzer;
+    }
+
     public byte[] source() {
         return this.source;
     }

AllFieldMapper.java

@@ -29,7 +29,6 @@ import org.elasticsearch.common.lucene.all.AllField;
 import org.elasticsearch.common.lucene.all.AllTermQuery;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
-import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.MergeMappingException;
 
 import java.io.IOException;
@@ -118,14 +117,14 @@ public class AllFieldMapper extends AbstractFieldMapper<Void> implements org.ela
         // reset the entries
         context.allEntries().reset();
 
-        Analyzer analyzer = findAnalyzer(context.docMapper());
+        Analyzer analyzer = findAnalyzer(context);
         return new AllField(names.indexName(), store, termVector, context.allEntries(), analyzer);
     }
 
-    private Analyzer findAnalyzer(DocumentMapper docMapper) {
+    private Analyzer findAnalyzer(ParseContext context) {
         Analyzer analyzer = indexAnalyzer;
         if (analyzer == null) {
-            analyzer = docMapper.indexAnalyzer();
+            analyzer = context.analyzer();
             if (analyzer == null) {
                 analyzer = Lucene.STANDARD_ANALYZER;
             }

AnalyzerMapper.java (new file)

@@ -0,0 +1,112 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.mapper.xcontent;

import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.FieldMapperListener;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;

import java.io.IOException;

/**
 * @author kimchy (shay.banon)
 */
public class AnalyzerMapper implements XContentMapper {

    public static final String CONTENT_TYPE = "_analyzer";

    public static class Builder extends XContentMapper.Builder<Builder, AnalyzerMapper> {

        private String field = null;

        public Builder() {
            super(CONTENT_TYPE);
            this.builder = this;
        }

        public Builder field(String field) {
            this.field = field;
            return this;
        }

        @Override public AnalyzerMapper build(BuilderContext context) {
            return new AnalyzerMapper(field);
        }
    }

    // for now, it is parsed directly in the document parser, need to move this internal types parsing to be done here as well...
//    public static class TypeParser implements XContentMapper.TypeParser {
//        @Override public XContentMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
//            AnalyzerMapper.Builder builder = analyzer();
//            for (Map.Entry<String, Object> entry : node.entrySet()) {
//                String fieldName = Strings.toUnderscoreCase(entry.getKey());
//                Object fieldNode = entry.getValue();
//                if ("field".equals(fieldName)) {
//                    builder.field(fieldNode.toString());
//                }
//            }
//            return builder;
//        }
//    }

    private final String field;

    public AnalyzerMapper(String field) {
        this.field = field;
    }

    @Override public String name() {
        return CONTENT_TYPE;
    }

    @Override public void parse(ParseContext context) throws IOException {
        Analyzer analyzer = context.docMapper().mappers().indexAnalyzer();
        if (field != null) {
            String value = context.doc().get(field);
            if (value != null) {
                analyzer = context.analysisService().analyzer(value);
                if (analyzer == null) {
                    throw new MapperParsingException("No analyzer found for [" + value + "] from field [" + field + "]");
                }
                analyzer = context.docMapper().mappers().indexAnalyzer(analyzer);
            }
        }
        context.analyzer(analyzer);
    }

    @Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
    }

    @Override public void traverse(FieldMapperListener fieldMapperListener) {
    }

    @Override public void toXContent(XContentBuilder builder, Params params) throws IOException {
        if (field == null) {
            return;
        }
        builder.startObject(CONTENT_TYPE);
        if (field != null) {
            builder.field("field", field);
        }
        builder.endObject();
    }
}
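Resolution order in parse(), in short: if the configured field carries a value, the analyzer is looked up by name in the AnalysisService and wrapped as the default around the per-field analyzers; otherwise the mapping's plain index analyzer is used. For the serialized form, a minimal sketch of what toXContent produces — the builder setup here is an assumption for illustration:

// Minimal sketch (builder setup assumed): round-tripping the _analyzer setting.
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
new AnalyzerMapper("field_analyzer").toXContent(builder, null); // params are unused by this mapper
String json = builder.endObject().string();
// json: {"_analyzer":{"field":"field_analyzer"}}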

ParseContext.java

@@ -19,10 +19,12 @@
 package org.elasticsearch.index.mapper.xcontent;
 
+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.document.Document;
 import org.elasticsearch.common.lucene.all.AllEntries;
 import org.elasticsearch.common.util.concurrent.NotThreadSafe;
 import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.mapper.DocumentMapper;
 
 /**
@@ -41,6 +43,8 @@ public class ParseContext {
     private Document document;
 
+    private Analyzer analyzer;
+
     private String index;
 
     private String type;
@@ -75,6 +79,7 @@ public class ParseContext {
     public void reset(XContentParser parser, Document document, String type, byte[] source, DocumentMapper.ParseListener listener) {
         this.parser = parser;
         this.document = document;
+        this.analyzer = null;
         this.type = type;
         this.source = source;
         this.path.reset();
@@ -132,6 +137,10 @@ public class ParseContext {
         return this.docMapper;
     }
 
+    public AnalysisService analysisService() {
+        return docMapperParser.analysisService;
+    }
+
     public String id() {
         return id;
     }
@@ -166,6 +175,14 @@ public class ParseContext {
         return this.allEntries;
     }
 
+    public Analyzer analyzer() {
+        return this.analyzer;
+    }
+
+    public void analyzer(Analyzer analyzer) {
+        this.analyzer = analyzer;
+    }
+
     public void externalValue(Object externalValue) {
         this.externalValueSet = true;
         this.externalValue = externalValue;

XContentDocumentMapper.java

@@ -58,6 +58,8 @@ public class XContentDocumentMapper implements DocumentMapper, ToXContent {
         private AllFieldMapper allFieldMapper = new AllFieldMapper();
 
+        private AnalyzerMapper analyzerMapper = new AnalyzerMapper(null);
+
         private NamedAnalyzer indexAnalyzer;
 
         private NamedAnalyzer searchAnalyzer;
@@ -115,6 +117,11 @@ public class XContentDocumentMapper implements DocumentMapper, ToXContent {
             return this;
         }
 
+        public Builder analyzerField(AnalyzerMapper.Builder builder) {
+            this.analyzerMapper = builder.build(builderContext);
+            return this;
+        }
+
         public Builder indexAnalyzer(NamedAnalyzer indexAnalyzer) {
             this.indexAnalyzer = indexAnalyzer;
             return this;
@@ -136,7 +143,7 @@ public class XContentDocumentMapper implements DocumentMapper, ToXContent {
         public XContentDocumentMapper build(XContentDocumentMapperParser docMapperParser) {
             Preconditions.checkNotNull(rootObjectMapper, "Mapper builder must have the root object mapper set");
             return new XContentDocumentMapper(index, docMapperParser, rootObjectMapper, attributes, uidFieldMapper, idFieldMapper, typeFieldMapper, indexFieldMapper,
-                    sourceFieldMapper, allFieldMapper, indexAnalyzer, searchAnalyzer, boostFieldMapper);
+                    sourceFieldMapper, allFieldMapper, analyzerMapper, indexAnalyzer, searchAnalyzer, boostFieldMapper);
         }
     }
 
@@ -171,6 +178,8 @@ public class XContentDocumentMapper implements DocumentMapper, ToXContent {
     private final AllFieldMapper allFieldMapper;
 
+    private final AnalyzerMapper analyzerMapper;
+
     private final RootObjectMapper rootObjectMapper;
 
     private final NamedAnalyzer indexAnalyzer;
@@ -194,6 +203,7 @@ public class XContentDocumentMapper implements DocumentMapper, ToXContent {
                                   IndexFieldMapper indexFieldMapper,
                                   SourceFieldMapper sourceFieldMapper,
                                   AllFieldMapper allFieldMapper,
+                                  AnalyzerMapper analyzerMapper,
                                   NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer,
                                   @Nullable BoostFieldMapper boostFieldMapper) {
         this.index = index;
@@ -207,6 +217,7 @@ public class XContentDocumentMapper implements DocumentMapper, ToXContent {
         this.indexFieldMapper = indexFieldMapper;
         this.sourceFieldMapper = sourceFieldMapper;
         this.allFieldMapper = allFieldMapper;
+        this.analyzerMapper = analyzerMapper;
         this.boostFieldMapper = boostFieldMapper;
         this.indexAnalyzer = indexAnalyzer;
@@ -374,6 +385,7 @@ public class XContentDocumentMapper implements DocumentMapper, ToXContent {
                 context.parsedId(ParseContext.ParsedIdState.EXTERNAL);
                 idFieldMapper.parse(context);
             }
+            analyzerMapper.parse(context);
             allFieldMapper.parse(context);
         } catch (IOException e) {
             throw new MapperParsingException("Failed to parse", e);
@@ -382,7 +394,7 @@ public class XContentDocumentMapper implements DocumentMapper, ToXContent {
                 parser.close();
             }
         }
-        return new ParsedDocument(context.uid(), context.id(), context.type(), context.doc(), source, context.mappersAdded());
+        return new ParsedDocument(context.uid(), context.id(), context.type(), context.doc(), context.analyzer(), source, context.mappersAdded());
     }
 
     void addFieldMapper(FieldMapper fieldMapper) {
@@ -463,6 +475,6 @@ public class XContentDocumentMapper implements DocumentMapper, ToXContent {
             }
             // no need to pass here id and boost, since they are added to the root object mapper
             // in the constructor
-        }, indexFieldMapper, typeFieldMapper, allFieldMapper, sourceFieldMapper);
+        }, indexFieldMapper, typeFieldMapper, allFieldMapper, analyzerMapper, sourceFieldMapper);
     }
 }

XContentDocumentMapperParser.java

@@ -51,7 +51,7 @@ import static org.elasticsearch.index.mapper.xcontent.XContentTypeParsers.*;
  */
 public class XContentDocumentMapperParser extends AbstractIndexComponent implements DocumentMapperParser {
 
-    private final AnalysisService analysisService;
+    final AnalysisService analysisService;
 
     private final RootObjectMapper.TypeParser rootObjectTypeParser = new RootObjectMapper.TypeParser();
 
@@ -148,6 +148,8 @@ public class XContentDocumentMapperParser extends AbstractIndexComponent impleme
                 docBuilder.boostField(parseBoostField((Map<String, Object>) fieldNode, parserContext));
             } else if (AllFieldMapper.CONTENT_TYPE.equals(fieldName) || "allField".equals(fieldName)) {
                 docBuilder.allField(parseAllField((Map<String, Object>) fieldNode, parserContext));
+            } else if (AnalyzerMapper.CONTENT_TYPE.equals(fieldName)) {
+                docBuilder.analyzerField(parseAnalyzerField((Map<String, Object>) fieldNode, parserContext));
             } else if ("index_analyzer".equals(fieldName)) {
                 docBuilder.indexAnalyzer(analysisService.analyzer(fieldNode.toString()));
             } else if ("search_analyzer".equals(fieldName)) {
@@ -209,6 +211,18 @@ public class XContentDocumentMapperParser extends AbstractIndexComponent impleme
         return builder;
     }
 
+    private AnalyzerMapper.Builder parseAnalyzerField(Map<String, Object> analyzerNode, XContentMapper.TypeParser.ParserContext parserContext) {
+        AnalyzerMapper.Builder builder = analyzer();
+        for (Map.Entry<String, Object> entry : analyzerNode.entrySet()) {
+            String fieldName = Strings.toUnderscoreCase(entry.getKey());
+            Object fieldNode = entry.getValue();
+            if (fieldName.equals("field")) {
+                builder.field(fieldNode.toString());
+            }
+        }
+        return builder;
+    }
+
     private AllFieldMapper.Builder parseAllField(Map<String, Object> allNode, XContentMapper.TypeParser.ParserContext parserContext) {
         AllFieldMapper.Builder builder = all();
         parseField(builder, builder.name, allNode, parserContext);

XContentMapperBuilders.java

@@ -60,6 +60,10 @@ public final class XContentMapperBuilders {
         return new AllFieldMapper.Builder();
     }
 
+    public static AnalyzerMapper.Builder analyzer() {
+        return new AnalyzerMapper.Builder();
+    }
+
     public static MultiFieldMapper.Builder multiField(String name) {
         return new MultiFieldMapper.Builder(name);
     }

InternalIndexShard.java

@@ -214,7 +214,7 @@ public class InternalIndexShard extends AbstractIndexShardComponent implements I
             throw new DocumentMapperNotFoundException("No mapper found for type [" + type + "]");
         }
         ParsedDocument doc = docMapper.parse(type, id, source);
-        return new Engine.Create(doc, docMapper.mappers().indexAnalyzer());
+        return new Engine.Create(doc);
     }
 
     @Override public ParsedDocument create(String type, String id, byte[] source) throws ElasticSearchException {
@@ -236,7 +236,7 @@ public class InternalIndexShard extends AbstractIndexShardComponent implements I
             throw new DocumentMapperNotFoundException("No mapper found for type [" + type + "]");
         }
         ParsedDocument doc = docMapper.parse(type, id, source);
-        return new Engine.Index(docMapper.uidMapper().term(doc.uid()), doc, docMapper.mappers().indexAnalyzer());
+        return new Engine.Index(docMapper.uidMapper().term(doc.uid()), doc);
     }
 
     @Override public ParsedDocument index(String type, String id, byte[] source) throws ElasticSearchException {

AbstractSimpleEngineTests.java

@@ -118,8 +118,8 @@ public abstract class AbstractSimpleEngineTests {
         searchResult.release();
 
         // create a document
-        ParsedDocument doc = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "test")).build(), B_1, false);
-        engine.create(new Engine.Create(doc, Lucene.STANDARD_ANALYZER));
+        ParsedDocument doc = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
+        engine.create(new Engine.Create(doc));
 
         // its not there...
         searchResult = engine.searcher();
@@ -137,8 +137,8 @@ public abstract class AbstractSimpleEngineTests {
         searchResult.release();
 
         // now do an update
-        doc = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "test1")).build(), B_1, false);
-        engine.index(new Engine.Index(newUid("1"), doc, Lucene.STANDARD_ANALYZER));
+        doc = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "test1")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
+        engine.index(new Engine.Index(newUid("1"), doc));
 
         // its not updated yet...
         searchResult = engine.searcher();
@@ -176,8 +176,8 @@ public abstract class AbstractSimpleEngineTests {
         searchResult.release();
 
         // add it back
-        doc = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "test")).build(), B_1, false);
-        engine.create(new Engine.Create(doc, Lucene.STANDARD_ANALYZER));
+        doc = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
+        engine.create(new Engine.Create(doc));
 
         // its not there...
         searchResult = engine.searcher();
@@ -201,8 +201,8 @@ public abstract class AbstractSimpleEngineTests {
         // make sure we can still work with the engine
         // now do an update
-        doc = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "test1")).build(), B_1, false);
-        engine.index(new Engine.Index(newUid("1"), doc, Lucene.STANDARD_ANALYZER));
+        doc = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "test1")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
+        engine.index(new Engine.Index(newUid("1"), doc));
 
         // its not updated yet...
         searchResult = engine.searcher();
@@ -229,14 +229,14 @@ public abstract class AbstractSimpleEngineTests {
         searchResult.release();
 
         List<Engine.Operation> ops = Lists.newArrayList();
-        ParsedDocument doc = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "1_test")).build(), B_1, false);
-        ops.add(new Engine.Create(doc, Lucene.STANDARD_ANALYZER));
-        doc = new ParsedDocument("2", "2", "test", doc().add(field("_uid", "2")).add(field("value", "2_test")).build(), B_2, false);
-        ops.add(new Engine.Create(doc, Lucene.STANDARD_ANALYZER));
-        doc = new ParsedDocument("3", "3", "test", doc().add(field("_uid", "3")).add(field("value", "3_test")).build(), B_3, false);
-        ops.add(new Engine.Create(doc, Lucene.STANDARD_ANALYZER));
-        doc = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "1_test1")).build(), B_1, false);
-        ops.add(new Engine.Index(newUid("1"), doc, Lucene.STANDARD_ANALYZER));
+        ParsedDocument doc = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "1_test")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
+        ops.add(new Engine.Create(doc));
+        doc = new ParsedDocument("2", "2", "test", doc().add(field("_uid", "2")).add(field("value", "2_test")).build(), Lucene.STANDARD_ANALYZER, B_2, false);
+        ops.add(new Engine.Create(doc));
+        doc = new ParsedDocument("3", "3", "test", doc().add(field("_uid", "3")).add(field("value", "3_test")).build(), Lucene.STANDARD_ANALYZER, B_3, false);
+        ops.add(new Engine.Create(doc));
+        doc = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "1_test1")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
+        ops.add(new Engine.Index(newUid("1"), doc));
         ops.add(new Engine.Delete(newUid("2")));
 
         EngineException[] failures = engine.bulk(new Engine.Bulk(ops.toArray(new Engine.Operation[ops.size()])));
@@ -261,8 +261,8 @@ public abstract class AbstractSimpleEngineTests {
         searchResult.release();
 
         // create a document
-        ParsedDocument doc = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "test")).build(), B_1, false);
-        engine.create(new Engine.Create(doc, Lucene.STANDARD_ANALYZER));
+        ParsedDocument doc = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
+        engine.create(new Engine.Create(doc));
 
         // its not there...
         searchResult = engine.searcher();
@@ -294,8 +294,8 @@ public abstract class AbstractSimpleEngineTests {
     @Test public void testSimpleSnapshot() throws Exception {
         // create a document
-        ParsedDocument doc1 = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "test")).build(), B_1, false);
-        engine.create(new Engine.Create(doc1, Lucene.STANDARD_ANALYZER));
+        ParsedDocument doc1 = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
+        engine.create(new Engine.Create(doc1));
 
         final ExecutorService executorService = Executors.newCachedThreadPool();
@@ -310,11 +310,11 @@ public abstract class AbstractSimpleEngineTests {
             Future<Object> future = executorService.submit(new Callable<Object>() {
                 @Override public Object call() throws Exception {
                     engine.flush(new Engine.Flush());
-                    ParsedDocument doc2 = new ParsedDocument("2", "2", "test", doc().add(field("_uid", "2")).add(field("value", "test")).build(), B_2, false);
-                    engine.create(new Engine.Create(doc2, Lucene.STANDARD_ANALYZER));
+                    ParsedDocument doc2 = new ParsedDocument("2", "2", "test", doc().add(field("_uid", "2")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_2, false);
+                    engine.create(new Engine.Create(doc2));
                     engine.flush(new Engine.Flush());
-                    ParsedDocument doc3 = new ParsedDocument("3", "3", "test", doc().add(field("_uid", "3")).add(field("value", "test")).build(), B_3, false);
-                    engine.create(new Engine.Create(doc3, Lucene.STANDARD_ANALYZER));
+                    ParsedDocument doc3 = new ParsedDocument("3", "3", "test", doc().add(field("_uid", "3")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_3, false);
+                    engine.create(new Engine.Create(doc3));
                     return null;
                 }
             });
@@ -348,8 +348,8 @@ public abstract class AbstractSimpleEngineTests {
     }
 
     @Test public void testSimpleRecover() throws Exception {
-        ParsedDocument doc = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "test")).build(), B_1, false);
-        engine.create(new Engine.Create(doc, Lucene.STANDARD_ANALYZER));
+        ParsedDocument doc = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
+        engine.create(new Engine.Create(doc));
         engine.flush(new Engine.Flush());
 
         engine.recover(new Engine.RecoveryHandler() {
@@ -389,11 +389,11 @@ public abstract class AbstractSimpleEngineTests {
     }
 
     @Test public void testRecoverWithOperationsBetweenPhase1AndPhase2() throws Exception {
-        ParsedDocument doc1 = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "test")).build(), B_1, false);
-        engine.create(new Engine.Create(doc1, Lucene.STANDARD_ANALYZER));
+        ParsedDocument doc1 = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
+        engine.create(new Engine.Create(doc1));
         engine.flush(new Engine.Flush());
-        ParsedDocument doc2 = new ParsedDocument("2", "2", "test", doc().add(field("_uid", "2")).add(field("value", "test")).build(), B_2, false);
-        engine.create(new Engine.Create(doc2, Lucene.STANDARD_ANALYZER));
+        ParsedDocument doc2 = new ParsedDocument("2", "2", "test", doc().add(field("_uid", "2")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_2, false);
+        engine.create(new Engine.Create(doc2));
 
         engine.recover(new Engine.RecoveryHandler() {
             @Override public void phase1(SnapshotIndexCommit snapshot) throws EngineException {
@@ -416,11 +416,11 @@ public abstract class AbstractSimpleEngineTests {
     }
 
     @Test public void testRecoverWithOperationsBetweenPhase1AndPhase2AndPhase3() throws Exception {
-        ParsedDocument doc1 = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "test")).build(), B_1, false);
-        engine.create(new Engine.Create(doc1, Lucene.STANDARD_ANALYZER));
+        ParsedDocument doc1 = new ParsedDocument("1", "1", "test", doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
+        engine.create(new Engine.Create(doc1));
         engine.flush(new Engine.Flush());
-        ParsedDocument doc2 = new ParsedDocument("2", "2", "test", doc().add(field("_uid", "2")).add(field("value", "test")).build(), B_2, false);
-        engine.create(new Engine.Create(doc2, Lucene.STANDARD_ANALYZER));
+        ParsedDocument doc2 = new ParsedDocument("2", "2", "test", doc().add(field("_uid", "2")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_2, false);
+        engine.create(new Engine.Create(doc2));
 
         engine.recover(new Engine.RecoveryHandler() {
             @Override public void phase1(SnapshotIndexCommit snapshot) throws EngineException {
@@ -433,8 +433,8 @@ public abstract class AbstractSimpleEngineTests {
                 assertThat(create.source(), equalTo(B_2));
 
                 // add for phase3
-                ParsedDocument doc3 = new ParsedDocument("3", "3", "test", doc().add(field("_uid", "3")).add(field("value", "test")).build(), B_3, false);
-                engine.create(new Engine.Create(doc3, Lucene.STANDARD_ANALYZER));
+                ParsedDocument doc3 = new ParsedDocument("3", "3", "test", doc().add(field("_uid", "3")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_3, false);
+                engine.create(new Engine.Create(doc3));
             }
 
             @Override public void phase3(Translog.Snapshot snapshot) throws EngineException {

MapperTests.java

@@ -19,11 +19,16 @@
 package org.elasticsearch.index.mapper.xcontent;
 
+import org.elasticsearch.common.inject.Injector;
+import org.elasticsearch.common.inject.ModulesBuilder;
 import org.elasticsearch.common.settings.ImmutableSettings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.Index;
+import org.elasticsearch.index.IndexNameModule;
+import org.elasticsearch.index.analysis.AnalysisModule;
 import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.settings.IndexSettingsModule;
 
 /**
  * @author kimchy (shay.banon)
@@ -31,10 +36,19 @@ import org.elasticsearch.index.mapper.MapperService;
 public class MapperTests {
 
     public static XContentDocumentMapperParser newParser() {
-        return new XContentDocumentMapperParser(new Index("test"), new AnalysisService(new Index("test")));
+        return new XContentDocumentMapperParser(new Index("test"), newAnalysisService());
     }
 
     public static MapperService newMapperService() {
-        return new MapperService(new Index("test"), ImmutableSettings.Builder.EMPTY_SETTINGS, new Environment(), new AnalysisService(new Index("test")));
+        return new MapperService(new Index("test"), ImmutableSettings.Builder.EMPTY_SETTINGS, new Environment(), newAnalysisService());
+    }
+
+    public static AnalysisService newAnalysisService() {
+        Injector injector = new ModulesBuilder().add(
+                new IndexSettingsModule(ImmutableSettings.Builder.EMPTY_SETTINGS),
+                new IndexNameModule(new Index("test")),
+                new AnalysisModule(ImmutableSettings.Builder.EMPTY_SETTINGS)).createInjector();
+        return injector.getInstance(AnalysisService.class);
     }
 }
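The injector-built service matters for the new analyzer test: resolving analyzers by name (e.g. "whitespace", "simple") needs the defaults that AnalysisModule registers, which the bare new AnalysisService(new Index("test")) did not provide. A usage sketch, assuming those defaults are registered:

// Named lookup as the _analyzer mapper performs it at parse time.
AnalysisService analysisService = MapperTests.newAnalysisService();
NamedAnalyzer whitespace = analysisService.analyzer("whitespace");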

AnalyzerMapperTests.java (new file)

@@ -0,0 +1,76 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.mapper.xcontent.analyzer;

import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.analysis.FieldNameAnalyzer;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.xcontent.MapperTests;
import org.elasticsearch.index.mapper.xcontent.XContentDocumentMapper;
import org.testng.annotations.Test;

import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;

/**
 * @author kimchy (shay.banon)
 */
public class AnalyzerMapperTests {

    @Test public void testAnalyzerMapping() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("_analyzer").field("field", "field_analyzer").endObject()
                .startObject("properties")
                .startObject("field_analyzer").field("type", "string").endObject()
                .startObject("field1").field("type", "string").endObject()
                .startObject("field2").field("type", "string").field("analyzer", "simple").endObject()
                .endObject()
                .endObject().endObject().string();

        XContentDocumentMapper documentMapper = MapperTests.newParser().parse(mapping);

        ParsedDocument doc = documentMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject()
                .field("field_analyzer", "whitespace")
                .field("field1", "value1")
                .field("field2", "value2")
                .endObject().copiedBytes());

        FieldNameAnalyzer analyzer = (FieldNameAnalyzer) doc.analyzer();
        assertThat(((NamedAnalyzer) analyzer.defaultAnalyzer()).name(), equalTo("whitespace"));
        assertThat(analyzer.analyzers().get("field1"), nullValue());
        assertThat(((NamedAnalyzer) analyzer.analyzers().get("field2")).name(), equalTo("simple"));

        // check that the mapping serializes and de-serializes correctly
        XContentDocumentMapper reparsedMapper = MapperTests.newParser().parse(documentMapper.mappingSource().string());

        doc = reparsedMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject()
                .field("field_analyzer", "whitespace")
                .field("field1", "value1")
                .field("field2", "value2")
                .endObject().copiedBytes());

        analyzer = (FieldNameAnalyzer) doc.analyzer();
        assertThat(((NamedAnalyzer) analyzer.defaultAnalyzer()).name(), equalTo("whitespace"));
        assertThat(analyzer.analyzers().get("field1"), nullValue());
        assertThat(((NamedAnalyzer) analyzer.analyzers().get("field2")).name(), equalTo("simple"));
    }
}