internal refactoring simplifying the type mapping system

kimchy 2011-06-04 03:55:59 +03:00
parent dcd2bbd9a9
commit abfc7f0db4
162 changed files with 2018 additions and 2232 deletions
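Most of the diff is a mechanical package move: the built-in field mappers (AllFieldMapper, UidFieldMapper, IdFieldMapper, TypeFieldMapper, IndexFieldMapper, SourceFieldMapper, RoutingFieldMapper, ParentFieldMapper, BoostFieldMapper, AnalyzerMapper) now live under org.elasticsearch.index.mapper.internal, the type-specific mappers move to the core, object, multifield, geo and ip subpackages, and the XContent* classes drop their prefix (XContentDocumentMapper becomes DocumentMapper, XContentMapper becomes Mapper, XContentMapperBuilders becomes MapperBuilders). A condensed before/after sketch of the import changes a caller would make, with the paths taken directly from the changed imports below:

    // before this commit
    import org.elasticsearch.index.mapper.AllFieldMapper;
    import org.elasticsearch.index.mapper.UidFieldMapper;
    import org.elasticsearch.index.mapper.xcontent.XContentDocumentMapper;
    import org.elasticsearch.index.mapper.xcontent.XContentDocumentMapperParser;

    // after this commit
    import org.elasticsearch.index.mapper.internal.AllFieldMapper;
    import org.elasticsearch.index.mapper.internal.UidFieldMapper;
    import org.elasticsearch.index.mapper.DocumentMapper;        // was XContentDocumentMapper
    import org.elasticsearch.index.mapper.DocumentMapperParser;  // was XContentDocumentMapperParser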

View File

@@ -30,10 +30,10 @@ import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.io.FastStringReader;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.AllFieldMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import java.io.IOException;

View File

@@ -40,7 +40,11 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.bloom.BloomCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.selector.FieldMappersFieldSelector;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.shard.service.IndexShard;
import org.elasticsearch.indices.IndicesService;

View File

@@ -40,8 +40,8 @@ import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.MoreLikeThisFieldQueryBuilder;
import org.elasticsearch.indices.IndicesService;

View File

@@ -36,9 +36,9 @@ import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.id.IdCache;
import org.elasticsearch.index.cache.id.IdReaderCache;
import org.elasticsearch.index.mapper.ParentFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.UidFieldMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.settings.IndexSettings;
import java.util.ArrayList;

View File

@@ -38,7 +38,7 @@ import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.UidFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.shard.IndexShardComponent;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.translog.Translog;

View File

@@ -49,7 +49,7 @@ import org.elasticsearch.index.cache.bloom.BloomCache;
import org.elasticsearch.index.deletionpolicy.SnapshotDeletionPolicy;
import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit;
import org.elasticsearch.index.engine.*;
import org.elasticsearch.index.mapper.UidFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.merge.policy.EnableMergePolicy;
import org.elasticsearch.index.merge.policy.MergePolicyProvider;
import org.elasticsearch.index.merge.scheduler.MergeSchedulerProvider;

View File

@@ -1,33 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
/**
* @author kimchy (shay.banon)
*/
public interface AllFieldMapper extends FieldMapper<Void>, InternalMapper {
public static final String NAME = "_all";
/**
* Is the all field enabled or not.
*/
public boolean enabled();
}

View File

@@ -1,30 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
/**
* A field mapper that allows to control the boosting of a parsed document. Can be treated as
* any other field mapper by being stored and analyzed, though, by default, it does neither.
*
* @author kimchy (Shay Banon)
*/
public interface BoostFieldMapper extends FieldMapper<Float>, InternalMapper {
}

View File

@@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.util.concurrent.NotThreadSafe;

View File

@@ -20,118 +20,38 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.search.Filter;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Preconditions;
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.util.concurrent.ThreadSafe;
import org.elasticsearch.common.compress.lzf.LZF;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.CachedStreamInput;
import org.elasticsearch.common.io.stream.LZFStreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.internal.*;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
import java.io.IOException;
import java.util.List;
import static org.elasticsearch.common.collect.Lists.*;
/**
* @author kimchy (shay.banon)
*/
@ThreadSafe
public interface DocumentMapper {
void close();
String type();
/**
* When constructed by parsing a mapping definition, will return it. Otherwise,
* returns <tt>null</tt>.
*/
CompressedString mappingSource();
/**
* Attributes of this type mappings.
*/
ImmutableMap<String, Object> meta();
/**
* Generates the source of the mapper based on the current mappings.
*/
void refreshSource() throws FailedToGenerateSourceMapperException;
UidFieldMapper uidMapper();
IdFieldMapper idMapper();
TypeFieldMapper typeMapper();
IndexFieldMapper indexMapper();
SourceFieldMapper sourceMapper();
BoostFieldMapper boostMapper();
AllFieldMapper allFieldMapper();
RoutingFieldMapper routingFieldMapper();
ParentFieldMapper parentFieldMapper();
DocumentFieldMappers mappers();
/**
* The default index analyzer to be used. Note, the {@link DocumentFieldMappers#indexAnalyzer()} should
* probably be used instead.
*/
Analyzer indexAnalyzer();
/**
* The default search analyzer to be used. Note, the {@link DocumentFieldMappers#searchAnalyzer()} should
* probably be used instead.
*/
Analyzer searchAnalyzer();
/**
* A filter based on the type of the field.
*/
Filter typeFilter();
/**
* Parses the source into a parsed document.
*
* <p>Validates that the source has the provided id and type. Note, most times
* we will already have the id and the type even though they exist in the source as well.
*/
ParsedDocument parse(byte[] source) throws MapperParsingException;
/**
* Parses the source into a parsed document.
*
* <p>Validates that the source has the provided id and type. Note, most times
* we will already have the id and the type even though they exist in the source as well.
*/
ParsedDocument parse(String type, String id, byte[] source) throws MapperParsingException;
/**
* Parses the source into a parsed document.
*
* <p>Validates that the source has the provided id and type. Note, most times
* we will already have the id and the type even though they exist in the source as well.
*/
ParsedDocument parse(SourceToParse source) throws MapperParsingException;
/**
* Parses the source into a parsed document.
*
* <p>Validates that the source has the provided id and type. Note, most times
* we will already have the id and the type even though they exist in the source as well.
*/
ParsedDocument parse(SourceToParse source, @Nullable ParseListener listener) throws MapperParsingException;
/**
* Merges this document mapper with the provided document mapper. If there are conflicts, the
* {@link MergeResult} will hold them.
*/
MergeResult merge(DocumentMapper mergeWith, MergeFlags mergeFlags) throws MergeMappingException;
/**
* Adds a field mapper listener.
*/
void addFieldMapperListener(FieldMapperListener fieldMapperListener, boolean includeExisting);
public class DocumentMapper implements ToXContent {
/**
* A result of a merge.
@@ -203,4 +123,556 @@ public interface DocumentMapper {
return true;
}
}
public static class Builder {
private UidFieldMapper uidFieldMapper = new UidFieldMapper();
private IdFieldMapper idFieldMapper = new IdFieldMapper();
private TypeFieldMapper typeFieldMapper = new TypeFieldMapper();
private IndexFieldMapper indexFieldMapper = new IndexFieldMapper();
private SourceFieldMapper sourceFieldMapper = new SourceFieldMapper();
private SizeFieldMapper sizeFieldMapper = new SizeFieldMapper();
private RoutingFieldMapper routingFieldMapper = new RoutingFieldMapper();
private BoostFieldMapper boostFieldMapper = new BoostFieldMapper();
private AllFieldMapper allFieldMapper = new AllFieldMapper();
private AnalyzerMapper analyzerMapper = new AnalyzerMapper();
private ParentFieldMapper parentFieldMapper = null;
private NamedAnalyzer indexAnalyzer;
private NamedAnalyzer searchAnalyzer;
private final String index;
private final RootObjectMapper rootObjectMapper;
private ImmutableMap<String, Object> meta = ImmutableMap.of();
private Mapper.BuilderContext builderContext = new Mapper.BuilderContext(new ContentPath(1));
public Builder(String index, @Nullable Settings indexSettings, RootObjectMapper.Builder builder) {
this.index = index;
this.rootObjectMapper = builder.build(builderContext);
if (indexSettings != null) {
String idIndexed = indexSettings.get("index.mapping._id.indexed");
if (idIndexed != null && Booleans.parseBoolean(idIndexed, false)) {
idFieldMapper = new IdFieldMapper(Field.Index.NOT_ANALYZED);
}
}
}
public Builder meta(ImmutableMap<String, Object> meta) {
this.meta = meta;
return this;
}
public Builder sourceField(SourceFieldMapper.Builder builder) {
this.sourceFieldMapper = builder.build(builderContext);
return this;
}
public Builder sizeField(SizeFieldMapper.Builder builder) {
this.sizeFieldMapper = builder.build(builderContext);
return this;
}
public Builder idField(IdFieldMapper.Builder builder) {
this.idFieldMapper = builder.build(builderContext);
return this;
}
public Builder uidField(UidFieldMapper.Builder builder) {
this.uidFieldMapper = builder.build(builderContext);
return this;
}
public Builder typeField(TypeFieldMapper.Builder builder) {
this.typeFieldMapper = builder.build(builderContext);
return this;
}
public Builder indexField(IndexFieldMapper.Builder builder) {
this.indexFieldMapper = builder.build(builderContext);
return this;
}
public Builder routingField(RoutingFieldMapper.Builder builder) {
this.routingFieldMapper = builder.build(builderContext);
return this;
}
public Builder parentFiled(ParentFieldMapper.Builder builder) {
this.parentFieldMapper = builder.build(builderContext);
return this;
}
public Builder boostField(BoostFieldMapper.Builder builder) {
this.boostFieldMapper = builder.build(builderContext);
return this;
}
public Builder allField(AllFieldMapper.Builder builder) {
this.allFieldMapper = builder.build(builderContext);
return this;
}
public Builder analyzerField(AnalyzerMapper.Builder builder) {
this.analyzerMapper = builder.build(builderContext);
return this;
}
public Builder indexAnalyzer(NamedAnalyzer indexAnalyzer) {
this.indexAnalyzer = indexAnalyzer;
return this;
}
public boolean hasIndexAnalyzer() {
return indexAnalyzer != null;
}
public Builder searchAnalyzer(NamedAnalyzer searchAnalyzer) {
this.searchAnalyzer = searchAnalyzer;
return this;
}
public boolean hasSearchAnalyzer() {
return searchAnalyzer != null;
}
public DocumentMapper build(DocumentMapperParser docMapperParser) {
Preconditions.checkNotNull(rootObjectMapper, "Mapper builder must have the root object mapper set");
return new DocumentMapper(index, docMapperParser, rootObjectMapper, meta, uidFieldMapper, idFieldMapper, typeFieldMapper, indexFieldMapper,
sourceFieldMapper, sizeFieldMapper, parentFieldMapper, routingFieldMapper, allFieldMapper, analyzerMapper, indexAnalyzer, searchAnalyzer, boostFieldMapper);
}
}
private ThreadLocal<ParseContext> cache = new ThreadLocal<ParseContext>() {
@Override protected ParseContext initialValue() {
return new ParseContext(index, docMapperParser, DocumentMapper.this, new ContentPath(0));
}
};
private final String index;
private final String type;
private final DocumentMapperParser docMapperParser;
private volatile ImmutableMap<String, Object> meta;
private volatile CompressedString mappingSource;
private final UidFieldMapper uidFieldMapper;
private final IdFieldMapper idFieldMapper;
private final TypeFieldMapper typeFieldMapper;
private final IndexFieldMapper indexFieldMapper;
private final SourceFieldMapper sourceFieldMapper;
private final SizeFieldMapper sizeFieldMapper;
private final RoutingFieldMapper routingFieldMapper;
private final ParentFieldMapper parentFieldMapper;
private final BoostFieldMapper boostFieldMapper;
private final AllFieldMapper allFieldMapper;
private final AnalyzerMapper analyzerMapper;
private final RootObjectMapper rootObjectMapper;
private final NamedAnalyzer indexAnalyzer;
private final NamedAnalyzer searchAnalyzer;
private volatile DocumentFieldMappers fieldMappers;
private final List<FieldMapperListener> fieldMapperListeners = newArrayList();
private final Filter typeFilter;
private final Object mutex = new Object();
public DocumentMapper(String index, DocumentMapperParser docMapperParser,
RootObjectMapper rootObjectMapper,
ImmutableMap<String, Object> meta,
UidFieldMapper uidFieldMapper,
IdFieldMapper idFieldMapper,
TypeFieldMapper typeFieldMapper,
IndexFieldMapper indexFieldMapper,
SourceFieldMapper sourceFieldMapper,
SizeFieldMapper sizeFieldMapper,
@Nullable ParentFieldMapper parentFieldMapper,
RoutingFieldMapper routingFieldMapper,
AllFieldMapper allFieldMapper,
AnalyzerMapper analyzerMapper,
NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer,
@Nullable BoostFieldMapper boostFieldMapper) {
this.index = index;
this.type = rootObjectMapper.name();
this.docMapperParser = docMapperParser;
this.meta = meta;
this.rootObjectMapper = rootObjectMapper;
this.uidFieldMapper = uidFieldMapper;
this.idFieldMapper = idFieldMapper;
this.typeFieldMapper = typeFieldMapper;
this.indexFieldMapper = indexFieldMapper;
this.sourceFieldMapper = sourceFieldMapper;
this.sizeFieldMapper = sizeFieldMapper;
this.parentFieldMapper = parentFieldMapper;
this.routingFieldMapper = routingFieldMapper;
this.allFieldMapper = allFieldMapper;
this.analyzerMapper = analyzerMapper;
this.boostFieldMapper = boostFieldMapper;
this.indexAnalyzer = indexAnalyzer;
this.searchAnalyzer = searchAnalyzer;
this.typeFilter = typeMapper().fieldFilter(type);
rootObjectMapper.putMapper(idFieldMapper);
if (boostFieldMapper != null) {
rootObjectMapper.putMapper(boostFieldMapper);
}
if (parentFieldMapper != null) {
rootObjectMapper.putMapper(parentFieldMapper);
// also, mark the routing as required!
routingFieldMapper.markAsRequired();
}
rootObjectMapper.putMapper(routingFieldMapper);
final List<FieldMapper> tempFieldMappers = newArrayList();
// add the basic ones
if (indexFieldMapper.enabled()) {
tempFieldMappers.add(indexFieldMapper);
}
tempFieldMappers.add(typeFieldMapper);
tempFieldMappers.add(sourceFieldMapper);
tempFieldMappers.add(sizeFieldMapper);
tempFieldMappers.add(uidFieldMapper);
tempFieldMappers.add(allFieldMapper);
// now traverse and get all the statically defined ones
rootObjectMapper.traverse(new FieldMapperListener() {
@Override public void fieldMapper(FieldMapper fieldMapper) {
tempFieldMappers.add(fieldMapper);
}
});
this.fieldMappers = new DocumentFieldMappers(this, tempFieldMappers);
refreshSource();
}
public String type() {
return this.type;
}
public ImmutableMap<String, Object> meta() {
return this.meta;
}
public CompressedString mappingSource() {
return this.mappingSource;
}
public RootObjectMapper root() {
return this.rootObjectMapper;
}
public UidFieldMapper uidMapper() {
return this.uidFieldMapper;
}
public IdFieldMapper idMapper() {
return this.idFieldMapper;
}
public IndexFieldMapper indexMapper() {
return this.indexFieldMapper;
}
public TypeFieldMapper typeMapper() {
return this.typeFieldMapper;
}
public SourceFieldMapper sourceMapper() {
return this.sourceFieldMapper;
}
public BoostFieldMapper boostMapper() {
return this.boostFieldMapper;
}
public AllFieldMapper allFieldMapper() {
return this.allFieldMapper;
}
public RoutingFieldMapper routingFieldMapper() {
return this.routingFieldMapper;
}
public ParentFieldMapper parentFieldMapper() {
return this.parentFieldMapper;
}
public Analyzer indexAnalyzer() {
return this.indexAnalyzer;
}
public Analyzer searchAnalyzer() {
return this.searchAnalyzer;
}
public Filter typeFilter() {
return this.typeFilter;
}
public DocumentFieldMappers mappers() {
return this.fieldMappers;
}
public ParsedDocument parse(byte[] source) throws MapperParsingException {
return parse(SourceToParse.source(source));
}
public ParsedDocument parse(String type, String id, byte[] source) throws MapperParsingException {
return parse(SourceToParse.source(source).type(type).id(id));
}
public ParsedDocument parse(SourceToParse source) throws MapperParsingException {
return parse(source, null);
}
public ParsedDocument parse(SourceToParse source, @Nullable ParseListener listener) throws MapperParsingException {
ParseContext context = cache.get();
if (source.type() != null && !source.type().equals(this.type)) {
throw new MapperParsingException("Type mismatch, provide type [" + source.type() + "] but mapper is of type [" + this.type + "]");
}
source.type(this.type);
XContentParser parser = source.parser();
try {
if (parser == null) {
if (LZF.isCompressed(source.source())) {
BytesStreamInput siBytes = new BytesStreamInput(source.source());
LZFStreamInput siLzf = CachedStreamInput.cachedLzf(siBytes);
XContentType contentType = XContentFactory.xContentType(siLzf);
siLzf.resetToBufferStart();
parser = XContentFactory.xContent(contentType).createParser(siLzf);
} else {
parser = XContentFactory.xContent(source.source()).createParser(source.source());
}
}
context.reset(parser, new Document(), type, source.source(), source.flyweight(), listener);
// will result in START_OBJECT
int countDownTokens = 0;
XContentParser.Token token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new MapperParsingException("Malformed content, must start with an object");
}
token = parser.nextToken();
if (token != XContentParser.Token.FIELD_NAME) {
throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist");
}
if (parser.currentName().equals(type)) {
// first field is the same as the type, this might be because the type is provided, and the object exists within it
// or because there is a valid field that by chance is named as the type
// Note, in this case, we only handle plain value types, an object type will be analyzed as if it was the type itself
// and other same level fields will be ignored
token = parser.nextToken();
countDownTokens++;
// commented out, allow for same type with START_OBJECT, we do our best to handle it except for the above corner case
// if (token != XContentParser.Token.START_OBJECT) {
// throw new MapperException("Malformed content, a field with the same name as the type must be an object with the properties/fields within it");
// }
}
if (sizeFieldMapper.enabled()) {
context.externalValue(source.source().length);
sizeFieldMapper.parse(context);
}
if (sourceFieldMapper.enabled()) {
sourceFieldMapper.parse(context);
}
// set the id if we have it so we can validate it later on, also, add the uid if we can
if (source.id() != null) {
context.id(source.id());
uidFieldMapper.parse(context);
}
typeFieldMapper.parse(context);
if (source.routing() != null) {
context.externalValue(source.routing());
routingFieldMapper.parse(context);
}
indexFieldMapper.parse(context);
rootObjectMapper.parse(context);
for (int i = 0; i < countDownTokens; i++) {
parser.nextToken();
}
// if we did not get the id, we need to parse the uid into the document now, after it was added
if (source.id() == null) {
if (context.id() == null) {
if (!source.flyweight()) {
throw new MapperParsingException("No id found while parsing the content source");
}
} else {
uidFieldMapper.parse(context);
}
}
if (context.parsedIdState() != ParseContext.ParsedIdState.PARSED) {
if (context.id() == null) {
if (!source.flyweight()) {
throw new MapperParsingException("No id mapping with [_id] found in the content, and not explicitly set");
}
} else {
// mark it as external, so we can parse it
context.parsedId(ParseContext.ParsedIdState.EXTERNAL);
idFieldMapper.parse(context);
}
}
if (parentFieldMapper != null) {
context.externalValue(source.parent());
parentFieldMapper.parse(context);
}
analyzerMapper.parse(context);
allFieldMapper.parse(context);
// validate aggregated mappers (TODO: need to be added as a phase to any field mapper)
routingFieldMapper.validate(context, source.routing());
} catch (IOException e) {
throw new MapperParsingException("Failed to parse", e);
} finally {
// only close the parser when its not provided externally
if (source.parser() == null && parser != null) {
parser.close();
}
}
ParsedDocument doc = new ParsedDocument(context.uid(), context.id(), context.type(), source.routing(), context.doc(), context.analyzer(),
context.source(), context.mappersAdded()).parent(source.parent());
// reset the context to free up memory
context.reset(null, null, null, null, false, null);
return doc;
}
public void addFieldMapper(FieldMapper fieldMapper) {
synchronized (mutex) {
fieldMappers = fieldMappers.concat(this, fieldMapper);
for (FieldMapperListener listener : fieldMapperListeners) {
listener.fieldMapper(fieldMapper);
}
}
}
public void addFieldMapperListener(FieldMapperListener fieldMapperListener, boolean includeExisting) {
synchronized (mutex) {
fieldMapperListeners.add(fieldMapperListener);
if (includeExisting) {
if (indexFieldMapper.enabled()) {
fieldMapperListener.fieldMapper(indexFieldMapper);
}
fieldMapperListener.fieldMapper(sourceFieldMapper);
fieldMapperListener.fieldMapper(sizeFieldMapper);
fieldMapperListener.fieldMapper(typeFieldMapper);
fieldMapperListener.fieldMapper(uidFieldMapper);
fieldMapperListener.fieldMapper(allFieldMapper);
rootObjectMapper.traverse(fieldMapperListener);
}
}
}
public synchronized MergeResult merge(DocumentMapper mergeWith, MergeFlags mergeFlags) {
DocumentMapper xContentMergeWith = (DocumentMapper) mergeWith;
MergeContext mergeContext = new MergeContext(this, mergeFlags);
rootObjectMapper.merge(xContentMergeWith.rootObjectMapper, mergeContext);
allFieldMapper.merge(xContentMergeWith.allFieldMapper, mergeContext);
analyzerMapper.merge(xContentMergeWith.analyzerMapper, mergeContext);
sourceFieldMapper.merge(xContentMergeWith.sourceFieldMapper, mergeContext);
sizeFieldMapper.merge(xContentMergeWith.sizeFieldMapper, mergeContext);
if (!mergeFlags.simulate()) {
// let the merge with attributes to override the attributes
meta = mergeWith.meta();
// update the source of the merged one
refreshSource();
}
return new MergeResult(mergeContext.buildConflicts());
}
public void refreshSource() throws FailedToGenerateSourceMapperException {
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.startObject();
toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
this.mappingSource = new CompressedString(builder.string());
} catch (Exception e) {
throw new FailedToGenerateSourceMapperException(e.getMessage(), e);
}
}
public void close() {
cache.remove();
rootObjectMapper.close();
idFieldMapper.close();
indexFieldMapper.close();
typeFieldMapper.close();
allFieldMapper.close();
analyzerMapper.close();
sourceFieldMapper.close();
sizeFieldMapper.close();
}
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
rootObjectMapper.toXContent(builder, params, new ToXContent() {
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (indexAnalyzer != null && searchAnalyzer != null && indexAnalyzer.name().equals(searchAnalyzer.name()) && !indexAnalyzer.name().startsWith("_")) {
if (!indexAnalyzer.name().equals("default")) {
// same analyzers, output it once
builder.field("analyzer", indexAnalyzer.name());
}
} else {
if (indexAnalyzer != null && !indexAnalyzer.name().startsWith("_")) {
if (!indexAnalyzer.name().equals("default")) {
builder.field("index_analyzer", indexAnalyzer.name());
}
}
if (searchAnalyzer != null && !searchAnalyzer.name().startsWith("_")) {
if (!searchAnalyzer.name().equals("default")) {
builder.field("search_analyzer", searchAnalyzer.name());
}
}
}
if (meta != null && !meta.isEmpty()) {
builder.field("_meta", meta());
}
return builder;
}
// no need to pass here id and boost, since they are added to the root object mapper
// in the constructor
}, indexFieldMapper, typeFieldMapper, allFieldMapper, analyzerMapper, sourceFieldMapper, sizeFieldMapper);
return builder;
}
}
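DocumentMapper is now a single concrete class (replacing the old interface plus the XContentDocumentMapper implementation), assembled through its nested Builder, normally obtained via MapperBuilders.doc. A small usage sketch against the API shown above; the local variables (indexSettings, analysisService, docMapperParser, rootBuilder, jsonSource) are assumed to exist, and the index, type and id values are illustrative:

    // rootBuilder is a RootObjectMapper.Builder; its name ("tweet") becomes the mapping type
    DocumentMapper.Builder builder = MapperBuilders.doc("twitter", indexSettings, rootBuilder);
    if (!builder.hasIndexAnalyzer()) {
        builder.indexAnalyzer(analysisService.defaultIndexAnalyzer());
    }
    if (!builder.hasSearchAnalyzer()) {
        builder.searchAnalyzer(analysisService.defaultSearchAnalyzer());
    }
    DocumentMapper mapper = builder.build(docMapperParser);
    // parse validates the type and fills in _uid/_id/_type/_source and the other internal fields
    ParsedDocument doc = mapper.parse("tweet", "1", jsonSource);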

View File

@@ -20,26 +20,339 @@
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.core.*;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.internal.*;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.index.mapper.multifield.MultiFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
import org.elasticsearch.index.settings.IndexSettings;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.*;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
/**
* @author kimchy (shay.banon)
*/
public interface DocumentMapperParser {
public class DocumentMapperParser extends AbstractIndexComponent {
/**
* Parses the source mapping definition into a document mapper.
*/
DocumentMapper parse(String mappingSource) throws MapperParsingException;
final AnalysisService analysisService;
/**
* Parses the source mapping definition into a document mapper with the specified
* type (overriding the one defined in the source mapping).
*/
DocumentMapper parse(@Nullable String type, String mappingSource) throws MapperParsingException;
private final RootObjectMapper.TypeParser rootObjectTypeParser = new RootObjectMapper.TypeParser();
/**
* Parses the source mapping definition into a document mapper with the specified
* type (overriding the one defined in the source mapping).
*/
DocumentMapper parse(@Nullable String type, String mappingSource, String defaultMappingSource) throws MapperParsingException;
private final Object typeParsersMutex = new Object();
private volatile ImmutableMap<String, Mapper.TypeParser> typeParsers;
public DocumentMapperParser(Index index, AnalysisService analysisService) {
this(index, ImmutableSettings.Builder.EMPTY_SETTINGS, analysisService);
}
public DocumentMapperParser(Index index, @IndexSettings Settings indexSettings, AnalysisService analysisService) {
super(index, indexSettings);
this.analysisService = analysisService;
typeParsers = new MapBuilder<String, Mapper.TypeParser>()
.put(ByteFieldMapper.CONTENT_TYPE, new ByteFieldMapper.TypeParser())
.put(ShortFieldMapper.CONTENT_TYPE, new ShortFieldMapper.TypeParser())
.put(IntegerFieldMapper.CONTENT_TYPE, new IntegerFieldMapper.TypeParser())
.put(LongFieldMapper.CONTENT_TYPE, new LongFieldMapper.TypeParser())
.put(FloatFieldMapper.CONTENT_TYPE, new FloatFieldMapper.TypeParser())
.put(DoubleFieldMapper.CONTENT_TYPE, new DoubleFieldMapper.TypeParser())
.put(BooleanFieldMapper.CONTENT_TYPE, new BooleanFieldMapper.TypeParser())
.put(BinaryFieldMapper.CONTENT_TYPE, new BinaryFieldMapper.TypeParser())
.put(DateFieldMapper.CONTENT_TYPE, new DateFieldMapper.TypeParser())
.put(IpFieldMapper.CONTENT_TYPE, new IpFieldMapper.TypeParser())
.put(StringFieldMapper.CONTENT_TYPE, new StringFieldMapper.TypeParser())
.put(ObjectMapper.CONTENT_TYPE, new ObjectMapper.TypeParser())
.put(MultiFieldMapper.CONTENT_TYPE, new MultiFieldMapper.TypeParser())
.put(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser())
.immutableMap();
}
public void putTypeParser(String type, Mapper.TypeParser typeParser) {
synchronized (typeParsersMutex) {
typeParsers = new MapBuilder<String, Mapper.TypeParser>()
.putAll(typeParsers)
.put(type, typeParser)
.immutableMap();
}
}
public Mapper.TypeParser.ParserContext parserContext() {
return new Mapper.TypeParser.ParserContext(analysisService, typeParsers);
}
public DocumentMapper parse(String source) throws MapperParsingException {
return parse(null, source);
}
public DocumentMapper parse(@Nullable String type, String source) throws MapperParsingException {
return parse(type, source, null);
}
@SuppressWarnings({"unchecked"})
public DocumentMapper parse(@Nullable String type, String source, String defaultSource) throws MapperParsingException {
Map<String, Object> mapping = null;
if (source != null) {
Tuple<String, Map<String, Object>> t = extractMapping(type, source);
type = t.v1();
mapping = t.v2();
}
if (mapping == null) {
mapping = Maps.newHashMap();
}
if (type == null) {
throw new MapperParsingException("Failed to derive type");
}
if (defaultSource != null) {
Tuple<String, Map<String, Object>> t = extractMapping(MapperService.DEFAULT_MAPPING, defaultSource);
if (t.v2() != null) {
XContentHelper.mergeDefaults(mapping, t.v2());
}
}
Mapper.TypeParser.ParserContext parserContext = new Mapper.TypeParser.ParserContext(analysisService, typeParsers);
DocumentMapper.Builder docBuilder = doc(index.name(), indexSettings, (RootObjectMapper.Builder) rootObjectTypeParser.parse(type, mapping, parserContext));
for (Map.Entry<String, Object> entry : mapping.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (SourceFieldMapper.CONTENT_TYPE.equals(fieldName) || "sourceField".equals(fieldName)) {
docBuilder.sourceField(parseSourceField((Map<String, Object>) fieldNode, parserContext));
} else if (SizeFieldMapper.CONTENT_TYPE.equals(fieldName)) {
docBuilder.sizeField(parseSizeField((Map<String, Object>) fieldNode, parserContext));
} else if (IdFieldMapper.CONTENT_TYPE.equals(fieldName) || "idField".equals(fieldName)) {
docBuilder.idField(parseIdField((Map<String, Object>) fieldNode, parserContext));
} else if (IndexFieldMapper.CONTENT_TYPE.equals(fieldName) || "indexField".equals(fieldName)) {
docBuilder.indexField(parseIndexField((Map<String, Object>) fieldNode, parserContext));
} else if (TypeFieldMapper.CONTENT_TYPE.equals(fieldName) || "typeField".equals(fieldName)) {
docBuilder.typeField(parseTypeField((Map<String, Object>) fieldNode, parserContext));
} else if (UidFieldMapper.CONTENT_TYPE.equals(fieldName) || "uidField".equals(fieldName)) {
docBuilder.uidField(parseUidField((Map<String, Object>) fieldNode, parserContext));
} else if (RoutingFieldMapper.CONTENT_TYPE.equals(fieldName)) {
docBuilder.routingField(parseRoutingField((Map<String, Object>) fieldNode, parserContext));
} else if (ParentFieldMapper.CONTENT_TYPE.equals(fieldName)) {
docBuilder.parentFiled(parseParentField((Map<String, Object>) fieldNode, parserContext));
} else if (BoostFieldMapper.CONTENT_TYPE.equals(fieldName) || "boostField".equals(fieldName)) {
docBuilder.boostField(parseBoostField((Map<String, Object>) fieldNode, parserContext));
} else if (AllFieldMapper.CONTENT_TYPE.equals(fieldName) || "allField".equals(fieldName)) {
docBuilder.allField(parseAllField((Map<String, Object>) fieldNode, parserContext));
} else if (AnalyzerMapper.CONTENT_TYPE.equals(fieldName)) {
docBuilder.analyzerField(parseAnalyzerField((Map<String, Object>) fieldNode, parserContext));
} else if ("index_analyzer".equals(fieldName)) {
docBuilder.indexAnalyzer(analysisService.analyzer(fieldNode.toString()));
} else if ("search_analyzer".equals(fieldName)) {
docBuilder.searchAnalyzer(analysisService.analyzer(fieldNode.toString()));
} else if ("analyzer".equals(fieldName)) {
docBuilder.indexAnalyzer(analysisService.analyzer(fieldNode.toString()));
docBuilder.searchAnalyzer(analysisService.analyzer(fieldNode.toString()));
}
}
if (!docBuilder.hasIndexAnalyzer()) {
docBuilder.indexAnalyzer(analysisService.defaultIndexAnalyzer());
}
if (!docBuilder.hasSearchAnalyzer()) {
docBuilder.searchAnalyzer(analysisService.defaultSearchAnalyzer());
}
ImmutableMap<String, Object> attributes = ImmutableMap.of();
if (mapping.containsKey("_meta")) {
attributes = ImmutableMap.copyOf((Map<String, Object>) mapping.get("_meta"));
}
docBuilder.meta(attributes);
DocumentMapper documentMapper = docBuilder.build(this);
// update the source with the generated one
documentMapper.refreshSource();
return documentMapper;
}
private UidFieldMapper.Builder parseUidField(Map<String, Object> uidNode, Mapper.TypeParser.ParserContext parserContext) {
UidFieldMapper.Builder builder = uid();
return builder;
}
private BoostFieldMapper.Builder parseBoostField(Map<String, Object> boostNode, Mapper.TypeParser.ParserContext parserContext) {
String name = boostNode.get("name") == null ? BoostFieldMapper.Defaults.NAME : boostNode.get("name").toString();
BoostFieldMapper.Builder builder = boost(name);
parseNumberField(builder, name, boostNode, parserContext);
for (Map.Entry<String, Object> entry : boostNode.entrySet()) {
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
builder.nullValue(nodeFloatValue(propNode));
}
}
return builder;
}
private TypeFieldMapper.Builder parseTypeField(Map<String, Object> typeNode, Mapper.TypeParser.ParserContext parserContext) {
TypeFieldMapper.Builder builder = type();
parseField(builder, builder.name, typeNode, parserContext);
return builder;
}
private IdFieldMapper.Builder parseIdField(Map<String, Object> idNode, Mapper.TypeParser.ParserContext parserContext) {
IdFieldMapper.Builder builder = id();
parseField(builder, builder.name, idNode, parserContext);
return builder;
}
// NOTE, we also parse this in MappingMetaData
private RoutingFieldMapper.Builder parseRoutingField(Map<String, Object> routingNode, Mapper.TypeParser.ParserContext parserContext) {
RoutingFieldMapper.Builder builder = routing();
parseField(builder, builder.name, routingNode, parserContext);
for (Map.Entry<String, Object> entry : routingNode.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("required")) {
builder.required(nodeBooleanValue(fieldNode));
} else if (fieldName.equals("path")) {
builder.path(fieldNode.toString());
}
}
return builder;
}
private ParentFieldMapper.Builder parseParentField(Map<String, Object> parentNode, Mapper.TypeParser.ParserContext parserContext) {
ParentFieldMapper.Builder builder = new ParentFieldMapper.Builder();
for (Map.Entry<String, Object> entry : parentNode.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("type")) {
builder.type(fieldNode.toString());
}
}
return builder;
}
private AnalyzerMapper.Builder parseAnalyzerField(Map<String, Object> analyzerNode, Mapper.TypeParser.ParserContext parserContext) {
AnalyzerMapper.Builder builder = analyzer();
for (Map.Entry<String, Object> entry : analyzerNode.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("path")) {
builder.field(fieldNode.toString());
}
}
return builder;
}
private AllFieldMapper.Builder parseAllField(Map<String, Object> allNode, Mapper.TypeParser.ParserContext parserContext) {
AllFieldMapper.Builder builder = all();
parseField(builder, builder.name, allNode, parserContext);
for (Map.Entry<String, Object> entry : allNode.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode));
}
}
return builder;
}
private SizeFieldMapper.Builder parseSizeField(Map<String, Object> node, Mapper.TypeParser.ParserContext parserContext) {
SizeFieldMapper.Builder builder = new SizeFieldMapper.Builder();
for (Map.Entry<String, Object> entry : node.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode));
} else if (fieldName.equals("store")) {
builder.store(parseStore(fieldName, fieldNode.toString()));
}
}
return builder;
}
private SourceFieldMapper.Builder parseSourceField(Map<String, Object> sourceNode, Mapper.TypeParser.ParserContext parserContext) {
SourceFieldMapper.Builder builder = source();
for (Map.Entry<String, Object> entry : sourceNode.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode));
} else if (fieldName.equals("compress") && fieldNode != null) {
builder.compress(nodeBooleanValue(fieldNode));
} else if (fieldName.equals("compress_threshold") && fieldNode != null) {
if (fieldNode instanceof Number) {
builder.compressThreshold(((Number) fieldNode).longValue());
builder.compress(true);
} else {
builder.compressThreshold(ByteSizeValue.parseBytesSizeValue(fieldNode.toString()).bytes());
builder.compress(true);
}
}
}
return builder;
}
private IndexFieldMapper.Builder parseIndexField(Map<String, Object> indexNode, Mapper.TypeParser.ParserContext parserContext) {
IndexFieldMapper.Builder builder = MapperBuilders.index();
parseField(builder, builder.name, indexNode, parserContext);
for (Map.Entry<String, Object> entry : indexNode.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode));
}
}
return builder;
}
@SuppressWarnings({"unchecked"})
private Tuple<String, Map<String, Object>> extractMapping(String type, String source) throws MapperParsingException {
Map<String, Object> root;
XContentParser xContentParser = null;
try {
xContentParser = XContentFactory.xContent(source).createParser(source);
root = xContentParser.map();
} catch (IOException e) {
throw new MapperParsingException("Failed to parse mapping definition", e);
} finally {
if (xContentParser != null) {
xContentParser.close();
}
}
// we always assume the first and single key is the mapping type root
if (root.keySet().size() != 1) {
throw new MapperParsingException("Mapping must have the `type` as the root object");
}
String rootName = root.keySet().iterator().next();
if (type == null) {
type = rootName;
}
return new Tuple<String, Map<String, Object>>(type, (Map<String, Object>) root.get(rootName));
}
}
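DocumentMapperParser likewise collapses from an interface plus XContentDocumentMapperParser into one concrete class. A minimal sketch of parsing a JSON mapping into a DocumentMapper; the index and analysisService variables are assumed to come from the index module, and the sample mapping is illustrative:

    DocumentMapperParser mapperParser = new DocumentMapperParser(index, analysisService);
    String mappingJson = "{\"tweet\": {\"properties\": {\"message\": {\"type\": \"string\"}}}}";
    // the type can be passed explicitly or derived from the single root key of the mapping
    DocumentMapper mapper = mapperParser.parse("tweet", mappingJson);
    CompressedString normalized = mapper.mappingSource();   // regenerated via refreshSource()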

View File

@@ -1,32 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Document;
/**
* @author kimchy (Shay Banon)
*/
public interface IdFieldMapper extends FieldMapper<String>, InternalMapper {
public static final String NAME = "_id";
String value(Document document);
}

View File

@@ -1,41 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.Term;
import org.elasticsearch.common.util.concurrent.ThreadSafe;
/**
* A mapper that maps the index name of the resource into the document.
*
* @author kimchy (shay.banon)
*/
@ThreadSafe
public interface IndexFieldMapper extends FieldMapper<String>, InternalMapper {
public static final String NAME = "_index";
boolean enabled();
String value(Document document);
Term term(String value);
}

View File

@@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableMap;
@@ -25,9 +25,6 @@ import org.elasticsearch.common.util.concurrent.NotThreadSafe;
import org.elasticsearch.common.util.concurrent.ThreadSafe;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.FieldMapperListener;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import java.io.IOException;
import java.util.Map;
@@ -36,9 +33,9 @@ import java.util.Map;
* @author kimchy (shay.banon)
*/
@ThreadSafe
public interface XContentMapper extends ToXContent {
public interface Mapper extends ToXContent {
public static final XContentMapper[] EMPTY_ARRAY = new XContentMapper[0];
public static final Mapper[] EMPTY_ARRAY = new Mapper[0];
@NotThreadSafe
public static class BuilderContext {
@@ -54,7 +51,7 @@ public interface XContentMapper extends ToXContent {
}
@NotThreadSafe
public static abstract class Builder<T extends Builder, Y extends XContentMapper> {
public static abstract class Builder<T extends Builder, Y extends Mapper> {
protected String name;
@ -64,6 +61,10 @@ public interface XContentMapper extends ToXContent {
this.name = name;
}
public String name() {
return this.name;
}
public abstract Y build(BuilderContext context);
}
@@ -89,14 +90,14 @@ public interface XContentMapper extends ToXContent {
}
}
XContentMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException;
Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException;
}
String name();
void parse(ParseContext context) throws IOException;
void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException;
void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException;
void traverse(FieldMapperListener fieldMapperListener);

View File

@@ -17,27 +17,40 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.xcontent.ip.IpFieldMapper;
import org.elasticsearch.index.mapper.core.*;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.mapper.internal.AnalyzerMapper;
import org.elasticsearch.index.mapper.internal.BoostFieldMapper;
import org.elasticsearch.index.mapper.internal.IdFieldMapper;
import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
import org.elasticsearch.index.mapper.internal.RoutingFieldMapper;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.index.mapper.multifield.MultiFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
/**
* @author kimchy (shay.banon)
*/
public final class XContentMapperBuilders {
public final class MapperBuilders {
private XContentMapperBuilders() {
private MapperBuilders() {
}
public static XContentDocumentMapper.Builder doc(String index, RootObjectMapper.Builder objectBuilder) {
return new XContentDocumentMapper.Builder(index, null, objectBuilder);
public static DocumentMapper.Builder doc(String index, RootObjectMapper.Builder objectBuilder) {
return new DocumentMapper.Builder(index, null, objectBuilder);
}
public static XContentDocumentMapper.Builder doc(String index, @Nullable Settings settings, RootObjectMapper.Builder objectBuilder) {
return new XContentDocumentMapper.Builder(index, settings, objectBuilder);
public static DocumentMapper.Builder doc(String index, @Nullable Settings settings, RootObjectMapper.Builder objectBuilder) {
return new DocumentMapper.Builder(index, settings, objectBuilder);
}
public static SourceFieldMapper.Builder source() {

View File

@@ -43,7 +43,7 @@ import org.elasticsearch.env.FailedToResolveConfigException;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.xcontent.XContentDocumentMapperParser;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.indices.InvalidTypeNameException;
import org.elasticsearch.indices.TypeMissingException;
@@ -85,7 +85,6 @@ public class MapperService extends AbstractIndexComponent implements Iterable<Do
private volatile ImmutableMap<String, FieldMappers> indexNameFieldMappers = ImmutableMap.of();
private volatile ImmutableMap<String, FieldMappers> fullNameFieldMappers = ImmutableMap.of();
// for now, just use the xcontent one. Can work on it more to support custom ones
private final DocumentMapperParser documentParser;
private final InternalFieldMapperListener fieldMapperListener = new InternalFieldMapperListener();
@@ -95,7 +94,7 @@ public class MapperService extends AbstractIndexComponent implements Iterable<Do
@Inject public MapperService(Index index, @IndexSettings Settings indexSettings, Environment environment, AnalysisService analysisService) {
super(index, indexSettings);
this.analysisService = analysisService;
this.documentParser = new XContentDocumentMapperParser(index, indexSettings, analysisService);
this.documentParser = new DocumentMapperParser(index, indexSettings, analysisService);
this.searchAnalyzer = new SmartIndexNameSearchAnalyzer(analysisService.defaultSearchAnalyzer());
this.dynamic = componentSettings.getAsBoolean("dynamic", true);
@@ -106,7 +105,7 @@ public class MapperService extends AbstractIndexComponent implements Iterable<Do
defaultMappingUrl = environment.resolveConfig("default-mapping.json");
} catch (FailedToResolveConfigException e) {
// not there, default to the built in one
defaultMappingUrl = indexSettings.getClassLoader().getResource("org/elasticsearch/index/mapper/xcontent/default-mapping.json");
defaultMappingUrl = indexSettings.getClassLoader().getResource("org/elasticsearch/index/mapper/default-mapping.json");
}
} else {
try {

View File

@@ -17,10 +17,9 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.collect.Lists;
import org.elasticsearch.index.mapper.DocumentMapper;
import java.util.List;
@@ -29,18 +28,18 @@ import java.util.List;
*/
public class MergeContext {
private final XContentDocumentMapper documentMapper;
private final DocumentMapper documentMapper;
private final DocumentMapper.MergeFlags mergeFlags;
private final List<String> mergeConflicts = Lists.newArrayList();
public MergeContext(XContentDocumentMapper documentMapper, DocumentMapper.MergeFlags mergeFlags) {
public MergeContext(DocumentMapper documentMapper, DocumentMapper.MergeFlags mergeFlags) {
this.documentMapper = documentMapper;
this.mergeFlags = mergeFlags;
}
public XContentDocumentMapper docMapper() {
public DocumentMapper docMapper() {
return documentMapper;
}

View File

@@ -1,41 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.Term;
import org.elasticsearch.common.util.concurrent.ThreadSafe;
/**
* @author kimchy (shay.banon)
*/
@ThreadSafe
public interface ParentFieldMapper extends FieldMapper<Uid>, InternalMapper {
public static final String NAME = "_parent";
/**
* The type of the parent doc.
*/
String type();
Term term(String type, String id);
Term term(String uid);
}

View File

@@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
@@ -25,7 +25,7 @@ import org.elasticsearch.common.lucene.all.AllEntries;
import org.elasticsearch.common.util.concurrent.NotThreadSafe;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
import java.util.HashMap;
import java.util.Map;
@@ -36,9 +36,9 @@ import java.util.Map;
@NotThreadSafe
public class ParseContext {
private final XContentDocumentMapper docMapper;
private final DocumentMapper docMapper;
private final XContentDocumentMapperParser docMapperParser;
private final DocumentMapperParser docMapperParser;
private final ContentPath path;
@@ -76,7 +76,7 @@ public class ParseContext {
private AllEntries allEntries = new AllEntries();
public ParseContext(String index, XContentDocumentMapperParser docMapperParser, XContentDocumentMapper docMapper, ContentPath path) {
public ParseContext(String index, DocumentMapperParser docMapperParser, DocumentMapper docMapper, ContentPath path) {
this.index = index;
this.docMapper = docMapper;
this.docMapperParser = docMapperParser;
@@ -104,7 +104,7 @@ public class ParseContext {
return this.flyweight;
}
public XContentDocumentMapperParser docMapperParser() {
public DocumentMapperParser docMapperParser() {
return this.docMapperParser;
}
@@ -153,7 +153,7 @@ public class ParseContext {
return docMapper.root();
}
public XContentDocumentMapper docMapper() {
public DocumentMapper docMapper() {
return this.docMapper;
}
@@ -200,7 +200,7 @@ public class ParseContext {
}
/**
* Is all included or not. Will always disable it if {@link org.elasticsearch.index.mapper.AllFieldMapper#enabled()}
* Is all included or not. Will always disable it if {@link org.elasticsearch.index.mapper.internal.AllFieldMapper#enabled()}
* is <tt>false</tt>. If its enabled, then will return <tt>true</tt> only if the specific flag is <tt>null</tt> or
* its actual value (so, if not set, defaults to "true").
*/

View File

@@ -1,34 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Document;
/**
* @author kimchy (shay.banon)
*/
public interface RoutingFieldMapper extends FieldMapper<String>, InternalMapper {
boolean required();
String path();
String value(Document document);
}

View File

@@ -1,53 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.Fieldable;
import org.elasticsearch.common.util.concurrent.ThreadSafe;
/**
* A mapper that maps the actual source of a generated document.
*
* @author kimchy (shay.banon)
*/
@ThreadSafe
public interface SourceFieldMapper extends FieldMapper<byte[]>, InternalMapper {
public final String NAME = "_source";
/**
 * Returns <tt>true</tt> if the source field mapper is enabled.
*/
boolean enabled();
/**
 * Returns the native source value; if it is compressed, the compressed value is returned.
*/
byte[] nativeValue(Fieldable field);
byte[] value(Document document);
/**
* A field selector that loads just the source field.
*/
FieldSelector fieldSelector();
}
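The fieldSelector() declared above is the usual Lucene 3.x mechanism for loading a single stored field cheaply. A minimal sketch of such a selector, assuming the stock FieldSelector API (the class name is illustrative, not the mapper's actual implementation):

import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;

// Loads only the _source field and stops scanning the remaining stored fields.
public class SourceOnlyFieldSelector implements FieldSelector {
    @Override public FieldSelectorResult accept(String fieldName) {
        if ("_source".equals(fieldName)) {
            return FieldSelectorResult.LOAD_AND_BREAK;
        }
        return FieldSelectorResult.NO_LOAD;
    }
}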
View File
@ -1,41 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.Term;
import org.elasticsearch.common.util.concurrent.ThreadSafe;
/**
* A mapper that maps the type of the resource into the document.
*
* @author kimchy (Shay Banon)
*/
@ThreadSafe
public interface TypeFieldMapper extends FieldMapper<String>, InternalMapper {
public static final String NAME = "_type";
public static final Term TERM_FACTORY = new Term(NAME, "");
String value(Document document);
Term term(String value);
}
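The TERM_FACTORY constant above follows the common Lucene 3.x idiom of deriving per-value terms from one shared term so the field name is interned only once. A rough sketch of that idiom (the value is illustrative):

import org.apache.lucene.index.Term;

public class TypeTermExample {
    public static final Term TERM_FACTORY = new Term("_type", "");

    // Derive a concrete _type term from the shared factory term.
    public static Term typeTerm(String type) {
        return TERM_FACTORY.createTerm(type);   // reuses the interned "_type" field name
    }
}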
View File
@ -1,38 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.Term;
import org.elasticsearch.common.util.concurrent.ThreadSafe;
/**
* @author kimchy (Shay Banon)
*/
@ThreadSafe
public interface UidFieldMapper extends FieldMapper<Uid>, InternalMapper {
public static final String NAME = "_uid";
public static final Term TERM_FACTORY = new Term(NAME, "");
Term term(String type, String id);
Term term(String uid);
}
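The two term() variants above differ only in whether the uid is pre-built; the uid itself is conventionally the type and the id joined with '#', as Uid.createUid does elsewhere in this commit. A sketch under that assumption (not the actual Uid class):

import org.apache.lucene.index.Term;

public class UidTermExample {
    // Build a _uid term from a type and an id using the "type#id" convention.
    public static Term term(String type, String id) {
        return term(type + "#" + id);
    }

    public static Term term(String uid) {
        return new Term("_uid", uid);
    }
}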
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Field;
@ -36,8 +36,11 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMapperListener;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
import java.io.IOException;
@ -45,7 +48,7 @@ import java.io.IOException;
/**
* @author kimchy (shay.banon)
*/
public abstract class AbstractFieldMapper<T> implements FieldMapper<T>, XContentMapper {
public abstract class AbstractFieldMapper<T> implements FieldMapper<T>, Mapper {
public static class Defaults {
public static final Field.Index INDEX = Field.Index.ANALYZED;
@ -99,7 +102,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T>, XContent
}
}
public abstract static class Builder<T extends Builder, Y extends AbstractFieldMapper> extends XContentMapper.Builder<T, Y> {
public abstract static class Builder<T extends Builder, Y extends AbstractFieldMapper> extends Mapper.Builder<T, Y> {
protected Field.Index index = Defaults.INDEX;
@ -365,7 +368,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T>, XContent
includeLower, includeUpper);
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
if (!this.getClass().equals(mergeWith.getClass())) {
String mergedType = mergeWith.getClass().getSimpleName();
if (mergeWith instanceof AbstractFieldMapper) {
View File
@ -17,19 +17,21 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.index.mapper.xcontent.XContentMapperBuilders.*;
import static org.elasticsearch.index.mapper.xcontent.XContentTypeParsers.*;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
/**
* @author kimchy (shay.banon)
@ -54,8 +56,8 @@ public class BinaryFieldMapper extends AbstractFieldMapper<byte[]> {
}
}
public static class TypeParser implements XContentMapper.TypeParser {
@Override public XContentMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public static class TypeParser implements Mapper.TypeParser {
@Override public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
BinaryFieldMapper.Builder builder = binaryField(name);
parseField(builder, name, node, parserContext);
return builder;
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
@ -26,14 +26,16 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.*;
import static org.elasticsearch.index.mapper.xcontent.XContentMapperBuilders.*;
import static org.elasticsearch.index.mapper.xcontent.XContentTypeParsers.*;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
/**
* @author kimchy (shay.banon)
@ -93,8 +95,8 @@ public class BooleanFieldMapper extends AbstractFieldMapper<Boolean> {
}
}
public static class TypeParser implements XContentMapper.TypeParser {
@Override public XContentMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public static class TypeParser implements Mapper.TypeParser {
@Override public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
BooleanFieldMapper.Builder builder = booleanField(name);
parseField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
@ -34,16 +34,19 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.*;
import static org.elasticsearch.index.mapper.xcontent.XContentMapperBuilders.*;
import static org.elasticsearch.index.mapper.xcontent.XContentTypeParsers.*;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
/**
* @author kimchy (shay.banon)
@ -78,8 +81,8 @@ public class ByteFieldMapper extends NumberFieldMapper<Byte> {
}
}
public static class TypeParser implements XContentMapper.TypeParser {
@Override public XContentMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public static class TypeParser implements Mapper.TypeParser {
@Override public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
ByteFieldMapper.Builder builder = byteField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
@ -214,7 +217,7 @@ public class ByteFieldMapper extends NumberFieldMapper<Byte> {
return CONTENT_TYPE;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
@ -37,15 +37,18 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericDateAnalyzer;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.index.mapper.xcontent.XContentMapperBuilders.*;
import static org.elasticsearch.index.mapper.xcontent.XContentTypeParsers.*;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
/**
* @author kimchy (shay.banon)
@ -89,8 +92,8 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
}
}
public static class TypeParser implements XContentMapper.TypeParser {
@Override public XContentMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public static class TypeParser implements Mapper.TypeParser {
@Override public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
DateFieldMapper.Builder builder = dateField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
@ -259,7 +262,7 @@ public class DateFieldMapper extends NumberFieldMapper<Long> {
return CONTENT_TYPE;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
@ -34,16 +34,19 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericDoubleAnalyzer;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.*;
import static org.elasticsearch.index.mapper.xcontent.XContentMapperBuilders.*;
import static org.elasticsearch.index.mapper.xcontent.XContentTypeParsers.*;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
/**
* @author kimchy (shay.banon)
@ -78,8 +81,8 @@ public class DoubleFieldMapper extends NumberFieldMapper<Double> {
}
}
public static class TypeParser implements XContentMapper.TypeParser {
@Override public XContentMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public static class TypeParser implements Mapper.TypeParser {
@Override public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
DoubleFieldMapper.Builder builder = doubleField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
@ -211,7 +214,7 @@ public class DoubleFieldMapper extends NumberFieldMapper<Double> {
return CONTENT_TYPE;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
@ -35,16 +35,19 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericFloatAnalyzer;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.*;
import static org.elasticsearch.index.mapper.xcontent.XContentMapperBuilders.*;
import static org.elasticsearch.index.mapper.xcontent.XContentTypeParsers.*;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
/**
* @author kimchy (shay.banon)
@ -79,8 +82,8 @@ public class FloatFieldMapper extends NumberFieldMapper<Float> {
}
}
public static class TypeParser implements XContentMapper.TypeParser {
@Override public XContentMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public static class TypeParser implements Mapper.TypeParser {
@Override public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
FloatFieldMapper.Builder builder = floatField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
@ -210,7 +213,7 @@ public class FloatFieldMapper extends NumberFieldMapper<Float> {
return CONTENT_TYPE;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
@ -35,16 +35,19 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.*;
import static org.elasticsearch.index.mapper.xcontent.XContentMapperBuilders.*;
import static org.elasticsearch.index.mapper.xcontent.XContentTypeParsers.*;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
/**
* @author kimchy (shay.banon)
@ -79,8 +82,8 @@ public class IntegerFieldMapper extends NumberFieldMapper<Integer> {
}
}
public static class TypeParser implements XContentMapper.TypeParser {
@Override public XContentMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public static class TypeParser implements Mapper.TypeParser {
@Override public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
IntegerFieldMapper.Builder builder = integerField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
@ -215,7 +218,7 @@ public class IntegerFieldMapper extends NumberFieldMapper<Integer> {
return CONTENT_TYPE;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
@ -35,16 +35,19 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericLongAnalyzer;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.*;
import static org.elasticsearch.index.mapper.xcontent.XContentMapperBuilders.*;
import static org.elasticsearch.index.mapper.xcontent.XContentTypeParsers.*;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
/**
* @author kimchy (shay.banon)
@ -79,8 +82,8 @@ public class LongFieldMapper extends NumberFieldMapper<Long> {
}
}
public static class TypeParser implements XContentMapper.TypeParser {
@Override public XContentMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public static class TypeParser implements Mapper.TypeParser {
@Override public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
LongFieldMapper.Builder builder = longField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
@ -214,7 +217,7 @@ public class LongFieldMapper extends NumberFieldMapper<Long> {
return CONTENT_TYPE;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.NumericTokenStream;
import org.apache.lucene.document.AbstractField;
@ -30,7 +30,10 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import java.io.Reader;
@ -38,7 +41,7 @@ import java.io.Reader;
/**
* @author kimchy (shay.banon)
*/
public abstract class NumberFieldMapper<T extends Number> extends AbstractFieldMapper<T> implements IncludeInAllMapper {
public abstract class NumberFieldMapper<T extends Number> extends AbstractFieldMapper<T> implements AllFieldMapper.IncludeInAll {
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final int PRECISION_STEP = NumericUtils.PRECISION_STEP_DEFAULT;
@ -189,7 +192,7 @@ public abstract class NumberFieldMapper<T extends Number> extends AbstractFieldM
return num == null ? null : num.toString();
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
@ -35,16 +35,19 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.*;
import static org.elasticsearch.index.mapper.xcontent.XContentMapperBuilders.*;
import static org.elasticsearch.index.mapper.xcontent.XContentTypeParsers.*;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
/**
* @author kimchy (shay.banon)
@ -79,8 +82,8 @@ public class ShortFieldMapper extends NumberFieldMapper<Short> {
}
}
public static class TypeParser implements XContentMapper.TypeParser {
@Override public XContentMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public static class TypeParser implements Mapper.TypeParser {
@Override public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
ShortFieldMapper.Builder builder = shortField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
@ -214,7 +217,7 @@ public class ShortFieldMapper extends NumberFieldMapper<Short> {
return CONTENT_TYPE;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
@ -25,19 +25,23 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.index.mapper.xcontent.XContentMapperBuilders.*;
import static org.elasticsearch.index.mapper.xcontent.XContentTypeParsers.*;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
/**
* @author kimchy (shay.banon)
*/
public class StringFieldMapper extends AbstractFieldMapper<String> implements IncludeInAllMapper {
public class StringFieldMapper extends AbstractFieldMapper<String> implements AllFieldMapper.IncludeInAll {
public static final String CONTENT_TYPE = "string";
@ -74,8 +78,8 @@ public class StringFieldMapper extends AbstractFieldMapper<String> implements In
}
}
public static class TypeParser implements XContentMapper.TypeParser {
@Override public XContentMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public static class TypeParser implements Mapper.TypeParser {
@Override public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
StringFieldMapper.Builder builder = stringField(name);
parseField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
@ -178,7 +182,7 @@ public class StringFieldMapper extends AbstractFieldMapper<String> implements In
return CONTENT_TYPE;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
View File
@ -17,13 +17,15 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.document.Field;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import java.util.Map;
@ -33,9 +35,9 @@ import static org.elasticsearch.common.xcontent.support.XContentMapValues.*;
/**
* @author kimchy (shay.banon)
*/
public class XContentTypeParsers {
public class TypeParsers {
public static void parseNumberField(NumberFieldMapper.Builder builder, String name, Map<String, Object> numberNode, XContentMapper.TypeParser.ParserContext parserContext) {
public static void parseNumberField(NumberFieldMapper.Builder builder, String name, Map<String, Object> numberNode, Mapper.TypeParser.ParserContext parserContext) {
parseField(builder, name, numberNode, parserContext);
for (Map.Entry<String, Object> entry : numberNode.entrySet()) {
String propName = Strings.toUnderscoreCase(entry.getKey());
@ -48,7 +50,7 @@ public class XContentTypeParsers {
}
}
public static void parseField(AbstractFieldMapper.Builder builder, String name, Map<String, Object> fieldNode, XContentMapper.TypeParser.ParserContext parserContext) {
public static void parseField(AbstractFieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
for (Map.Entry<String, Object> entry : fieldNode.entrySet()) {
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
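parseField() and parseNumberField() compare option names in underscore form, so a mapping can spell an option either way (for example omitNorms or omit_norms). A simplified stand-in for Strings.toUnderscoreCase, only to illustrate the normalization (not the actual implementation):

public class UnderscoreCaseExample {
    // Normalizes camelCase mapping keys to underscore form before option matching.
    public static String toUnderscoreCase(String value) {
        StringBuilder sb = new StringBuilder();
        for (char c : value.toCharArray()) {
            if (Character.isUpperCase(c)) {
                sb.append('_').append(Character.toLowerCase(c));
            } else {
                sb.append(c);
            }
        }
        return sb.toString();                   // "omitNorms" -> "omit_norms"
    }
}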
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent.geo;
package org.elasticsearch.index.mapper.geo;
import org.elasticsearch.index.search.geo.GeoHashUtils;
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent.geo;
package org.elasticsearch.index.mapper.geo;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.field.data.DocFieldData;
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent.geo;
package org.elasticsearch.index.mapper.geo;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.RamUsage;
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent.geo;
package org.elasticsearch.index.mapper.geo;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldComparator;
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent.geo;
package org.elasticsearch.index.mapper.geo;
import org.apache.lucene.document.Field;
import org.elasticsearch.common.Strings;
@ -26,17 +26,24 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldMapperListener;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.xcontent.*;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
import org.elasticsearch.index.search.geo.GeoHashUtils;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.index.mapper.xcontent.XContentMapperBuilders.*;
import static org.elasticsearch.index.mapper.xcontent.XContentTypeParsers.*;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
/**
* Parsing: We handle:
@ -50,7 +57,7 @@ import static org.elasticsearch.index.mapper.xcontent.XContentTypeParsers.*;
*
* @author kimchy (shay.banon)
*/
public class GeoPointFieldMapper implements XContentMapper, ArrayValueMapperParser {
public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
public static final String CONTENT_TYPE = "geo_point";
@ -71,7 +78,7 @@ public class GeoPointFieldMapper implements XContentMapper, ArrayValueMapperPars
public static final int PRECISION = GeoHashUtils.PRECISION;
}
public static class Builder extends XContentMapper.Builder<Builder, GeoPointFieldMapper> {
public static class Builder extends Mapper.Builder<Builder, GeoPointFieldMapper> {
private ContentPath.Type pathType = Defaults.PATH_TYPE;
@ -154,8 +161,8 @@ public class GeoPointFieldMapper implements XContentMapper, ArrayValueMapperPars
}
}
public static class TypeParser implements XContentMapper.TypeParser {
@Override public XContentMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public static class TypeParser implements Mapper.TypeParser {
@Override public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder = new Builder(name);
for (Map.Entry<String, Object> entry : node.entrySet()) {
@ -354,7 +361,7 @@ public class GeoPointFieldMapper implements XContentMapper, ArrayValueMapperPars
}
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
// TODO
}
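For context, a geo_point field has historically accepted several source formats: a lat/lon object, a "lat,lon" string, a geohash string, and a [lon, lat] array. A hedged illustration of how such documents might be built (the "location" field name is an example only):

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class GeoPointSourceExamples {
    public static void main(String[] args) throws Exception {
        XContentBuilder asObject = XContentFactory.jsonBuilder().startObject()
                .startObject("location").field("lat", 41.12).field("lon", -71.34).endObject()
                .endObject();
        XContentBuilder asString = XContentFactory.jsonBuilder().startObject()
                .field("location", "41.12,-71.34")                              // "lat,lon"
                .endObject();
        XContentBuilder asArray = XContentFactory.jsonBuilder().startObject()
                .startArray("location").value(-71.34).value(41.12).endArray()   // [lon, lat]
                .endObject();
        System.out.println(asObject.string());
        System.out.println(asString.string());
        System.out.println(asArray.string());
    }
}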
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent.geo;
package org.elasticsearch.index.mapper.geo;
import org.elasticsearch.common.RamUsage;
import org.elasticsearch.common.thread.ThreadLocals;
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent.geo;
package org.elasticsearch.index.mapper.geo;
import org.elasticsearch.common.RamUsage;
import org.elasticsearch.common.thread.ThreadLocals;
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Field;
@ -29,7 +29,12 @@ import org.elasticsearch.common.lucene.all.AllField;
import org.elasticsearch.common.lucene.all.AllTermQuery;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import java.io.IOException;
@ -37,17 +42,25 @@ import java.io.IOException;
/**
* @author kimchy (shay.banon)
*/
public class AllFieldMapper extends AbstractFieldMapper<Void> implements org.elasticsearch.index.mapper.AllFieldMapper {
public class AllFieldMapper extends AbstractFieldMapper<Void> implements InternalMapper {
public interface IncludeInAll extends Mapper {
void includeInAll(Boolean includeInAll);
void includeInAllIfNotSet(Boolean includeInAll);
}
public static final String NAME = "_all";
public static final String CONTENT_TYPE = "_all";
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = org.elasticsearch.index.mapper.AllFieldMapper.NAME;
public static final String INDEX_NAME = org.elasticsearch.index.mapper.AllFieldMapper.NAME;
public static final String NAME = AllFieldMapper.NAME;
public static final String INDEX_NAME = AllFieldMapper.NAME;
public static final boolean ENABLED = true;
}
public static class Builder extends AbstractFieldMapper.Builder<Builder, AllFieldMapper> {
private boolean enabled = Defaults.ENABLED;
@ -183,7 +196,7 @@ public class AllFieldMapper extends AbstractFieldMapper<Void> implements org.ela
return builder;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
// do nothing here, no merging, but also no exception
}
}
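The new AllFieldMapper.IncludeInAll callback above replaces the old standalone IncludeInAllMapper interface: field mappers keep a tri-state flag, explicit mapping settings always apply, and an inherited default may only fill the flag while it is still unset. A self-contained sketch of just those semantics (real implementors also extend AbstractFieldMapper):

public class IncludeInAllFlag {
    private Boolean includeInAll;                        // null = not set in the mapping

    public void includeInAll(Boolean includeInAll) {
        if (includeInAll != null) {
            this.includeInAll = includeInAll;            // explicit mapping value wins
        }
    }

    public void includeInAllIfNotSet(Boolean includeInAll) {
        if (includeInAll != null && this.includeInAll == null) {
            this.includeInAll = includeInAll;            // inherited default, only if unset
        }
    }

    public boolean resolved() {
        return includeInAll == null || includeInAll;     // unset defaults to true
    }
}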
View File
@ -17,20 +17,24 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.FieldMapperListener;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import java.io.IOException;
/**
* @author kimchy (shay.banon)
*/
public class AnalyzerMapper implements XContentMapper {
public class AnalyzerMapper implements Mapper, InternalMapper {
public static final String CONTENT_TYPE = "_analyzer";
@ -38,7 +42,7 @@ public class AnalyzerMapper implements XContentMapper {
public static final String PATH = "_analyzer";
}
public static class Builder extends XContentMapper.Builder<Builder, AnalyzerMapper> {
public static class Builder extends Mapper.Builder<Builder, AnalyzerMapper> {
private String field = Defaults.PATH;
@ -104,7 +108,7 @@ public class AnalyzerMapper implements XContentMapper {
context.analyzer(analyzer);
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
}
@Override public void traverse(FieldMapperListener fieldMapperListener) {
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
@ -33,7 +33,13 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericFloatAnalyzer;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.FloatFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import java.io.IOException;
@ -41,7 +47,7 @@ import java.io.IOException;
/**
* @author kimchy (shay.banon)
*/
public class BoostFieldMapper extends NumberFieldMapper<Float> implements org.elasticsearch.index.mapper.BoostFieldMapper {
public class BoostFieldMapper extends NumberFieldMapper<Float> implements InternalMapper {
public static final String CONTENT_TYPE = "_boost";
@ -77,7 +83,7 @@ public class BoostFieldMapper extends NumberFieldMapper<Float> implements org.el
private final Float nullValue;
protected BoostFieldMapper() {
public BoostFieldMapper() {
this(Defaults.NAME, Defaults.NAME);
}
@ -209,7 +215,7 @@ public class BoostFieldMapper extends NumberFieldMapper<Float> implements org.el
return builder;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
// do nothing here, no merging, but also no exception
}
}
View File
@ -17,28 +17,35 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import java.io.IOException;
/**
* @author kimchy (shay.banon)
*/
public class IdFieldMapper extends AbstractFieldMapper<String> implements org.elasticsearch.index.mapper.IdFieldMapper {
public class IdFieldMapper extends AbstractFieldMapper<String> implements InternalMapper {
public static final String NAME = "_id";
public static final String CONTENT_TYPE = "_id";
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = "_id";
public static final String INDEX_NAME = "_id";
public static final String NAME = IdFieldMapper.NAME;
public static final String INDEX_NAME = IdFieldMapper.NAME;
public static final Field.Index INDEX = Field.Index.NO;
public static final Field.Store STORE = Field.Store.NO;
public static final boolean OMIT_NORMS = true;
@ -61,11 +68,11 @@ public class IdFieldMapper extends AbstractFieldMapper<String> implements org.el
}
}
protected IdFieldMapper() {
public IdFieldMapper() {
this(Defaults.NAME, Defaults.INDEX_NAME, Defaults.INDEX);
}
protected IdFieldMapper(Field.Index index) {
public IdFieldMapper(Field.Index index) {
this(Defaults.NAME, Defaults.INDEX_NAME, index);
}
@ -80,7 +87,7 @@ public class IdFieldMapper extends AbstractFieldMapper<String> implements org.el
Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER);
}
@Override public String value(Document document) {
public String value(Document document) {
Fieldable field = document.getFieldable(names.indexName());
return field == null ? null : value(field);
}
@ -146,7 +153,7 @@ public class IdFieldMapper extends AbstractFieldMapper<String> implements org.el
return builder;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
// do nothing here, no merging, but also no exception
}
}
View File
@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@ -25,20 +25,27 @@ import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.Term;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import java.io.IOException;
/**
* @author kimchy (shay.banon)
*/
public class IndexFieldMapper extends AbstractFieldMapper<String> implements org.elasticsearch.index.mapper.IndexFieldMapper {
public class IndexFieldMapper extends AbstractFieldMapper<String> implements InternalMapper {
public static final String NAME = "_index";
public static final String CONTENT_TYPE = "_index";
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = org.elasticsearch.index.mapper.IndexFieldMapper.NAME;
public static final String INDEX_NAME = org.elasticsearch.index.mapper.IndexFieldMapper.NAME;
public static final String NAME = IndexFieldMapper.NAME;
public static final String INDEX_NAME = IndexFieldMapper.NAME;
public static final Field.Index INDEX = Field.Index.NOT_ANALYZED;
public static final Field.Store STORE = Field.Store.NO;
public static final boolean OMIT_NORMS = true;
@ -71,7 +78,7 @@ public class IndexFieldMapper extends AbstractFieldMapper<String> implements org
private final boolean enabled;
protected IndexFieldMapper() {
public IndexFieldMapper() {
this(Defaults.NAME, Defaults.INDEX_NAME);
}
@ -87,11 +94,11 @@ public class IndexFieldMapper extends AbstractFieldMapper<String> implements org
this.enabled = enabled;
}
@Override public boolean enabled() {
public boolean enabled() {
return this.enabled;
}
@Override public String value(Document document) {
public String value(Document document) {
Fieldable field = document.getFieldable(names.indexName());
return field == null ? null : value(field);
}
@ -112,7 +119,7 @@ public class IndexFieldMapper extends AbstractFieldMapper<String> implements org
return value;
}
@Override public Term term(String value) {
public Term term(String value) {
return termFactory.createTerm(value);
}
@ -143,7 +150,7 @@ public class IndexFieldMapper extends AbstractFieldMapper<String> implements org
return builder;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
// do nothing here, no merging, but also no exception
}
}
View File
@ -17,34 +17,41 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.Term;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import java.io.IOException;
/**
* @author kimchy (shay.banon)
*/
public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements org.elasticsearch.index.mapper.ParentFieldMapper {
public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements InternalMapper {
public static final String NAME = "_parent";
public static final String CONTENT_TYPE = "_parent";
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = org.elasticsearch.index.mapper.ParentFieldMapper.NAME;
public static final String NAME = ParentFieldMapper.NAME;
public static final Field.Index INDEX = Field.Index.NOT_ANALYZED;
public static final boolean OMIT_NORMS = true;
public static final boolean OMIT_TERM_FREQ_AND_POSITIONS = true;
}
public static class Builder extends XContentMapper.Builder<Builder, ParentFieldMapper> {
public static class Builder extends Mapper.Builder<Builder, ParentFieldMapper> {
protected String indexName;
@ -76,7 +83,7 @@ public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements org.e
this.type = type;
}
@Override public String type() {
public String type() {
return type;
}
@ -135,11 +142,11 @@ public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements org.e
return value;
}
@Override public Term term(String type, String id) {
public Term term(String type, String id) {
return term(Uid.createUid(type, id));
}
@Override public Term term(String uid) {
public Term term(String uid) {
return termFactory.createTerm(uid);
}
@ -154,7 +161,7 @@ public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements org.e
return builder;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
// do nothing here, no merging, but also no exception
}
}
View File
@ -17,22 +17,27 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import java.io.IOException;
/**
* @author kimchy (shay.banon)
*/
public class RoutingFieldMapper extends AbstractFieldMapper<String> implements org.elasticsearch.index.mapper.RoutingFieldMapper {
public class RoutingFieldMapper extends AbstractFieldMapper<String> implements InternalMapper {
public static final String CONTENT_TYPE = "_routing";
@ -77,7 +82,7 @@ public class RoutingFieldMapper extends AbstractFieldMapper<String> implements o
private final String path;
protected RoutingFieldMapper() {
public RoutingFieldMapper() {
this(Defaults.STORE, Defaults.INDEX, Defaults.REQUIRED, Defaults.PATH);
}
@ -92,15 +97,15 @@ public class RoutingFieldMapper extends AbstractFieldMapper<String> implements o
this.required = true;
}
@Override public boolean required() {
public boolean required() {
return this.required;
}
@Override public String path() {
public String path() {
return this.path;
}
@Override public String value(Document document) {
public String value(Document document) {
Fieldable field = document.getFieldable(names.indexName());
return field == null ? null : value(field);
}
@ -175,7 +180,7 @@ public class RoutingFieldMapper extends AbstractFieldMapper<String> implements o
return builder;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
// do nothing here, no merging, but also no exception
}
}
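required() and path() above drive two behaviours: optionally resolving the routing value from a field inside the document, and rejecting index requests that carry no routing when it is mandatory. A hedged sketch of the latter guard (names and exception type are illustrative):

public class RoutingRequiredCheck {
    public static void validate(boolean required, String routingValue) {
        if (required && (routingValue == null || routingValue.length() == 0)) {
            throw new IllegalArgumentException("routing is required, but no routing value was provided");
        }
    }
}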
View File
@ -17,12 +17,16 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
import java.io.IOException;
@ -35,7 +39,7 @@ public class SizeFieldMapper extends IntegerFieldMapper {
public static final boolean ENABLED = false;
}
public static class Builder extends XContentMapper.Builder<Builder, IntegerFieldMapper> {
public static class Builder extends Mapper.Builder<Builder, IntegerFieldMapper> {
protected boolean enabled = Defaults.ENABLED;
@ -103,7 +107,7 @@ public class SizeFieldMapper extends IntegerFieldMapper {
return builder;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
// maybe allow changing enabled? But then we need to figure out null for the default value
}
}
View File
@ -17,9 +17,13 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.*;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;
import org.apache.lucene.document.Fieldable;
import org.elasticsearch.ElasticSearchParseException;
import org.elasticsearch.common.compress.lzf.LZF;
import org.elasticsearch.common.compress.lzf.LZFDecoder;
@ -27,19 +31,26 @@ import org.elasticsearch.common.compress.lzf.LZFEncoder;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import java.io.IOException;
/**
* @author kimchy (shay.banon)
*/
public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements org.elasticsearch.index.mapper.SourceFieldMapper {
public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements InternalMapper {
public static final String NAME = "_source";
public static final String CONTENT_TYPE = "_source";
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = org.elasticsearch.index.mapper.SourceFieldMapper.NAME;
public static final String NAME = SourceFieldMapper.NAME;
public static final boolean ENABLED = true;
public static final long COMPRESS_THRESHOLD = -1;
public static final Field.Index INDEX = Field.Index.NO;
@ -48,7 +59,7 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements or
public static final boolean OMIT_TERM_FREQ_AND_POSITIONS = true;
}
public static class Builder extends XContentMapper.Builder<Builder, SourceFieldMapper> {
public static class Builder extends Mapper.Builder<Builder, SourceFieldMapper> {
private boolean enabled = Defaults.ENABLED;
@ -88,7 +99,7 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements or
private final SourceFieldSelector fieldSelector;
protected SourceFieldMapper() {
public SourceFieldMapper() {
this(Defaults.NAME, Defaults.ENABLED, null, -1);
}
@ -129,12 +140,12 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements or
return new Field(names().indexName(), data);
}
@Override public byte[] value(Document document) {
public byte[] value(Document document) {
Fieldable field = document.getFieldable(names.indexName());
return field == null ? null : value(field);
}
@Override public byte[] nativeValue(Fieldable field) {
public byte[] nativeValue(Fieldable field) {
return field.getBinaryValue();
}
@@ -204,7 +215,7 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements or
return builder;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
SourceFieldMapper sourceMergeWith = (SourceFieldMapper) mergeWith;
if (!mergeContext.mergeFlags().simulate()) {
if (sourceMergeWith.compress != null) {

View File

@@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper;
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;

View File

@@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -30,9 +30,13 @@ import org.apache.lucene.search.Query;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.TermFilter;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.UidFieldMapper;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import java.io.IOException;
@@ -40,13 +44,17 @@ import java.io.IOException;
/**
* @author kimchy (shay.banon)
*/
public class TypeFieldMapper extends AbstractFieldMapper<String> implements org.elasticsearch.index.mapper.TypeFieldMapper {
public class TypeFieldMapper extends AbstractFieldMapper<String> implements InternalMapper {
public static final String NAME = "_type";
public static final Term TERM_FACTORY = new Term(NAME, "");
public static final String CONTENT_TYPE = "_type";
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = org.elasticsearch.index.mapper.TypeFieldMapper.NAME;
public static final String INDEX_NAME = org.elasticsearch.index.mapper.TypeFieldMapper.NAME;
public static final String NAME = TypeFieldMapper.NAME;
public static final String INDEX_NAME = TypeFieldMapper.NAME;
public static final Field.Index INDEX = Field.Index.NOT_ANALYZED;
public static final Field.Store STORE = Field.Store.NO;
public static final boolean OMIT_NORMS = true;
@@ -69,7 +77,7 @@ public class TypeFieldMapper extends AbstractFieldMapper<String> implements org.
}
}
protected TypeFieldMapper() {
public TypeFieldMapper() {
this(Defaults.NAME, Defaults.INDEX_NAME);
}
@@ -84,7 +92,7 @@ public class TypeFieldMapper extends AbstractFieldMapper<String> implements org.
Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER);
}
@Override public String value(Document document) {
public String value(Document document) {
Fieldable field = document.getFieldable(names.indexName());
return field == null ? null : value(field);
}
@@ -105,7 +113,7 @@ public class TypeFieldMapper extends AbstractFieldMapper<String> implements org.
return value;
}
@Override public Term term(String value) {
public Term term(String value) {
return termFactory.createTerm(value);
}
@@ -151,7 +159,7 @@ public class TypeFieldMapper extends AbstractFieldMapper<String> implements org.
return builder;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
// do nothing here, no merging, but also no exception
}
}

View File

@@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
@@ -25,27 +25,36 @@ import org.apache.lucene.index.Term;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.uid.UidField;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import java.io.IOException;
/**
* @author kimchy (shay.banon)
*/
public class UidFieldMapper extends AbstractFieldMapper<Uid> implements org.elasticsearch.index.mapper.UidFieldMapper {
public class UidFieldMapper extends AbstractFieldMapper<Uid> implements InternalMapper {
public static final String NAME = "_uid";
public static final Term TERM_FACTORY = new Term(NAME, "");
public static final String CONTENT_TYPE = "_uid";
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = org.elasticsearch.index.mapper.UidFieldMapper.NAME;
public static final String NAME = UidFieldMapper.NAME;
public static final Field.Index INDEX = Field.Index.NOT_ANALYZED;
public static final boolean OMIT_NORMS = true;
public static final boolean OMIT_TERM_FREQ_AND_POSITIONS = false; // we store payload
}
public static class Builder extends XContentMapper.Builder<Builder, UidFieldMapper> {
public static class Builder extends Mapper.Builder<Builder, UidFieldMapper> {
protected String indexName;
@@ -65,7 +74,7 @@ public class UidFieldMapper extends AbstractFieldMapper<Uid> implements org.elas
}
};
protected UidFieldMapper() {
public UidFieldMapper() {
this(Defaults.NAME);
}
@@ -107,11 +116,11 @@ public class UidFieldMapper extends AbstractFieldMapper<Uid> implements org.elas
return value;
}
@Override public Term term(String type, String id) {
public Term term(String type, String id) {
return term(Uid.createUid(type, id));
}
@Override public Term term(String uid) {
public Term term(String uid) {
return termFactory.createTerm(uid);
}
@@ -128,7 +137,7 @@ public class UidFieldMapper extends AbstractFieldMapper<Uid> implements org.elas
return builder;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
// do nothing here, no merging, but also no exception
}
}

View File

@@ -17,12 +17,16 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent.ip;
package org.elasticsearch.index.mapper.ip;
import org.apache.lucene.analysis.NumericTokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.search.*;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.NumericRangeFilter;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.common.Numbers;
@@ -34,9 +38,13 @@ import org.elasticsearch.index.analysis.NumericAnalyzer;
import org.elasticsearch.index.analysis.NumericTokenizer;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.xcontent.*;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import java.io.IOException;
@@ -44,8 +52,8 @@ import java.io.Reader;
import java.util.Map;
import java.util.regex.Pattern;
import static org.elasticsearch.index.mapper.xcontent.XContentMapperBuilders.*;
import static org.elasticsearch.index.mapper.xcontent.XContentTypeParsers.*;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
/**
* @author kimchy (shay.banon)
@@ -106,8 +114,8 @@ public class IpFieldMapper extends NumberFieldMapper<Long> {
}
}
public static class TypeParser implements XContentMapper.TypeParser {
@Override public XContentMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public static class TypeParser implements Mapper.TypeParser {
@Override public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
IpFieldMapper.Builder builder = ipField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
@@ -236,7 +244,7 @@ public class IpFieldMapper extends NumberFieldMapper<Long> {
return CONTENT_TYPE;
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
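The TypeParser classes in this commit (IpFieldMapper.TypeParser here, plus the multi_field and object ones further down) now implement a nested Mapper.TypeParser instead of XContentMapper.TypeParser. A minimal sketch of that contract as implied by the overridden signatures and the parserContext.typeParser(type) lookups in this diff; it is nested inside Mapper in the real source, and ParserContext is reduced here to the single method these parsers actually call.

public interface TypeParser {

    // invoked with the raw mapping node for one field, returns the matching Mapper.Builder
    Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext)
            throws MapperParsingException;

    // obtained via context.docMapperParser().parserContext(); used to resolve nested
    // types, e.g. parserContext.typeParser(type) in the multi_field and object parsers
    interface ParserContext {

        TypeParser typeParser(String type);
    }
}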

View File

@@ -17,15 +17,21 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.multifield;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMapperListener;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import java.io.IOException;
import java.util.HashMap;
@@ -34,13 +40,13 @@ import java.util.Map;
import static org.elasticsearch.common.collect.Lists.*;
import static org.elasticsearch.common.collect.MapBuilder.*;
import static org.elasticsearch.index.mapper.xcontent.XContentMapperBuilders.*;
import static org.elasticsearch.index.mapper.xcontent.XContentTypeParsers.*;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
/**
* @author kimchy (shay.banon)
*/
public class MultiFieldMapper implements XContentMapper, IncludeInAllMapper {
public class MultiFieldMapper implements Mapper, AllFieldMapper.IncludeInAll {
public static final String CONTENT_TYPE = "multi_field";
@@ -48,13 +54,13 @@ public class MultiFieldMapper implements XContentMapper, IncludeInAllMapper {
public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
}
public static class Builder extends XContentMapper.Builder<Builder, MultiFieldMapper> {
public static class Builder extends Mapper.Builder<Builder, MultiFieldMapper> {
private ContentPath.Type pathType = Defaults.PATH_TYPE;
private final List<XContentMapper.Builder> mappersBuilders = newArrayList();
private final List<Mapper.Builder> mappersBuilders = newArrayList();
private XContentMapper.Builder defaultMapperBuilder;
private Mapper.Builder defaultMapperBuilder;
public Builder(String name) {
super(name);
@@ -66,8 +72,8 @@ public class MultiFieldMapper implements XContentMapper, IncludeInAllMapper {
return this;
}
public Builder add(XContentMapper.Builder builder) {
if (builder.name.equals(name)) {
public Builder add(Mapper.Builder builder) {
if (builder.name().equals(name)) {
defaultMapperBuilder = builder;
} else {
mappersBuilders.add(builder);
@@ -79,15 +85,15 @@ public class MultiFieldMapper implements XContentMapper, IncludeInAllMapper {
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
XContentMapper defaultMapper = null;
Mapper defaultMapper = null;
if (defaultMapperBuilder != null) {
defaultMapper = defaultMapperBuilder.build(context);
}
context.path().add(name);
Map<String, XContentMapper> mappers = new HashMap<String, XContentMapper>();
for (XContentMapper.Builder builder : mappersBuilders) {
XContentMapper mapper = builder.build(context);
Map<String, Mapper> mappers = new HashMap<String, Mapper>();
for (Mapper.Builder builder : mappersBuilders) {
Mapper mapper = builder.build(context);
mappers.put(mapper.name(), mapper);
}
context.path().remove();
@@ -98,8 +104,8 @@ public class MultiFieldMapper implements XContentMapper, IncludeInAllMapper {
}
}
public static class TypeParser implements XContentMapper.TypeParser {
@Override public XContentMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public static class TypeParser implements Mapper.TypeParser {
@Override public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
MultiFieldMapper.Builder builder = multiField(name);
for (Map.Entry<String, Object> entry : node.entrySet()) {
@@ -121,7 +127,7 @@ public class MultiFieldMapper implements XContentMapper, IncludeInAllMapper {
throw new MapperParsingException("No type specified for property [" + propName + "]");
}
XContentMapper.TypeParser typeParser = parserContext.typeParser(type);
Mapper.TypeParser typeParser = parserContext.typeParser(type);
if (typeParser == null) {
throw new MapperParsingException("No handler for type [" + type + "] declared on field [" + fieldName + "]");
}
@@ -139,24 +145,24 @@ public class MultiFieldMapper implements XContentMapper, IncludeInAllMapper {
private final Object mutex = new Object();
private volatile ImmutableMap<String, XContentMapper> mappers = ImmutableMap.of();
private volatile ImmutableMap<String, Mapper> mappers = ImmutableMap.of();
private volatile XContentMapper defaultMapper;
private volatile Mapper defaultMapper;
public MultiFieldMapper(String name, ContentPath.Type pathType, XContentMapper defaultMapper) {
this(name, pathType, new HashMap<String, XContentMapper>(), defaultMapper);
public MultiFieldMapper(String name, ContentPath.Type pathType, Mapper defaultMapper) {
this(name, pathType, new HashMap<String, Mapper>(), defaultMapper);
}
public MultiFieldMapper(String name, ContentPath.Type pathType, Map<String, XContentMapper> mappers, XContentMapper defaultMapper) {
public MultiFieldMapper(String name, ContentPath.Type pathType, Map<String, Mapper> mappers, Mapper defaultMapper) {
this.name = name;
this.pathType = pathType;
this.mappers = ImmutableMap.copyOf(mappers);
this.defaultMapper = defaultMapper;
// we disable the all in mappers, only the default one can be added
for (XContentMapper mapper : mappers.values()) {
if (mapper instanceof IncludeInAllMapper) {
((IncludeInAllMapper) mapper).includeInAll(false);
for (Mapper mapper : mappers.values()) {
if (mapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mapper).includeInAll(false);
}
}
}
@@ -166,14 +172,14 @@ public class MultiFieldMapper implements XContentMapper, IncludeInAllMapper {
}
@Override public void includeInAll(Boolean includeInAll) {
if (includeInAll != null && defaultMapper != null && (defaultMapper instanceof IncludeInAllMapper)) {
((IncludeInAllMapper) defaultMapper).includeInAll(includeInAll);
if (includeInAll != null && defaultMapper != null && (defaultMapper instanceof AllFieldMapper.IncludeInAll)) {
((AllFieldMapper.IncludeInAll) defaultMapper).includeInAll(includeInAll);
}
}
@Override public void includeInAllIfNotSet(Boolean includeInAll) {
if (includeInAll != null && defaultMapper != null && (defaultMapper instanceof IncludeInAllMapper)) {
((IncludeInAllMapper) defaultMapper).includeInAllIfNotSet(includeInAll);
if (includeInAll != null && defaultMapper != null && (defaultMapper instanceof AllFieldMapper.IncludeInAll)) {
((AllFieldMapper.IncludeInAll) defaultMapper).includeInAllIfNotSet(includeInAll);
}
}
@@ -181,11 +187,11 @@ public class MultiFieldMapper implements XContentMapper, IncludeInAllMapper {
return pathType;
}
public XContentMapper defaultMapper() {
public Mapper defaultMapper() {
return this.defaultMapper;
}
public ImmutableMap<String, XContentMapper> mappers() {
public ImmutableMap<String, Mapper> mappers() {
return this.mappers;
}
@@ -199,7 +205,7 @@ public class MultiFieldMapper implements XContentMapper, IncludeInAllMapper {
}
context.path().add(name);
for (XContentMapper mapper : mappers.values()) {
for (Mapper mapper : mappers.values()) {
mapper.parse(context);
}
context.path().remove();
@@ -207,7 +213,7 @@ public class MultiFieldMapper implements XContentMapper, IncludeInAllMapper {
context.path().pathType(origPathType);
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
if (!(mergeWith instanceof MultiFieldMapper) && !(mergeWith instanceof AbstractFieldMapper)) {
mergeContext.addConflict("Can't merge a non multi_field / non simple mapping [" + mergeWith.name() + "] with a multi_field mapping [" + name() + "]");
return;
@@ -238,14 +244,14 @@ public class MultiFieldMapper implements XContentMapper, IncludeInAllMapper {
}
// merge all the other mappers
for (XContentMapper mergeWithMapper : mergeWithMultiField.mappers.values()) {
XContentMapper mergeIntoMapper = mappers.get(mergeWithMapper.name());
for (Mapper mergeWithMapper : mergeWithMultiField.mappers.values()) {
Mapper mergeIntoMapper = mappers.get(mergeWithMapper.name());
if (mergeIntoMapper == null) {
// no mapping, simply add it if not simulating
if (!mergeContext.mergeFlags().simulate()) {
// disable the mapper from being in all, only the default mapper is in all
if (mergeWithMapper instanceof IncludeInAllMapper) {
((IncludeInAllMapper) mergeWithMapper).includeInAll(false);
if (mergeWithMapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mergeWithMapper).includeInAll(false);
}
mappers = newMapBuilder(mappers).put(mergeWithMapper.name(), mergeWithMapper).immutableMap();
if (mergeWithMapper instanceof AbstractFieldMapper) {
@@ -264,7 +270,7 @@ public class MultiFieldMapper implements XContentMapper, IncludeInAllMapper {
if (defaultMapper != null) {
defaultMapper.close();
}
for (XContentMapper mapper : mappers.values()) {
for (Mapper mapper : mappers.values()) {
mapper.close();
}
}
@@ -273,7 +279,7 @@ public class MultiFieldMapper implements XContentMapper, IncludeInAllMapper {
if (defaultMapper != null) {
defaultMapper.traverse(fieldMapperListener);
}
for (XContentMapper mapper : mappers.values()) {
for (Mapper mapper : mappers.values()) {
mapper.traverse(fieldMapperListener);
}
}
@@ -289,7 +295,7 @@ public class MultiFieldMapper implements XContentMapper, IncludeInAllMapper {
if (defaultMapper != null) {
defaultMapper.toXContent(builder, params);
}
for (XContentMapper mapper : mappers.values()) {
for (Mapper mapper : mappers.values()) {
mapper.toXContent(builder, params);
}
builder.endObject();

View File

@@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.object;
/**
* A marker interface indicating that this mapper can handle array value, and the array

View File

@@ -17,12 +17,13 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.object;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MapperParsingException;
import java.util.ArrayList;

View File

@@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.object;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.Strings;
@@ -28,6 +28,9 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.mapper.multifield.MultiFieldMapper;
import java.io.IOException;
import java.util.HashMap;
@@ -39,14 +42,14 @@ import static org.elasticsearch.common.collect.ImmutableMap.*;
import static org.elasticsearch.common.collect.Lists.*;
import static org.elasticsearch.common.collect.MapBuilder.*;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.*;
import static org.elasticsearch.index.mapper.xcontent.XContentMapperBuilders.*;
import static org.elasticsearch.index.mapper.xcontent.XContentTypeParsers.*;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
/**
* @author kimchy (shay.banon)
*/
@ThreadSafe
public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
public static final String CONTENT_TYPE = "object";
@@ -62,7 +65,7 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
STRICT
}
public static class Builder<T extends Builder, Y extends ObjectMapper> extends XContentMapper.Builder<T, Y> {
public static class Builder<T extends Builder, Y extends ObjectMapper> extends Mapper.Builder<T, Y> {
protected boolean enabled = Defaults.ENABLED;
@@ -72,7 +75,7 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
protected Boolean includeInAll;
protected final List<XContentMapper.Builder> mappersBuilders = newArrayList();
protected final List<Mapper.Builder> mappersBuilders = newArrayList();
public Builder(String name) {
super(name);
@@ -99,7 +102,7 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
return builder;
}
public T add(XContentMapper.Builder builder) {
public T add(Mapper.Builder builder) {
mappersBuilders.add(builder);
return this.builder;
}
@@ -109,9 +112,9 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
context.path().pathType(pathType);
context.path().add(name);
Map<String, XContentMapper> mappers = new HashMap<String, XContentMapper>();
for (XContentMapper.Builder builder : mappersBuilders) {
XContentMapper mapper = builder.build(context);
Map<String, Mapper> mappers = new HashMap<String, Mapper>();
for (Mapper.Builder builder : mappersBuilders) {
Mapper mapper = builder.build(context);
mappers.put(mapper.name(), mapper);
}
ObjectMapper objectMapper = createMapper(name, enabled, dynamic, pathType, mappers);
@@ -124,13 +127,13 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
return (Y) objectMapper;
}
protected ObjectMapper createMapper(String name, boolean enabled, Dynamic dynamic, ContentPath.Type pathType, Map<String, XContentMapper> mappers) {
protected ObjectMapper createMapper(String name, boolean enabled, Dynamic dynamic, ContentPath.Type pathType, Map<String, Mapper> mappers) {
return new ObjectMapper(name, enabled, dynamic, pathType, mappers);
}
}
public static class TypeParser implements XContentMapper.TypeParser {
@Override public XContentMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
public static class TypeParser implements Mapper.TypeParser {
@Override public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Map<String, Object> objectNode = node;
ObjectMapper.Builder builder = createBuilder(name);
@@ -185,7 +188,7 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
}
}
XContentMapper.TypeParser typeParser = parserContext.typeParser(type);
Mapper.TypeParser typeParser = parserContext.typeParser(type);
if (typeParser == null) {
throw new MapperParsingException("No handler for type [" + type + "] declared on field [" + propName + "]");
}
@@ -212,7 +215,7 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
private Boolean includeInAll;
private volatile ImmutableMap<String, XContentMapper> mappers = ImmutableMap.of();
private volatile ImmutableMap<String, Mapper> mappers = ImmutableMap.of();
private final Object mutex = new Object();
@@ -225,7 +228,7 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
this(name, enabled, dynamic, pathType, null);
}
ObjectMapper(String name, boolean enabled, Dynamic dynamic, ContentPath.Type pathType, Map<String, XContentMapper> mappers) {
ObjectMapper(String name, boolean enabled, Dynamic dynamic, ContentPath.Type pathType, Map<String, Mapper> mappers) {
this.name = name;
this.enabled = enabled;
this.dynamic = dynamic;
@@ -245,9 +248,9 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
}
this.includeInAll = includeInAll;
// when called from outside, apply this on all the inner mappers
for (XContentMapper mapper : mappers.values()) {
if (mapper instanceof IncludeInAllMapper) {
((IncludeInAllMapper) mapper).includeInAll(includeInAll);
for (Mapper mapper : mappers.values()) {
if (mapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mapper).includeInAll(includeInAll);
}
}
}
@@ -257,16 +260,16 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
this.includeInAll = includeInAll;
}
// when called from outside, apply this on all the inner mappers
for (XContentMapper mapper : mappers.values()) {
if (mapper instanceof IncludeInAllMapper) {
((IncludeInAllMapper) mapper).includeInAllIfNotSet(includeInAll);
for (Mapper mapper : mappers.values()) {
if (mapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll);
}
}
}
public ObjectMapper putMapper(XContentMapper mapper) {
if (mapper instanceof IncludeInAllMapper) {
((IncludeInAllMapper) mapper).includeInAllIfNotSet(includeInAll);
public ObjectMapper putMapper(Mapper mapper) {
if (mapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll);
}
synchronized (mutex) {
mappers = newMapBuilder(mappers).put(mapper.name(), mapper).immutableMap();
@@ -275,7 +278,7 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
}
@Override public void traverse(FieldMapperListener fieldMapperListener) {
for (XContentMapper mapper : mappers.values()) {
for (Mapper mapper : mappers.values()) {
mapper.traverse(fieldMapperListener);
}
}
@@ -330,7 +333,7 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
private void serializeNullValue(ParseContext context, String lastFieldName) throws IOException {
// we can only handle null values if we have mappings for them
XContentMapper mapper = mappers.get(lastFieldName);
Mapper mapper = mappers.get(lastFieldName);
if (mapper != null) {
mapper.parse(context);
}
@@ -339,7 +342,7 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
private void serializeObject(final ParseContext context, String currentFieldName) throws IOException {
context.path().add(currentFieldName);
XContentMapper objectMapper = mappers.get(currentFieldName);
Mapper objectMapper = mappers.get(currentFieldName);
if (objectMapper != null) {
objectMapper.parse(context);
} else {
@@ -357,9 +360,9 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
if (objectMapper != null) {
objectMapper.parse(context);
} else {
XContentMapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "object");
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "object");
if (builder == null) {
builder = XContentMapperBuilders.object(currentFieldName).enabled(true).dynamic(dynamic).pathType(pathType);
builder = MapperBuilders.object(currentFieldName).enabled(true).dynamic(dynamic).pathType(pathType);
}
// remove the current field name from path, since the object builder adds it as well...
context.path().remove();
@@ -391,7 +394,7 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
}
private void serializeArray(ParseContext context, String lastFieldName) throws IOException {
XContentMapper mapper = mappers.get(lastFieldName);
Mapper mapper = mappers.get(lastFieldName);
if (mapper != null && mapper instanceof ArrayValueMapperParser) {
mapper.parse(context);
} else {
@@ -414,7 +417,7 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
}
private void serializeValue(final ParseContext context, String currentFieldName, XContentParser.Token token) throws IOException {
XContentMapper mapper = mappers.get(currentFieldName);
Mapper mapper = mappers.get(currentFieldName);
if (mapper != null) {
mapper.parse(context);
return;
@@ -449,7 +452,7 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
for (FormatDateTimeFormatter dateTimeFormatter : context.root().dateTimeFormatters()) {
try {
dateTimeFormatter.parser().parseMillis(text);
XContentMapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "date");
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "date");
if (builder == null) {
builder = dateField(currentFieldName).dateTimeFormatter(dateTimeFormatter);
}
@@ -477,7 +480,7 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
// }
// }
if (!resolved) {
XContentMapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string");
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string");
if (builder == null) {
builder = stringField(currentFieldName);
}
@@ -487,53 +490,53 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
XContentParser.NumberType numberType = context.parser().numberType();
if (numberType == XContentParser.NumberType.INT) {
if (context.parser().estimatedNumberType()) {
XContentMapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
if (builder == null) {
builder = longField(currentFieldName);
}
mapper = builder.build(builderContext);
} else {
XContentMapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "integer");
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "integer");
if (builder == null) {
builder = integerField(currentFieldName);
}
mapper = builder.build(builderContext);
}
} else if (numberType == XContentParser.NumberType.LONG) {
XContentMapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
if (builder == null) {
builder = longField(currentFieldName);
}
mapper = builder.build(builderContext);
} else if (numberType == XContentParser.NumberType.FLOAT) {
if (context.parser().estimatedNumberType()) {
XContentMapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
if (builder == null) {
builder = doubleField(currentFieldName);
}
mapper = builder.build(builderContext);
} else {
XContentMapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "float");
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "float");
if (builder == null) {
builder = floatField(currentFieldName);
}
mapper = builder.build(builderContext);
}
} else if (numberType == XContentParser.NumberType.DOUBLE) {
XContentMapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
if (builder == null) {
builder = doubleField(currentFieldName);
}
mapper = builder.build(builderContext);
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
XContentMapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "boolean");
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "boolean");
if (builder == null) {
builder = booleanField(currentFieldName);
}
mapper = builder.build(builderContext);
} else {
XContentMapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, null);
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, null);
if (builder != null) {
mapper = builder.build(builderContext);
} else {
@@ -553,7 +556,7 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
}
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
if (!(mergeWith instanceof ObjectMapper)) {
mergeContext.addConflict("Can't merge a non object mapping [" + mergeWith.name() + "] with an object mapping [" + name() + "]");
return;
@@ -563,8 +566,8 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
doMerge(mergeWithObject, mergeContext);
synchronized (mutex) {
for (XContentMapper mergeWithMapper : mergeWithObject.mappers.values()) {
XContentMapper mergeIntoMapper = mappers.get(mergeWithMapper.name());
for (Mapper mergeWithMapper : mergeWithObject.mappers.values()) {
Mapper mergeIntoMapper = mappers.get(mergeWithMapper.name());
if (mergeIntoMapper == null) {
// no mapping, simply add it if not simulating
if (!mergeContext.mergeFlags().simulate()) {
@@ -580,7 +583,7 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
if (!mergeContext.mergeFlags().simulate()) {
putMapper(mergeWithMultiField);
// now, raise events for all mappers
for (XContentMapper mapper : mergeWithMultiField.mappers().values()) {
for (Mapper mapper : mergeWithMultiField.mappers().values()) {
if (mapper instanceof AbstractFieldMapper) {
mergeContext.docMapper().addFieldMapper((FieldMapper) mapper);
}
@@ -599,17 +602,17 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
}
@Override public void close() {
for (XContentMapper mapper : mappers.values()) {
for (Mapper mapper : mappers.values()) {
mapper.close();
}
}
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
toXContent(builder, params, null, XContentMapper.EMPTY_ARRAY);
toXContent(builder, params, null, Mapper.EMPTY_ARRAY);
return builder;
}
public void toXContent(XContentBuilder builder, Params params, ToXContent custom, XContentMapper... additionalMappers) throws IOException {
public void toXContent(XContentBuilder builder, Params params, ToXContent custom, Mapper... additionalMappers) throws IOException {
builder.startObject(name);
if (mappers.isEmpty()) { // only write the object content type if there are no properties, otherwise, it is automatically detected
builder.field("type", CONTENT_TYPE);
@@ -642,23 +645,23 @@ public class ObjectMapper implements XContentMapper, IncludeInAllMapper {
doXContent(builder, params);
// sort the mappers so we get consistent serialization format
TreeMap<String, XContentMapper> sortedMappers = new TreeMap<String, XContentMapper>(mappers);
TreeMap<String, Mapper> sortedMappers = new TreeMap<String, Mapper>(mappers);
// check internal mappers first (this is only relevant for root object)
for (XContentMapper mapper : sortedMappers.values()) {
for (Mapper mapper : sortedMappers.values()) {
if (mapper instanceof InternalMapper) {
mapper.toXContent(builder, params);
}
}
if (additionalMappers != null) {
for (XContentMapper mapper : additionalMappers) {
for (Mapper mapper : additionalMappers) {
mapper.toXContent(builder, params);
}
}
if (!mappers.isEmpty()) {
builder.startObject("properties");
for (XContentMapper mapper : sortedMappers.values()) {
for (Mapper mapper : sortedMappers.values()) {
if (!(mapper instanceof InternalMapper)) {
mapper.toXContent(builder, params);
}

View File

@@ -17,13 +17,19 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.object;
import org.elasticsearch.common.collect.Lists;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import java.io.IOException;
import java.util.Arrays;
@@ -31,7 +37,7 @@ import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.collect.Lists.*;
import static org.elasticsearch.index.mapper.xcontent.XContentTypeParsers.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
/**
* @author kimchy (shay.banon)
@@ -92,7 +98,7 @@ public class RootObjectMapper extends ObjectMapper {
}
@Override protected ObjectMapper createMapper(String name, boolean enabled, Dynamic dynamic, ContentPath.Type pathType, Map<String, XContentMapper> mappers) {
@Override protected ObjectMapper createMapper(String name, boolean enabled, Dynamic dynamic, ContentPath.Type pathType, Map<String, Mapper> mappers) {
FormatDateTimeFormatter[] dates = null;
if (dateTimeFormatters == null) {
dates = new FormatDateTimeFormatter[0];
@@ -162,7 +168,7 @@ public class RootObjectMapper extends ObjectMapper {
private volatile DynamicTemplate dynamicTemplates[];
RootObjectMapper(String name, boolean enabled, Dynamic dynamic, ContentPath.Type pathType, Map<String, XContentMapper> mappers,
RootObjectMapper(String name, boolean enabled, Dynamic dynamic, ContentPath.Type pathType, Map<String, Mapper> mappers,
FormatDateTimeFormatter[] dateTimeFormatters, DynamicTemplate dynamicTemplates[]) {
super(name, enabled, dynamic, pathType, mappers);
this.dynamicTemplates = dynamicTemplates;
@@ -173,12 +179,12 @@ public class RootObjectMapper extends ObjectMapper {
return dateTimeFormatters;
}
public XContentMapper.Builder findTemplateBuilder(ParseContext context, String name, String dynamicType) {
public Mapper.Builder findTemplateBuilder(ParseContext context, String name, String dynamicType) {
DynamicTemplate dynamicTemplate = findTemplate(context.path(), name, dynamicType);
if (dynamicTemplate == null) {
return null;
}
XContentMapper.TypeParser.ParserContext parserContext = context.docMapperParser().parserContext();
Mapper.TypeParser.ParserContext parserContext = context.docMapperParser().parserContext();
return parserContext.typeParser(dynamicTemplate.mappingType(dynamicType)).parse(name, dynamicTemplate.mappingForName(name, dynamicType), parserContext);
}
@@ -212,7 +218,7 @@ public class RootObjectMapper extends ObjectMapper {
}
}
@Override protected void doXContent(XContentBuilder builder, Params params) throws IOException {
@Override protected void doXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
if (dateTimeFormatters != Defaults.DATE_TIME_FORMATTERS) {
if (dateTimeFormatters.length > 0) {
builder.startArray("date_formats");

View File

@@ -17,10 +17,11 @@
* under the License.
*/
package org.elasticsearch.index.mapper;
package org.elasticsearch.index.mapper.selector;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
/**
* An optimized field selector that loads just the uid and the source.

View File

@@ -17,10 +17,12 @@
* under the License.
*/
package org.elasticsearch.index.mapper;
package org.elasticsearch.index.mapper.selector;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMappers;
import java.util.HashSet;

View File

@@ -17,10 +17,12 @@
* under the License.
*/
package org.elasticsearch.index.mapper;
package org.elasticsearch.index.mapper.selector;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
/**
* An optimized field selector that loads just the uid and the source.

View File

@@ -17,10 +17,11 @@
* under the License.
*/
package org.elasticsearch.index.mapper;
package org.elasticsearch.index.mapper.selector;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
/**
* An optimized field selector that loads just the uid.

View File

@@ -1,30 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
/**
* @author kimchy (shay.banon)
*/
public interface IncludeInAllMapper extends XContentMapper {
void includeInAll(Boolean includeInAll);
void includeInAllIfNotSet(Boolean includeInAll);
}
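The top-level IncludeInAllMapper deleted above is the counterpart of the AllFieldMapper.IncludeInAll casts used in the multi_field and object mappers earlier in this diff. Below is a sketch of how that nested replacement presumably sits inside the relocated AllFieldMapper; the extends Mapper bound and everything else about AllFieldMapper beyond the two hooks are assumptions, since this commit only shows the call sites.

// org.elasticsearch.index.mapper.internal.AllFieldMapper (rest of the class elided)
public class AllFieldMapper {

    // nested replacement for the removed IncludeInAllMapper: same two hooks, now reached
    // through instanceof AllFieldMapper.IncludeInAll checks in ObjectMapper and MultiFieldMapper
    public static interface IncludeInAll extends Mapper {

        void includeInAll(Boolean includeInAll);

        void includeInAllIfNotSet(Boolean includeInAll);
    }
}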

View File

@@ -1,601 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.search.Filter;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Preconditions;
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.compress.lzf.LZF;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.CachedStreamInput;
import org.elasticsearch.common.io.stream.LZFStreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.*;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.*;
import java.io.IOException;
import java.util.List;
import static org.elasticsearch.common.collect.Lists.*;
/**
* @author kimchy (shay.banon)
*/
public class XContentDocumentMapper implements DocumentMapper, ToXContent {
public static class Builder {
private UidFieldMapper uidFieldMapper = new UidFieldMapper();
private IdFieldMapper idFieldMapper = new IdFieldMapper();
private TypeFieldMapper typeFieldMapper = new TypeFieldMapper();
private IndexFieldMapper indexFieldMapper = new IndexFieldMapper();
private SourceFieldMapper sourceFieldMapper = new SourceFieldMapper();
private SizeFieldMapper sizeFieldMapper = new SizeFieldMapper();
private RoutingFieldMapper routingFieldMapper = new RoutingFieldMapper();
private BoostFieldMapper boostFieldMapper = new BoostFieldMapper();
private AllFieldMapper allFieldMapper = new AllFieldMapper();
private AnalyzerMapper analyzerMapper = new AnalyzerMapper();
private ParentFieldMapper parentFieldMapper = null;
private NamedAnalyzer indexAnalyzer;
private NamedAnalyzer searchAnalyzer;
private final String index;
private final RootObjectMapper rootObjectMapper;
private ImmutableMap<String, Object> meta = ImmutableMap.of();
private XContentMapper.BuilderContext builderContext = new XContentMapper.BuilderContext(new ContentPath(1));
public Builder(String index, @Nullable Settings indexSettings, RootObjectMapper.Builder builder) {
this.index = index;
this.rootObjectMapper = builder.build(builderContext);
if (indexSettings != null) {
String idIndexed = indexSettings.get("index.mapping._id.indexed");
if (idIndexed != null && Booleans.parseBoolean(idIndexed, false)) {
idFieldMapper = new IdFieldMapper(Field.Index.NOT_ANALYZED);
}
}
}
public Builder meta(ImmutableMap<String, Object> meta) {
this.meta = meta;
return this;
}
public Builder sourceField(SourceFieldMapper.Builder builder) {
this.sourceFieldMapper = builder.build(builderContext);
return this;
}
public Builder sizeField(SizeFieldMapper.Builder builder) {
this.sizeFieldMapper = builder.build(builderContext);
return this;
}
public Builder idField(IdFieldMapper.Builder builder) {
this.idFieldMapper = builder.build(builderContext);
return this;
}
public Builder uidField(UidFieldMapper.Builder builder) {
this.uidFieldMapper = builder.build(builderContext);
return this;
}
public Builder typeField(TypeFieldMapper.Builder builder) {
this.typeFieldMapper = builder.build(builderContext);
return this;
}
public Builder indexField(IndexFieldMapper.Builder builder) {
this.indexFieldMapper = builder.build(builderContext);
return this;
}
public Builder routingField(RoutingFieldMapper.Builder builder) {
this.routingFieldMapper = builder.build(builderContext);
return this;
}
public Builder parentFiled(ParentFieldMapper.Builder builder) {
this.parentFieldMapper = builder.build(builderContext);
return this;
}
public Builder boostField(BoostFieldMapper.Builder builder) {
this.boostFieldMapper = builder.build(builderContext);
return this;
}
public Builder allField(AllFieldMapper.Builder builder) {
this.allFieldMapper = builder.build(builderContext);
return this;
}
public Builder analyzerField(AnalyzerMapper.Builder builder) {
this.analyzerMapper = builder.build(builderContext);
return this;
}
public Builder indexAnalyzer(NamedAnalyzer indexAnalyzer) {
this.indexAnalyzer = indexAnalyzer;
return this;
}
public boolean hasIndexAnalyzer() {
return indexAnalyzer != null;
}
public Builder searchAnalyzer(NamedAnalyzer searchAnalyzer) {
this.searchAnalyzer = searchAnalyzer;
return this;
}
public boolean hasSearchAnalyzer() {
return searchAnalyzer != null;
}
public XContentDocumentMapper build(XContentDocumentMapperParser docMapperParser) {
Preconditions.checkNotNull(rootObjectMapper, "Mapper builder must have the root object mapper set");
return new XContentDocumentMapper(index, docMapperParser, rootObjectMapper, meta, uidFieldMapper, idFieldMapper, typeFieldMapper, indexFieldMapper,
sourceFieldMapper, sizeFieldMapper, parentFieldMapper, routingFieldMapper, allFieldMapper, analyzerMapper, indexAnalyzer, searchAnalyzer, boostFieldMapper);
}
}
private ThreadLocal<ParseContext> cache = new ThreadLocal<ParseContext>() {
@Override protected ParseContext initialValue() {
return new ParseContext(index, docMapperParser, XContentDocumentMapper.this, new ContentPath(0));
}
};
private final String index;
private final String type;
private final XContentDocumentMapperParser docMapperParser;
private volatile ImmutableMap<String, Object> meta;
private volatile CompressedString mappingSource;
private final UidFieldMapper uidFieldMapper;
private final IdFieldMapper idFieldMapper;
private final TypeFieldMapper typeFieldMapper;
private final IndexFieldMapper indexFieldMapper;
private final SourceFieldMapper sourceFieldMapper;
private final SizeFieldMapper sizeFieldMapper;
private final RoutingFieldMapper routingFieldMapper;
private final ParentFieldMapper parentFieldMapper;
private final BoostFieldMapper boostFieldMapper;
private final AllFieldMapper allFieldMapper;
private final AnalyzerMapper analyzerMapper;
private final RootObjectMapper rootObjectMapper;
private final NamedAnalyzer indexAnalyzer;
private final NamedAnalyzer searchAnalyzer;
private volatile DocumentFieldMappers fieldMappers;
private final List<FieldMapperListener> fieldMapperListeners = newArrayList();
private final Filter typeFilter;
private final Object mutex = new Object();
public XContentDocumentMapper(String index, XContentDocumentMapperParser docMapperParser,
RootObjectMapper rootObjectMapper,
ImmutableMap<String, Object> meta,
UidFieldMapper uidFieldMapper,
IdFieldMapper idFieldMapper,
TypeFieldMapper typeFieldMapper,
IndexFieldMapper indexFieldMapper,
SourceFieldMapper sourceFieldMapper,
SizeFieldMapper sizeFieldMapper,
@Nullable ParentFieldMapper parentFieldMapper,
RoutingFieldMapper routingFieldMapper,
AllFieldMapper allFieldMapper,
AnalyzerMapper analyzerMapper,
NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer,
@Nullable BoostFieldMapper boostFieldMapper) {
this.index = index;
this.type = rootObjectMapper.name();
this.docMapperParser = docMapperParser;
this.meta = meta;
this.rootObjectMapper = rootObjectMapper;
this.uidFieldMapper = uidFieldMapper;
this.idFieldMapper = idFieldMapper;
this.typeFieldMapper = typeFieldMapper;
this.indexFieldMapper = indexFieldMapper;
this.sourceFieldMapper = sourceFieldMapper;
this.sizeFieldMapper = sizeFieldMapper;
this.parentFieldMapper = parentFieldMapper;
this.routingFieldMapper = routingFieldMapper;
this.allFieldMapper = allFieldMapper;
this.analyzerMapper = analyzerMapper;
this.boostFieldMapper = boostFieldMapper;
this.indexAnalyzer = indexAnalyzer;
this.searchAnalyzer = searchAnalyzer;
this.typeFilter = typeMapper().fieldFilter(type);
rootObjectMapper.putMapper(idFieldMapper);
if (boostFieldMapper != null) {
rootObjectMapper.putMapper(boostFieldMapper);
}
if (parentFieldMapper != null) {
rootObjectMapper.putMapper(parentFieldMapper);
// also, mark the routing as required!
routingFieldMapper.markAsRequired();
}
rootObjectMapper.putMapper(routingFieldMapper);
final List<FieldMapper> tempFieldMappers = newArrayList();
// add the basic ones
if (indexFieldMapper.enabled()) {
tempFieldMappers.add(indexFieldMapper);
}
tempFieldMappers.add(typeFieldMapper);
tempFieldMappers.add(sourceFieldMapper);
tempFieldMappers.add(sizeFieldMapper);
tempFieldMappers.add(uidFieldMapper);
tempFieldMappers.add(allFieldMapper);
// now traverse and get all the statically defined ones
rootObjectMapper.traverse(new FieldMapperListener() {
@Override public void fieldMapper(FieldMapper fieldMapper) {
tempFieldMappers.add(fieldMapper);
}
});
this.fieldMappers = new DocumentFieldMappers(this, tempFieldMappers);
refreshSource();
}
@Override public String type() {
return this.type;
}
@Override public ImmutableMap<String, Object> meta() {
return this.meta;
}
@Override public CompressedString mappingSource() {
return this.mappingSource;
}
public RootObjectMapper root() {
return this.rootObjectMapper;
}
@Override public org.elasticsearch.index.mapper.UidFieldMapper uidMapper() {
return this.uidFieldMapper;
}
@Override public org.elasticsearch.index.mapper.IdFieldMapper idMapper() {
return this.idFieldMapper;
}
@Override public org.elasticsearch.index.mapper.IndexFieldMapper indexMapper() {
return this.indexFieldMapper;
}
@Override public org.elasticsearch.index.mapper.TypeFieldMapper typeMapper() {
return this.typeFieldMapper;
}
@Override public org.elasticsearch.index.mapper.SourceFieldMapper sourceMapper() {
return this.sourceFieldMapper;
}
@Override public org.elasticsearch.index.mapper.BoostFieldMapper boostMapper() {
return this.boostFieldMapper;
}
@Override public org.elasticsearch.index.mapper.AllFieldMapper allFieldMapper() {
return this.allFieldMapper;
}
@Override public org.elasticsearch.index.mapper.RoutingFieldMapper routingFieldMapper() {
return this.routingFieldMapper;
}
@Override public org.elasticsearch.index.mapper.ParentFieldMapper parentFieldMapper() {
return this.parentFieldMapper;
}
@Override public Analyzer indexAnalyzer() {
return this.indexAnalyzer;
}
@Override public Analyzer searchAnalyzer() {
return this.searchAnalyzer;
}
@Override public Filter typeFilter() {
return this.typeFilter;
}
@Override public DocumentFieldMappers mappers() {
return this.fieldMappers;
}
@Override public ParsedDocument parse(byte[] source) throws MapperParsingException {
return parse(SourceToParse.source(source));
}
@Override public ParsedDocument parse(String type, String id, byte[] source) throws MapperParsingException {
return parse(SourceToParse.source(source).type(type).id(id));
}
@Override public ParsedDocument parse(SourceToParse source) throws MapperParsingException {
return parse(source, null);
}
@Override public ParsedDocument parse(SourceToParse source, @Nullable ParseListener listener) throws MapperParsingException {
ParseContext context = cache.get();
if (source.type() != null && !source.type().equals(this.type)) {
throw new MapperParsingException("Type mismatch, provide type [" + source.type() + "] but mapper is of type [" + this.type + "]");
}
source.type(this.type);
XContentParser parser = source.parser();
try {
if (parser == null) {
if (LZF.isCompressed(source.source())) {
BytesStreamInput siBytes = new BytesStreamInput(source.source());
LZFStreamInput siLzf = CachedStreamInput.cachedLzf(siBytes);
XContentType contentType = XContentFactory.xContentType(siLzf);
siLzf.resetToBufferStart();
parser = XContentFactory.xContent(contentType).createParser(siLzf);
} else {
parser = XContentFactory.xContent(source.source()).createParser(source.source());
}
}
context.reset(parser, new Document(), type, source.source(), source.flyweight(), listener);
// will result in START_OBJECT
int countDownTokens = 0;
XContentParser.Token token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new MapperParsingException("Malformed content, must start with an object");
}
token = parser.nextToken();
if (token != XContentParser.Token.FIELD_NAME) {
throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist");
}
if (parser.currentName().equals(type)) {
// first field is the same as the type, this might be because the type is provided, and the object exists within it
// or because there is a valid field that by chance is named as the type
// Note, in this case, we only handle plain value types, an object type will be analyzed as if it was the type itself
// and other same level fields will be ignored
token = parser.nextToken();
countDownTokens++;
// commented out, allow for same type with START_OBJECT, we do our best to handle it except for the above corner case
// if (token != XContentParser.Token.START_OBJECT) {
// throw new MapperException("Malformed content, a field with the same name as the type must be an object with the properties/fields within it");
// }
}
if (sizeFieldMapper.enabled()) {
context.externalValue(source.source().length);
sizeFieldMapper.parse(context);
}
if (sourceFieldMapper.enabled()) {
sourceFieldMapper.parse(context);
}
// set the id if we have it so we can validate it later on; also, add the uid if we can
if (source.id() != null) {
context.id(source.id());
uidFieldMapper.parse(context);
}
typeFieldMapper.parse(context);
if (source.routing() != null) {
context.externalValue(source.routing());
routingFieldMapper.parse(context);
}
indexFieldMapper.parse(context);
rootObjectMapper.parse(context);
for (int i = 0; i < countDownTokens; i++) {
parser.nextToken();
}
// if we did not get the id, we need to parse the uid into the document now, after it was added
if (source.id() == null) {
if (context.id() == null) {
if (!source.flyweight()) {
throw new MapperParsingException("No id found while parsing the content source");
}
} else {
uidFieldMapper.parse(context);
}
}
if (context.parsedIdState() != ParseContext.ParsedIdState.PARSED) {
if (context.id() == null) {
if (!source.flyweight()) {
throw new MapperParsingException("No id mapping with [_id] found in the content, and not explicitly set");
}
} else {
// mark it as external, so we can parse it
context.parsedId(ParseContext.ParsedIdState.EXTERNAL);
idFieldMapper.parse(context);
}
}
if (parentFieldMapper != null) {
context.externalValue(source.parent());
parentFieldMapper.parse(context);
}
analyzerMapper.parse(context);
allFieldMapper.parse(context);
// validate aggregated mappers (TODO: needs to be added as a phase to any field mapper)
routingFieldMapper.validate(context, source.routing());
} catch (IOException e) {
throw new MapperParsingException("Failed to parse", e);
} finally {
// only close the parser when it's not provided externally
if (source.parser() == null && parser != null) {
parser.close();
}
}
ParsedDocument doc = new ParsedDocument(context.uid(), context.id(), context.type(), source.routing(), context.doc(), context.analyzer(),
context.source(), context.mappersAdded()).parent(source.parent());
// reset the context to free up memory
context.reset(null, null, null, null, false, null);
return doc;
}
void addFieldMapper(FieldMapper fieldMapper) {
synchronized (mutex) {
fieldMappers = fieldMappers.concat(this, fieldMapper);
for (FieldMapperListener listener : fieldMapperListeners) {
listener.fieldMapper(fieldMapper);
}
}
}
@Override public void addFieldMapperListener(FieldMapperListener fieldMapperListener, boolean includeExisting) {
synchronized (mutex) {
fieldMapperListeners.add(fieldMapperListener);
if (includeExisting) {
if (indexFieldMapper.enabled()) {
fieldMapperListener.fieldMapper(indexFieldMapper);
}
fieldMapperListener.fieldMapper(sourceFieldMapper);
fieldMapperListener.fieldMapper(sizeFieldMapper);
fieldMapperListener.fieldMapper(typeFieldMapper);
fieldMapperListener.fieldMapper(uidFieldMapper);
fieldMapperListener.fieldMapper(allFieldMapper);
rootObjectMapper.traverse(fieldMapperListener);
}
}
}
@Override public synchronized MergeResult merge(DocumentMapper mergeWith, MergeFlags mergeFlags) {
XContentDocumentMapper xContentMergeWith = (XContentDocumentMapper) mergeWith;
MergeContext mergeContext = new MergeContext(this, mergeFlags);
rootObjectMapper.merge(xContentMergeWith.rootObjectMapper, mergeContext);
allFieldMapper.merge(xContentMergeWith.allFieldMapper, mergeContext);
analyzerMapper.merge(xContentMergeWith.analyzerMapper, mergeContext);
sourceFieldMapper.merge(xContentMergeWith.sourceFieldMapper, mergeContext);
sizeFieldMapper.merge(xContentMergeWith.sizeFieldMapper, mergeContext);
if (!mergeFlags.simulate()) {
// let the mapper we merge with override the attributes
meta = mergeWith.meta();
// update the source of the merged one
refreshSource();
}
return new MergeResult(mergeContext.buildConflicts());
}
@Override public void refreshSource() throws FailedToGenerateSourceMapperException {
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.startObject();
toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
this.mappingSource = new CompressedString(builder.string());
} catch (Exception e) {
throw new FailedToGenerateSourceMapperException(e.getMessage(), e);
}
}
@Override public void close() {
cache.remove();
rootObjectMapper.close();
idFieldMapper.close();
indexFieldMapper.close();
typeFieldMapper.close();
allFieldMapper.close();
analyzerMapper.close();
sourceFieldMapper.close();
sizeFieldMapper.close();
}
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
rootObjectMapper.toXContent(builder, params, new ToXContent() {
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (indexAnalyzer != null && searchAnalyzer != null && indexAnalyzer.name().equals(searchAnalyzer.name()) && !indexAnalyzer.name().startsWith("_")) {
if (!indexAnalyzer.name().equals("default")) {
// same analyzers, output it once
builder.field("analyzer", indexAnalyzer.name());
}
} else {
if (indexAnalyzer != null && !indexAnalyzer.name().startsWith("_")) {
if (!indexAnalyzer.name().equals("default")) {
builder.field("index_analyzer", indexAnalyzer.name());
}
}
if (searchAnalyzer != null && !searchAnalyzer.name().startsWith("_")) {
if (!searchAnalyzer.name().equals("default")) {
builder.field("search_analyzer", searchAnalyzer.name());
}
}
}
if (meta != null && !meta.isEmpty()) {
builder.field("_meta", meta());
}
return builder;
}
// no need to pass the id and boost here, since they are added to the root object mapper
// in the constructor
}, indexFieldMapper, typeFieldMapper, allFieldMapper, analyzerMapper, sourceFieldMapper, sizeFieldMapper);
return builder;
}
}
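
To make the parse entry points above concrete, here is a minimal usage sketch. It is not part of this commit: it assumes the pre-refactor class names used in this file plus the MapperTests.newParser() test helper that appears further down in this diff, and the mapping and source JSON literals are illustrative.

// Sketch only: drives parse(type, id, source) above, which funnels into parse(SourceToParse, listener).
package org.elasticsearch.index.mapper.xcontent;

import org.apache.lucene.document.Document;
import org.elasticsearch.index.mapper.ParsedDocument;

public class ParseSketch {
    public static void main(String[] args) throws Exception {
        String mapping = "{\"person\":{\"properties\":{\"name\":{\"type\":\"string\"}}}}";
        XContentDocumentMapper docMapper = MapperTests.newParser().parse(mapping);
        byte[] source = "{\"name\":\"kimchy\"}".getBytes();
        // the explicit id is set on the parse context, validated, and indexed through the uid field mapper
        ParsedDocument parsed = docMapper.parse("person", "1", source);
        Document luceneDoc = parsed.doc();
        System.out.println(luceneDoc.getFields());
    }
}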

View File

@ -1,356 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.xcontent.ip.IpFieldMapper;
import org.elasticsearch.index.settings.IndexSettings;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.*;
import static org.elasticsearch.index.mapper.xcontent.XContentMapperBuilders.*;
import static org.elasticsearch.index.mapper.xcontent.XContentTypeParsers.*;
/**
* @author kimchy (shay.banon)
*/
public class XContentDocumentMapperParser extends AbstractIndexComponent implements DocumentMapperParser {
final AnalysisService analysisService;
private final RootObjectMapper.TypeParser rootObjectTypeParser = new RootObjectMapper.TypeParser();
private final Object typeParsersMutex = new Object();
private volatile ImmutableMap<String, XContentMapper.TypeParser> typeParsers;
public XContentDocumentMapperParser(Index index, AnalysisService analysisService) {
this(index, ImmutableSettings.Builder.EMPTY_SETTINGS, analysisService);
}
public XContentDocumentMapperParser(Index index, @IndexSettings Settings indexSettings, AnalysisService analysisService) {
super(index, indexSettings);
this.analysisService = analysisService;
typeParsers = new MapBuilder<String, XContentMapper.TypeParser>()
.put(ByteFieldMapper.CONTENT_TYPE, new ByteFieldMapper.TypeParser())
.put(ShortFieldMapper.CONTENT_TYPE, new ShortFieldMapper.TypeParser())
.put(IntegerFieldMapper.CONTENT_TYPE, new IntegerFieldMapper.TypeParser())
.put(LongFieldMapper.CONTENT_TYPE, new LongFieldMapper.TypeParser())
.put(FloatFieldMapper.CONTENT_TYPE, new FloatFieldMapper.TypeParser())
.put(DoubleFieldMapper.CONTENT_TYPE, new DoubleFieldMapper.TypeParser())
.put(BooleanFieldMapper.CONTENT_TYPE, new BooleanFieldMapper.TypeParser())
.put(BinaryFieldMapper.CONTENT_TYPE, new BinaryFieldMapper.TypeParser())
.put(DateFieldMapper.CONTENT_TYPE, new DateFieldMapper.TypeParser())
.put(IpFieldMapper.CONTENT_TYPE, new IpFieldMapper.TypeParser())
.put(StringFieldMapper.CONTENT_TYPE, new StringFieldMapper.TypeParser())
.put(ObjectMapper.CONTENT_TYPE, new ObjectMapper.TypeParser())
.put(MultiFieldMapper.CONTENT_TYPE, new MultiFieldMapper.TypeParser())
.put(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser())
.immutableMap();
}
public void putTypeParser(String type, XContentMapper.TypeParser typeParser) {
synchronized (typeParsersMutex) {
typeParsers = new MapBuilder<String, XContentMapper.TypeParser>()
.putAll(typeParsers)
.put(type, typeParser)
.immutableMap();
}
}
public XContentMapper.TypeParser.ParserContext parserContext() {
return new XContentMapper.TypeParser.ParserContext(analysisService, typeParsers);
}
@Override public XContentDocumentMapper parse(String source) throws MapperParsingException {
return parse(null, source);
}
@Override public XContentDocumentMapper parse(@Nullable String type, String source) throws MapperParsingException {
return parse(type, source, null);
}
@SuppressWarnings({"unchecked"})
@Override public XContentDocumentMapper parse(@Nullable String type, String source, String defaultSource) throws MapperParsingException {
Map<String, Object> mapping = null;
if (source != null) {
Tuple<String, Map<String, Object>> t = extractMapping(type, source);
type = t.v1();
mapping = t.v2();
}
if (mapping == null) {
mapping = Maps.newHashMap();
}
if (type == null) {
throw new MapperParsingException("Failed to derive type");
}
if (defaultSource != null) {
Tuple<String, Map<String, Object>> t = extractMapping(MapperService.DEFAULT_MAPPING, defaultSource);
if (t.v2() != null) {
XContentHelper.mergeDefaults(mapping, t.v2());
}
}
XContentMapper.TypeParser.ParserContext parserContext = new XContentMapper.TypeParser.ParserContext(analysisService, typeParsers);
XContentDocumentMapper.Builder docBuilder = doc(index.name(), indexSettings, (RootObjectMapper.Builder) rootObjectTypeParser.parse(type, mapping, parserContext));
for (Map.Entry<String, Object> entry : mapping.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (SourceFieldMapper.CONTENT_TYPE.equals(fieldName) || "sourceField".equals(fieldName)) {
docBuilder.sourceField(parseSourceField((Map<String, Object>) fieldNode, parserContext));
} else if (SizeFieldMapper.CONTENT_TYPE.equals(fieldName)) {
docBuilder.sizeField(parseSizeField((Map<String, Object>) fieldNode, parserContext));
} else if (IdFieldMapper.CONTENT_TYPE.equals(fieldName) || "idField".equals(fieldName)) {
docBuilder.idField(parseIdField((Map<String, Object>) fieldNode, parserContext));
} else if (IndexFieldMapper.CONTENT_TYPE.equals(fieldName) || "indexField".equals(fieldName)) {
docBuilder.indexField(parseIndexField((Map<String, Object>) fieldNode, parserContext));
} else if (TypeFieldMapper.CONTENT_TYPE.equals(fieldName) || "typeField".equals(fieldName)) {
docBuilder.typeField(parseTypeField((Map<String, Object>) fieldNode, parserContext));
} else if (UidFieldMapper.CONTENT_TYPE.equals(fieldName) || "uidField".equals(fieldName)) {
docBuilder.uidField(parseUidField((Map<String, Object>) fieldNode, parserContext));
} else if (RoutingFieldMapper.CONTENT_TYPE.equals(fieldName)) {
docBuilder.routingField(parseRoutingField((Map<String, Object>) fieldNode, parserContext));
} else if (ParentFieldMapper.CONTENT_TYPE.equals(fieldName)) {
docBuilder.parentFiled(parseParentField((Map<String, Object>) fieldNode, parserContext));
} else if (BoostFieldMapper.CONTENT_TYPE.equals(fieldName) || "boostField".equals(fieldName)) {
docBuilder.boostField(parseBoostField((Map<String, Object>) fieldNode, parserContext));
} else if (AllFieldMapper.CONTENT_TYPE.equals(fieldName) || "allField".equals(fieldName)) {
docBuilder.allField(parseAllField((Map<String, Object>) fieldNode, parserContext));
} else if (AnalyzerMapper.CONTENT_TYPE.equals(fieldName)) {
docBuilder.analyzerField(parseAnalyzerField((Map<String, Object>) fieldNode, parserContext));
} else if ("index_analyzer".equals(fieldName)) {
docBuilder.indexAnalyzer(analysisService.analyzer(fieldNode.toString()));
} else if ("search_analyzer".equals(fieldName)) {
docBuilder.searchAnalyzer(analysisService.analyzer(fieldNode.toString()));
} else if ("analyzer".equals(fieldName)) {
docBuilder.indexAnalyzer(analysisService.analyzer(fieldNode.toString()));
docBuilder.searchAnalyzer(analysisService.analyzer(fieldNode.toString()));
}
}
if (!docBuilder.hasIndexAnalyzer()) {
docBuilder.indexAnalyzer(analysisService.defaultIndexAnalyzer());
}
if (!docBuilder.hasSearchAnalyzer()) {
docBuilder.searchAnalyzer(analysisService.defaultSearchAnalyzer());
}
ImmutableMap<String, Object> attributes = ImmutableMap.of();
if (mapping.containsKey("_meta")) {
attributes = ImmutableMap.copyOf((Map<String, Object>) mapping.get("_meta"));
}
docBuilder.meta(attributes);
XContentDocumentMapper documentMapper = docBuilder.build(this);
// update the source with the generated one
documentMapper.refreshSource();
return documentMapper;
}
private UidFieldMapper.Builder parseUidField(Map<String, Object> uidNode, XContentMapper.TypeParser.ParserContext parserContext) {
UidFieldMapper.Builder builder = uid();
return builder;
}
private BoostFieldMapper.Builder parseBoostField(Map<String, Object> boostNode, XContentMapper.TypeParser.ParserContext parserContext) {
String name = boostNode.get("name") == null ? BoostFieldMapper.Defaults.NAME : boostNode.get("name").toString();
BoostFieldMapper.Builder builder = boost(name);
parseNumberField(builder, name, boostNode, parserContext);
for (Map.Entry<String, Object> entry : boostNode.entrySet()) {
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
builder.nullValue(nodeFloatValue(propNode));
}
}
return builder;
}
private TypeFieldMapper.Builder parseTypeField(Map<String, Object> typeNode, XContentMapper.TypeParser.ParserContext parserContext) {
TypeFieldMapper.Builder builder = type();
parseField(builder, builder.name, typeNode, parserContext);
return builder;
}
private IdFieldMapper.Builder parseIdField(Map<String, Object> idNode, XContentMapper.TypeParser.ParserContext parserContext) {
IdFieldMapper.Builder builder = id();
parseField(builder, builder.name, idNode, parserContext);
return builder;
}
// NOTE, we also parse this in MappingMetaData
private RoutingFieldMapper.Builder parseRoutingField(Map<String, Object> routingNode, XContentMapper.TypeParser.ParserContext parserContext) {
RoutingFieldMapper.Builder builder = routing();
parseField(builder, builder.name, routingNode, parserContext);
for (Map.Entry<String, Object> entry : routingNode.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("required")) {
builder.required(nodeBooleanValue(fieldNode));
} else if (fieldName.equals("path")) {
builder.path(fieldNode.toString());
}
}
return builder;
}
private ParentFieldMapper.Builder parseParentField(Map<String, Object> parentNode, XContentMapper.TypeParser.ParserContext parserContext) {
ParentFieldMapper.Builder builder = new ParentFieldMapper.Builder();
for (Map.Entry<String, Object> entry : parentNode.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("type")) {
builder.type(fieldNode.toString());
}
}
return builder;
}
private AnalyzerMapper.Builder parseAnalyzerField(Map<String, Object> analyzerNode, XContentMapper.TypeParser.ParserContext parserContext) {
AnalyzerMapper.Builder builder = analyzer();
for (Map.Entry<String, Object> entry : analyzerNode.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("path")) {
builder.field(fieldNode.toString());
}
}
return builder;
}
private AllFieldMapper.Builder parseAllField(Map<String, Object> allNode, XContentMapper.TypeParser.ParserContext parserContext) {
AllFieldMapper.Builder builder = all();
parseField(builder, builder.name, allNode, parserContext);
for (Map.Entry<String, Object> entry : allNode.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode));
}
}
return builder;
}
private SizeFieldMapper.Builder parseSizeField(Map<String, Object> node, XContentMapper.TypeParser.ParserContext parserContext) {
SizeFieldMapper.Builder builder = new SizeFieldMapper.Builder();
for (Map.Entry<String, Object> entry : node.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode));
} else if (fieldName.equals("store")) {
builder.store(parseStore(fieldName, fieldNode.toString()));
}
}
return builder;
}
private SourceFieldMapper.Builder parseSourceField(Map<String, Object> sourceNode, XContentMapper.TypeParser.ParserContext parserContext) {
SourceFieldMapper.Builder builder = source();
for (Map.Entry<String, Object> entry : sourceNode.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode));
} else if (fieldName.equals("compress") && fieldNode != null) {
builder.compress(nodeBooleanValue(fieldNode));
} else if (fieldName.equals("compress_threshold") && fieldNode != null) {
if (fieldNode instanceof Number) {
builder.compressThreshold(((Number) fieldNode).longValue());
builder.compress(true);
} else {
builder.compressThreshold(ByteSizeValue.parseBytesSizeValue(fieldNode.toString()).bytes());
builder.compress(true);
}
}
}
return builder;
}
private IndexFieldMapper.Builder parseIndexField(Map<String, Object> indexNode, XContentMapper.TypeParser.ParserContext parserContext) {
IndexFieldMapper.Builder builder = XContentMapperBuilders.index();
parseField(builder, builder.name, indexNode, parserContext);
for (Map.Entry<String, Object> entry : indexNode.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode));
}
}
return builder;
}
@SuppressWarnings({"unchecked"})
private Tuple<String, Map<String, Object>> extractMapping(String type, String source) throws MapperParsingException {
Map<String, Object> root;
XContentParser xContentParser = null;
try {
xContentParser = XContentFactory.xContent(source).createParser(source);
root = xContentParser.map();
} catch (IOException e) {
throw new MapperParsingException("Failed to parse mapping definition", e);
} finally {
if (xContentParser != null) {
xContentParser.close();
}
}
// we always assume the first and single key is the mapping type root
if (root.keySet().size() != 1) {
throw new MapperParsingException("Mapping must have the `type` as the root object");
}
String rootName = root.keySet().iterator().next();
if (type == null) {
type = rootName;
}
return new Tuple<String, Map<String, Object>>(type, (Map<String, Object>) root.get(rootName));
}
}
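
The typeParsers map built in the constructor above is the parser's extension point: putTypeParser swaps in a fresh immutable map under typeParsersMutex, so additional field types can be registered after construction. Below is a minimal sketch, not part of this commit, using the pre-refactor names and the MapperTests helper from the test changes further down; re-registering geo_point is purely illustrative, since the constructor already wires it.

package org.elasticsearch.index.mapper.xcontent;

import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldMapper;

public class TypeParserRegistrationSketch {
    public static XContentDocumentMapper parseWithExtraType(String mappingJson) {
        // MapperTests.newParser() is the test helper shown later in this diff (pre-refactor form)
        XContentDocumentMapperParser mapperParser = MapperTests.newParser();
        // geo_point is already registered in the constructor above; a custom mapper would
        // register its own XContentMapper.TypeParser under its own content type the same way
        mapperParser.putTypeParser(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser());
        return mapperParser.parse(mappingJson);
    }
}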

View File

@ -53,7 +53,7 @@ import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.UidFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

View File

@ -22,7 +22,11 @@ package org.elasticsearch.index.percolator;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.DeletionAwareConstantScoreQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.TermFilter;
@ -30,7 +34,11 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.selector.UidAndSourceFieldSelector;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.shard.IndexShardState;

View File

@ -24,7 +24,7 @@ import org.apache.lucene.search.Query;
import org.elasticsearch.common.collect.Lists;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.AllFieldMapper;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import java.io.IOException;
import java.util.List;

View File

@ -24,8 +24,8 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldDataType;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldDataType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoBoundingBoxFilter;
import org.elasticsearch.index.search.geo.GeoHashUtils;

View File

@ -25,8 +25,8 @@ import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldDataType;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldDataType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoDistance;
import org.elasticsearch.index.search.geo.GeoDistanceFilter;
import org.elasticsearch.index.search.geo.GeoHashUtils;

View File

@ -25,8 +25,8 @@ import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldDataType;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldDataType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoDistance;
import org.elasticsearch.index.search.geo.GeoDistanceRangeFilter;
import org.elasticsearch.index.search.geo.GeoHashUtils;

View File

@ -25,8 +25,8 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldDataType;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldDataType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoHashUtils;
import org.elasticsearch.index.search.geo.GeoPolygonFilter;

View File

@ -25,7 +25,7 @@ import org.elasticsearch.common.collect.Sets;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.MoreLikeThisQuery;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.AllFieldMapper;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import java.io.IOException;
import java.util.List;

View File

@ -24,7 +24,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.xcontent.NumberFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import java.io.IOException;

View File

@ -30,7 +30,7 @@ import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.trove.impl.Constants;
import org.elasticsearch.common.trove.map.hash.TObjectFloatHashMap;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.AllFieldMapper;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import java.io.IOException;

View File

@ -25,7 +25,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.TermFilter;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import java.io.IOException;

View File

@ -30,7 +30,7 @@ import org.elasticsearch.common.Unicode;
import org.elasticsearch.common.bloom.BloomFilter;
import org.elasticsearch.index.cache.bloom.BloomCache;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.UidFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import java.io.IOException;
import java.util.ArrayList;

View File

@ -24,8 +24,8 @@ import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.elasticsearch.common.lucene.docset.GetDocSet;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldDataType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.geo.GeoPointFieldDataType;
import java.io.IOException;

View File

@ -27,8 +27,8 @@ import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldDataType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.geo.GeoPointFieldDataType;
import java.io.IOException;

View File

@ -25,8 +25,8 @@ import org.apache.lucene.search.Filter;
import org.elasticsearch.common.lucene.docset.GetDocSet;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldDataType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.geo.GeoPointFieldDataType;
import java.io.IOException;

View File

@ -26,8 +26,8 @@ import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.lucene.docset.GetDocSet;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldDataType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.geo.GeoPointFieldDataType;
import java.io.IOException;

View File

@ -24,8 +24,8 @@ import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.elasticsearch.common.lucene.docset.GetDocSet;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldDataType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.geo.GeoPointFieldDataType;
import java.io.IOException;

View File

@ -23,8 +23,8 @@ import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldDataType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.geo.GeoPointFieldDataType;
import org.elasticsearch.index.search.geo.GeoDistance;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;

View File

@ -25,7 +25,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoDistance;
import org.elasticsearch.index.search.geo.GeoHashUtils;
import org.elasticsearch.search.facet.Facet;

View File

@ -22,7 +22,7 @@ package org.elasticsearch.search.facet.geodistance;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.geo.GeoPointFieldData;
import org.elasticsearch.index.search.geo.GeoDistance;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.internal.SearchContext;

View File

@ -24,7 +24,7 @@ import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.geo.GeoPointFieldData;
import org.elasticsearch.index.search.geo.GeoDistance;
import org.elasticsearch.search.facet.FacetPhaseExecutionException;
import org.elasticsearch.search.internal.SearchContext;

View File

@ -28,7 +28,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.xcontent.ip.IpFieldMapper;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.FacetCollector;
import org.elasticsearch.search.facet.FacetProcessor;

View File

@ -28,7 +28,7 @@ import org.elasticsearch.common.trove.iterator.TLongIntIterator;
import org.elasticsearch.common.trove.map.hash.TLongIntHashMap;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.index.mapper.xcontent.ip.IpFieldMapper;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.terms.InternalTermsFacet;
import org.elasticsearch.search.facet.terms.TermsFacet;
@ -187,7 +187,7 @@ public class InternalIpTermsFacet extends InternalTermsFacet {
}
BoundedTreeSet<LongEntry> ordered = new BoundedTreeSet<LongEntry>(first.comparatorType.comparator(), first.requiredSize);
for (TLongIntIterator it = aggregated.iterator(); it.hasNext();) {
for (TLongIntIterator it = aggregated.iterator(); it.hasNext(); ) {
it.advance();
ordered.add(new LongEntry(it.key(), it.value()));
}

View File

@ -26,7 +26,16 @@ import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.selector.AllButSourceFieldSelector;
import org.elasticsearch.index.mapper.selector.FieldMappersFieldSelector;
import org.elasticsearch.index.mapper.selector.UidAndSourceFieldSelector;
import org.elasticsearch.index.mapper.selector.UidFieldSelector;
import org.elasticsearch.indices.TypeMissingException;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchParseElement;

View File

@ -22,7 +22,7 @@ package org.elasticsearch.search.fetch.version;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.lucene.uid.UidField;
import org.elasticsearch.index.mapper.UidFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.fetch.SearchHitPhase;
import org.elasticsearch.search.internal.SearchContext;

View File

@ -31,8 +31,8 @@ import org.elasticsearch.common.io.stream.LZFStreamInput;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldSelector;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.SourceFieldSelector;
import java.util.Collection;
import java.util.List;

View File

@ -22,7 +22,7 @@ package org.elasticsearch.search.sort;
import org.apache.lucene.search.SortField;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoDistance;
import org.elasticsearch.index.search.geo.GeoDistanceDataComparator;
import org.elasticsearch.index.search.geo.GeoHashUtils;

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
@ -27,7 +27,6 @@ import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNameModule;
import org.elasticsearch.index.analysis.AnalysisModule;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettingsModule;
/**
@ -35,8 +34,8 @@ import org.elasticsearch.index.settings.IndexSettingsModule;
*/
public class MapperTests {
public static XContentDocumentMapperParser newParser() {
return new XContentDocumentMapperParser(new Index("test"), newAnalysisService());
public static DocumentMapperParser newParser() {
return new DocumentMapperParser(new Index("test"), newAnalysisService());
}
public static MapperService newMapperService() {

View File

@ -17,14 +17,14 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent.all;
package org.elasticsearch.index.mapper.all;
import org.apache.lucene.document.Document;
import org.elasticsearch.common.lucene.all.AllEntries;
import org.elasticsearch.common.lucene.all.AllField;
import org.elasticsearch.common.lucene.all.AllTokenStream;
import org.elasticsearch.index.mapper.xcontent.MapperTests;
import org.elasticsearch.index.mapper.xcontent.XContentDocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperTests;
import org.testng.annotations.Test;
import static org.elasticsearch.common.io.Streams.*;
@ -38,9 +38,9 @@ import static org.hamcrest.Matchers.*;
public class SimpleAllMapperTests {
@Test public void testSimpleAllMappers() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/xcontent/all/mapping.json");
XContentDocumentMapper docMapper = MapperTests.newParser().parse(mapping);
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/xcontent/all/test1.json");
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping.json");
DocumentMapper docMapper = MapperTests.newParser().parse(mapping);
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
Document doc = docMapper.parse(json).doc();
AllField field = (AllField) doc.getFieldable("_all");
AllEntries allEntries = ((AllTokenStream) field.tokenStreamValue()).allEntries();
@ -51,13 +51,13 @@ public class SimpleAllMapperTests {
}
@Test public void testSimpleAllMappersWithReparse() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/xcontent/all/mapping.json");
XContentDocumentMapper docMapper = MapperTests.newParser().parse(mapping);
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping.json");
DocumentMapper docMapper = MapperTests.newParser().parse(mapping);
String builtMapping = docMapper.mappingSource().string();
// System.out.println(builtMapping);
// reparse it
XContentDocumentMapper builtDocMapper = MapperTests.newParser().parse(builtMapping);
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/xcontent/all/test1.json");
DocumentMapper builtDocMapper = MapperTests.newParser().parse(builtMapping);
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
Document doc = builtDocMapper.parse(json).doc();
AllField field = (AllField) doc.getFieldable("_all");
@ -69,9 +69,9 @@ public class SimpleAllMapperTests {
}
@Test public void testSimpleAllMappersWithStore() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/xcontent/all/store-mapping.json");
XContentDocumentMapper docMapper = MapperTests.newParser().parse(mapping);
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/xcontent/all/test1.json");
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/store-mapping.json");
DocumentMapper docMapper = MapperTests.newParser().parse(mapping);
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
Document doc = docMapper.parse(json).doc();
AllField field = (AllField) doc.getFieldable("_all");
AllEntries allEntries = ((AllTokenStream) field.tokenStreamValue()).allEntries();
@ -84,13 +84,13 @@ public class SimpleAllMapperTests {
}
@Test public void testSimpleAllMappersWithReparseWithStore() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/xcontent/all/store-mapping.json");
XContentDocumentMapper docMapper = MapperTests.newParser().parse(mapping);
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/store-mapping.json");
DocumentMapper docMapper = MapperTests.newParser().parse(mapping);
String builtMapping = docMapper.mappingSource().string();
System.out.println(builtMapping);
// reparse it
XContentDocumentMapper builtDocMapper = MapperTests.newParser().parse(builtMapping);
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/xcontent/all/test1.json");
DocumentMapper builtDocMapper = MapperTests.newParser().parse(builtMapping);
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
Document doc = builtDocMapper.parse(json).doc();
AllField field = (AllField) doc.getFieldable("_all");

View File

@ -0,0 +1,55 @@
{
"person" : {
"_all" : {
"enabled" : true,
"store" : "yes"
},
"properties" : {
"name" : {
"type" : "object",
"dynamic" : false,
"properties" : {
"first" : {
"type" : "string",
"store" : "yes",
"include_in_all" : false
},
"last" : {
"type" : "string",
"index" : "not_analyzed"
}
}
},
"address" : {
"type" : "object",
"include_in_all" : false,
"properties" : {
"first" : {
"properties" : {
"location" : {
"type" : "string",
"store" : "yes",
"index_name" : "firstLocation"
}
}
},
"last" : {
"properties" : {
"location" : {
"type" : "string"
}
}
}
}
},
"simple1" : {
"type" : "long",
"include_in_all" : true
},
"simple2" : {
"type" : "long",
"include_in_all" : false
}
}
}
}
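
The mapping above appears to be the store-enabled _all variant (store-mapping.json) that the all-mapper tests load from the relocated org/elasticsearch/index/mapper/all classpath location. Below is a sketch in the style of those tests, not part of this commit, written as an extra method for the SimpleAllMapperTests class shown earlier; the inline source document is an illustrative stand-in for test1.json.

// Sketch only: same calls as testSimpleAllMappersWithStore, with an inline source document.
@Test public void testStoreMappingSketch() throws Exception {
    String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/store-mapping.json");
    DocumentMapper docMapper = MapperTests.newParser().parse(mapping);
    // illustrative source; name.first is excluded from _all via include_in_all:false,
    // simple1 is included via include_in_all:true, and _all itself is stored ("store" : "yes")
    byte[] json = "{\"person\":{\"name\":{\"first\":\"shay\"},\"simple1\":1}}".getBytes();
    Document doc = docMapper.parse(json).doc();
    AllField field = (AllField) doc.getFieldable("_all");
    AllEntries allEntries = ((AllTokenStream) field.tokenStreamValue()).allEntries();
    System.out.println(allEntries);
}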

Some files were not shown because too many files have changed in this diff