Mapping cleanups. #18180
This removes dead/duplicate code and makes the `_index` field not configurable. (Configuration used to just be ignored; now an exception is thrown if any is provided.)
This commit is contained in: parent ba2fe156e8, commit 5d8f684319
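Note: the API change at the heart of this diff replaces the builder-style `SourceToParse` (mutable index/type/id setters) with factory methods that take all identifying metadata up front. A minimal before/after sketch of a call site (values illustrative, not from the diff):

    // before: coordinates were mutable builder properties, settable (or forgotten) after creation
    SourceToParse old = SourceToParse.source(jsonBytes).index("test").type("type").id("1");

    // after: index, type and id are required, non-null, and final;
    // only per-request extras (routing, parent, timestamp, ttl) remain settable
    SourceToParse now = SourceToParse.source("test", "type", "1", jsonBytes).routing("shard_key");

Here `jsonBytes` stands in for any `BytesReference` holding the source document.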
@@ -169,7 +169,7 @@ public class TransportIndexAction extends TransportReplicationAction<IndexReques
      */
     public static Engine.Index executeIndexRequestOnReplica(IndexRequest request, IndexShard indexShard) {
         final ShardId shardId = indexShard.shardId();
-        SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.REPLICA, request.source()).index(shardId.getIndexName()).type(request.type()).id(request.id())
+        SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.REPLICA, shardId.getIndexName(), request.type(), request.id(), request.source())
             .routing(request.routing()).parent(request.parent()).timestamp(request.timestamp()).ttl(request.ttl());

         final Engine.Index operation = indexShard.prepareIndexOnReplica(sourceToParse, request.version(), request.versionType());
@@ -183,7 +183,7 @@ public class TransportIndexAction extends TransportReplicationAction<IndexReques

     /** Utility method to prepare an index operation on primary shards */
     public static Engine.Index prepareIndexOperationOnPrimary(IndexRequest request, IndexShard indexShard) {
-        SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.PRIMARY, request.source()).index(request.index()).type(request.type()).id(request.id())
+        SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.PRIMARY, request.index(), request.type(), request.id(), request.source())
             .routing(request.routing()).parent(request.parent()).timestamp(request.timestamp()).ttl(request.ttl());
         return indexShard.prepareIndexOnPrimary(sourceToParse, request.version(), request.versionType());
     }
@@ -42,10 +42,6 @@ public final class ContentPath {
     public ContentPath(int offset) {
         this.sb = new StringBuilder();
         this.offset = offset;
-        reset();
     }

-    public void reset() {
-        this.index = 0;
-    }
@@ -279,7 +279,7 @@ public class DocumentMapper implements ToXContent {
     }

     public ParsedDocument parse(String index, String type, String id, BytesReference source) throws MapperParsingException {
-        return parse(SourceToParse.source(source).index(index).type(type).id(id));
+        return parse(SourceToParse.source(index, type, id, source));
     }

     public ParsedDocument parse(SourceToParse source) throws MapperParsingException {
@@ -50,6 +50,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Objects;

 /** A parser for documents, given mappings from a DocumentMapper */
 final class DocumentParser {
@@ -67,23 +68,16 @@ final class DocumentParser {
     final ParsedDocument parseDocument(SourceToParse source) throws MapperParsingException {
         validateType(source);

-        source.type(docMapper.type());
         final Mapping mapping = docMapper.mapping();
-        final ParseContext.InternalParseContext context = new ParseContext.InternalParseContext(indexSettings.getSettings(), docMapperParser, docMapper, new ContentPath(0));
-        XContentParser parser = null;
-        try {
-            parser = parser(source);
-            context.reset(parser, new ParseContext.Document(), source);
+        final ParseContext.InternalParseContext context;
+        try (XContentParser parser = XContentHelper.createParser(source.source())) {
+            context = new ParseContext.InternalParseContext(indexSettings.getSettings(),
+                docMapperParser, docMapper, source, parser);
             validateStart(parser);
             internalParseDocument(mapping, context, parser);
             validateEnd(source, parser);
         } catch (Throwable t) {
             throw wrapInMapperParsingException(source, t);
-        } finally {
-            // only close the parser when its not provided externally
-            if (internalParser(source, parser)) {
-                parser.close();
-            }
         }
         String remainingPath = context.path().pathAsText("");
         if (remainingPath.isEmpty() == false) {
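Note: the rewrite above creates the `XContentParser` in a try-with-resources block, which is what lets the old `internalParser(...)` bookkeeping and the explicit `finally` close go away. A self-contained sketch of why the `finally` block becomes redundant:

    // Demonstrates that try-with-resources closes the resource even when the body
    // throws, so an explicit finally { parser.close(); } is no longer needed.
    final class CloseDemo {
        static final class Resource implements AutoCloseable {
            @Override public void close() { System.out.println("closed"); }
        }
        public static void main(String[] args) {
            try (Resource r = new Resource()) {
                throw new IllegalStateException("parse failure");
            } catch (IllegalStateException e) {
                System.out.println("caught after close: " + e.getMessage()); // printed after "closed"
            }
        }
    }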
@@ -120,19 +114,11 @@ final class DocumentParser {
             throw new IllegalArgumentException("It is forbidden to index into the default mapping [" + MapperService.DEFAULT_MAPPING + "]");
         }

-        if (source.type() != null && !source.type().equals(docMapper.type())) {
+        if (Objects.equals(source.type(), docMapper.type()) == false) {
             throw new MapperParsingException("Type mismatch, provide type [" + source.type() + "] but mapper is of type [" + docMapper.type() + "]");
         }
     }

-    private static XContentParser parser(SourceToParse source) throws IOException {
-        return source.parser() == null ? XContentHelper.createParser(source.source()) : source.parser();
-    }
-
-    private static boolean internalParser(SourceToParse source, XContentParser parser) {
-        return source.parser() == null && parser != null;
-    }
-
     private static void validateStart(XContentParser parser) throws IOException {
         // will result in START_OBJECT
         XContentParser.Token token = parser.nextToken();
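Note: `Objects.equals(source.type(), docMapper.type()) == false` folds the old null guard into a single call, and it also tightens the semantics: a null `source.type()` previously skipped the check entirely, whereas it would now count as a mismatch (a case that can no longer occur anyway, since `SourceToParse` now requires a non-null type). For reference:

    import java.util.Objects;

    class ObjectsEqualsDemo {
        public static void main(String[] args) {
            System.out.println(Objects.equals(null, null));      // true
            System.out.println(Objects.equals(null, "type"));    // false, and no NullPointerException
            System.out.println(Objects.equals("type", "type"));  // true
        }
    }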
@@ -143,13 +129,11 @@ final class DocumentParser {

     private static void validateEnd(SourceToParse source, XContentParser parser) throws IOException {
         XContentParser.Token token;// only check for end of tokens if we created the parser here
-        if (internalParser(source, parser)) {
-            // try to parse the next token, this should be null if the object is ended properly
-            // but will throw a JSON exception if the extra tokens is not valid JSON (this will be handled by the catch)
-            token = parser.nextToken();
-            if (token != null) {
-                throw new IllegalArgumentException("Malformed content, found extra data after parsing: " + token);
-            }
-        }
+        // try to parse the next token, this should be null if the object is ended properly
+        // but will throw a JSON exception if the extra tokens is not valid JSON (this will be handled by the catch)
+        token = parser.nextToken();
+        if (token != null) {
+            throw new IllegalArgumentException("Malformed content, found extra data after parsing: " + token);
+        }
     }
@@ -175,15 +159,14 @@ final class DocumentParser {

     private static ParsedDocument parsedDocument(SourceToParse source, ParseContext.InternalParseContext context, Mapping update) {
         return new ParsedDocument(
-            context.uid(),
             context.version(),
-            context.id(),
-            context.type(),
+            context.sourceToParse().id(),
+            context.sourceToParse().type(),
             source.routing(),
             source.timestamp(),
             source.ttl(),
             context.docs(),
-            context.source(),
+            context.sourceToParse().source(),
             update
         ).parent(source.parent());
     }
@@ -22,17 +22,11 @@ package org.elasticsearch.index.mapper;
 import com.carrotsearch.hppc.ObjectObjectHashMap;
 import com.carrotsearch.hppc.ObjectObjectMap;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.document.LegacyIntField;
-import org.apache.lucene.document.LegacyLongField;
-import org.apache.lucene.document.LegacyFloatField;
-import org.apache.lucene.document.LegacyDoubleField;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.lucene.all.AllEntries;
-import org.elasticsearch.common.lucene.all.AllField;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.analysis.AnalysisService;
@@ -133,8 +127,6 @@ public abstract class ParseContext {
      * Returns an array of values of the field specified as the method parameter.
      * This method returns an empty array when there are no
      * matching fields. It never returns null.
-     * For {@link org.apache.lucene.document.LegacyIntField}, {@link org.apache.lucene.document.LegacyLongField}, {@link
-     * org.apache.lucene.document.LegacyFloatField} and {@link org.apache.lucene.document.LegacyDoubleField} it returns the string value of the number.
      * If you want the actual numeric field instances back, use {@link #getFields}.
      * @param name the name of the field
      * @return a <code>String[]</code> of field values
@@ -201,36 +193,16 @@ public abstract class ParseContext {
             return in.isWithinMultiFields();
         }

-        @Override
-        public String index() {
-            return in.index();
-        }
-
         @Override
         public Settings indexSettings() {
             return in.indexSettings();
         }

-        @Override
-        public String type() {
-            return in.type();
-        }
-
         @Override
         public SourceToParse sourceToParse() {
             return in.sourceToParse();
         }

-        @Override
-        public BytesReference source() {
-            return in.source();
-        }
-
-        @Override
-        public void source(BytesReference source) {
-            in.source(source);
-        }
-
         @Override
         public ContentPath path() {
             return in.path();
@@ -257,7 +229,7 @@ public abstract class ParseContext {
         }

         @Override
-        public void addDoc(Document doc) {
+        protected void addDoc(Document doc) {
             in.addDoc(doc);
         }
@@ -281,26 +253,6 @@ public abstract class ParseContext {
             return in.mapperService();
         }

-        @Override
-        public String id() {
-            return in.id();
-        }
-
-        @Override
-        public void id(String id) {
-            in.id(id);
-        }
-
-        @Override
-        public Field uid() {
-            return in.uid();
-        }
-
-        @Override
-        public void uid(Field uid) {
-            in.uid(uid);
-        }
-
         @Override
         public Field version() {
             return in.version();
@@ -345,48 +297,37 @@ public abstract class ParseContext {

         private final ContentPath path;

-        private XContentParser parser;
+        private final XContentParser parser;

         private Document document;

-        private List<Document> documents = new ArrayList<>();
+        private final List<Document> documents;

         @Nullable
         private final Settings indexSettings;

-        private SourceToParse sourceToParse;
-        private BytesReference source;
+        private final SourceToParse sourceToParse;

-        private String id;
-        private Field uid, version;
+        private Field version;

         private StringBuilder stringBuilder = new StringBuilder();

-        private AllEntries allEntries = new AllEntries();
+        private final AllEntries allEntries;

-        private List<Mapper> dynamicMappers = new ArrayList<>();
+        private final List<Mapper> dynamicMappers;

-        public InternalParseContext(@Nullable Settings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper, ContentPath path) {
+        public InternalParseContext(@Nullable Settings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper,
+                                    SourceToParse source, XContentParser parser) {
             this.indexSettings = indexSettings;
             this.docMapper = docMapper;
             this.docMapperParser = docMapperParser;
-            this.path = path;
-        }
-
-        public void reset(XContentParser parser, Document document, SourceToParse source) {
+            this.path = new ContentPath(0);
             this.parser = parser;
-            this.document = document;
-            if (document != null) {
-                this.documents = new ArrayList<>();
-                this.documents.add(document);
-            } else {
-                this.documents = null;
-            }
-            this.uid = null;
+            this.document = new Document();
+            this.documents = new ArrayList<>();
+            this.documents.add(document);
             this.version = null;
-            this.id = null;
             this.sourceToParse = source;
-            this.source = source == null ? null : sourceToParse.source();
-            this.path.reset();
+            this.allEntries = new AllEntries();
+            this.dynamicMappers = new ArrayList<>();
         }
@@ -396,38 +337,17 @@ public abstract class ParseContext {
             return this.docMapperParser;
         }

-        @Override
-        public String index() {
-            return sourceToParse.index();
-        }
-
         @Override
         @Nullable
         public Settings indexSettings() {
             return this.indexSettings;
         }

-        @Override
-        public String type() {
-            return sourceToParse.type();
-        }
-
         @Override
         public SourceToParse sourceToParse() {
             return this.sourceToParse;
         }

-        @Override
-        public BytesReference source() {
-            return source;
-        }
-
-        // only should be used by SourceFieldMapper to update with a compressed source
-        @Override
-        public void source(BytesReference source) {
-            this.source = source;
-        }
-
         @Override
         public ContentPath path() {
             return this.path;
@@ -454,7 +374,7 @@ public abstract class ParseContext {
         }

         @Override
-        public void addDoc(Document doc) {
+        protected void addDoc(Document doc) {
            this.documents.add(doc);
         }
@@ -478,32 +398,6 @@ public abstract class ParseContext {
             return docMapperParser.mapperService;
         }

-        @Override
-        public String id() {
-            return id;
-        }
-
-        /**
-         * Really, just the id mapper should set this.
-         */
-        @Override
-        public void id(String id) {
-            this.id = id;
-        }
-
-        @Override
-        public Field uid() {
-            return this.uid;
-        }
-
-        /**
-         * Really, just the uid mapper should set this.
-         */
-        @Override
-        public void uid(Field uid) {
-            this.uid = uid;
-        }
-
         @Override
         public Field version() {
             return this.version;
@@ -597,21 +491,11 @@ public abstract class ParseContext {
         return false;
     }

-    public abstract String index();
-
     @Nullable
     public abstract Settings indexSettings();

-    public abstract String type();
-
     public abstract SourceToParse sourceToParse();

-    @Nullable
-    public abstract BytesReference source();
-
-    // only should be used by SourceFieldMapper to update with a compressed source
-    public abstract void source(BytesReference source);
-
     public abstract ContentPath path();

     public abstract XContentParser parser();
@@ -622,7 +506,7 @@ public abstract class ParseContext {

     public abstract Document doc();

-    public abstract void addDoc(Document doc);
+    protected abstract void addDoc(Document doc);

     public abstract RootObjectMapper root();
@@ -632,20 +516,6 @@ public abstract class ParseContext {

     public abstract MapperService mapperService();

-    public abstract String id();
-
-    /**
-     * Really, just the id mapper should set this.
-     */
-    public abstract void id(String id);
-
-    public abstract Field uid();
-
-    /**
-     * Really, just the uid mapper should set this.
-     */
-    public abstract void uid(Field uid);
-
     public abstract Field version();

     public abstract void version(Field version);
|
@ -30,11 +30,9 @@ import java.util.List;
|
|||
*/
|
||||
public class ParsedDocument {
|
||||
|
||||
private final Field uid, version;
|
||||
private final Field version;
|
||||
|
||||
private final String id;
|
||||
|
||||
private final String type;
|
||||
private final String uid, id, type;
|
||||
|
||||
private final String routing;
|
||||
|
||||
|
@@ -50,11 +48,11 @@ public class ParsedDocument {

     private String parent;

-    public ParsedDocument(Field uid, Field version, String id, String type, String routing, long timestamp, long ttl, List<Document> documents, BytesReference source, Mapping dynamicMappingsUpdate) {
-        this.uid = uid;
+    public ParsedDocument(Field version, String id, String type, String routing, long timestamp, long ttl, List<Document> documents, BytesReference source, Mapping dynamicMappingsUpdate) {
         this.version = version;
         this.id = id;
         this.type = type;
+        this.uid = Uid.createUid(type, id);
         this.routing = routing;
         this.timestamp = timestamp;
         this.ttl = ttl;
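Note: `ParsedDocument` now derives its uid from the type and id at construction time instead of receiving a prebuilt `Field`. In Elasticsearch the uid string is the type and id joined by `#` (see the delimiter handling in `Uid` later in this diff); a sketch of the derivation:

    // equivalent in spirit to Uid.createUid(type, id); '#' is Uid's delimiter
    final class UidSketch {
        static String createUid(String type, String id) {
            return type + "#" + id;
        }
    }
    // so new ParsedDocument(version, "1", "my_type", ...) carries the uid "my_type#1"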
@@ -62,15 +60,14 @@ public class ParsedDocument {
         this.source = source;
         this.dynamicMappingsUpdate = dynamicMappingsUpdate;
     }

-    public Field uid() {
-        return this.uid;
-    }
-
     public Field version() {
         return version;
     }

+    public String uid() {
+        return uid;
+    }
+
     public String id() {
         return this.id;
     }
@@ -19,38 +19,33 @@

 package org.elasticsearch.index.mapper;

+import java.util.Objects;
+
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.xcontent.XContentParser;

 /**
  *
  */
 public class SourceToParse {

-    public static SourceToParse source(XContentParser parser) {
-        return new SourceToParse(Origin.PRIMARY, parser);
+    public static SourceToParse source(String index, String type, String id, BytesReference source) {
+        return source(Origin.PRIMARY, index, type, id, source);
     }

-    public static SourceToParse source(BytesReference source) {
-        return new SourceToParse(Origin.PRIMARY, source);
-    }
-
-    public static SourceToParse source(Origin origin, BytesReference source) {
-        return new SourceToParse(origin, source);
+    public static SourceToParse source(Origin origin, String index, String type, String id, BytesReference source) {
+        return new SourceToParse(origin, index, type, id, source);
     }

     private final Origin origin;

     private final BytesReference source;

-    private final XContentParser parser;
+    private final String index;

-    private String index;
+    private final String type;

-    private String type;
-
-    private String id;
+    private final String id;

     private String routing;
@@ -60,28 +55,20 @@ public class SourceToParse {

     private long ttl;

-    private SourceToParse(Origin origin, XContentParser parser) {
-        this.origin = origin;
-        this.parser = parser;
-        this.source = null;
-    }
-
-    private SourceToParse(Origin origin, BytesReference source) {
-        this.origin = origin;
+    private SourceToParse(Origin origin, String index, String type, String id, BytesReference source) {
+        this.origin = Objects.requireNonNull(origin);
+        this.index = Objects.requireNonNull(index);
+        this.type = Objects.requireNonNull(type);
+        this.id = Objects.requireNonNull(id);
         // we always convert back to byte array, since we store it and Field only supports bytes..
         // so, we might as well do it here, and improve the performance of working with direct byte arrays
         this.source = source.toBytesArray();
-        this.parser = null;
     }

     public Origin origin() {
         return origin;
     }

-    public XContentParser parser() {
-        return this.parser;
-    }
-
     public BytesReference source() {
         return this.source;
     }
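Note: the private constructor now fails fast with `Objects.requireNonNull`, so a `SourceToParse` can never exist with missing coordinates; under the old builder, a half-initialized instance could travel until some later use blew up. The idiom in isolation:

    import java.util.Objects;

    final class DocCoordinates {
        private final String index, type, id;

        DocCoordinates(String index, String type, String id) {
            // throws NullPointerException at construction time, not at first use
            this.index = Objects.requireNonNull(index);
            this.type = Objects.requireNonNull(type);
            this.id = Objects.requireNonNull(id);
        }
    }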
@@ -90,29 +77,14 @@ public class SourceToParse {
         return this.index;
     }

-    public SourceToParse index(String index) {
-        this.index = index;
-        return this;
-    }
-
     public String type() {
         return this.type;
     }

-    public SourceToParse type(String type) {
-        this.type = type;
-        return this;
-    }
-
     public String id() {
         return this.id;
     }

-    public SourceToParse id(String id) {
-        this.id = id;
-        return this;
-    }
-
     public String parent() {
         return this.parentId;
     }
|
@ -95,15 +95,6 @@ public final class Uid {
|
|||
return new Uid(uid.substring(0, delimiterIndex), uid.substring(delimiterIndex + 1));
|
||||
}
|
||||
|
||||
public static BytesRef[] createUids(List<? extends DocumentRequest> requests) {
|
||||
BytesRef[] uids = new BytesRef[requests.size()];
|
||||
int idx = 0;
|
||||
for (DocumentRequest item : requests) {
|
||||
uids[idx++] = createUidAsBytes(item.type(), item.id());
|
||||
}
|
||||
return uids;
|
||||
}
|
||||
|
||||
public static BytesRef createUidAsBytes(String type, String id) {
|
||||
return createUidAsBytes(new BytesRef(type), new BytesRef(id));
|
||||
}
|
||||
|
|
|
@@ -19,7 +19,6 @@

 package org.elasticsearch.index.mapper.internal;

-import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.Term;

@@ -37,7 +36,6 @@ import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;

@@ -52,7 +50,9 @@ import java.util.List;
 import java.util.Map;

 /**
- *
+ * A mapper for the _id field. It does nothing since _id is neither indexed nor
+ * stored, but we need to keep it so that its FieldType can be used to generate
+ * queries.
  */
 public class IdFieldMapper extends MetadataFieldMapper {
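Note: the new javadoc explains why an unindexed, unstored `_id` mapper is kept at all: its field type can still be used to build queries. Since `_id` itself has no postings, an id lookup is answered through the indexed `_uid` field; a hypothetical sketch (helper name invented for illustration):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermQuery;

    final class IdQueries {
        // _uid terms have the form "type#id", so an _id lookup becomes a _uid term query
        static Query idQuery(String type, String id) {
            return new TermQuery(new Term("_uid", type + "#" + id));
        }
    }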
@@ -84,12 +84,6 @@ public class IdFieldMapper extends MetadataFieldMapper {
             indexName = Defaults.NAME;
         }

-        // if we are indexed we use DOCS
-        @Override
-        protected IndexOptions getDefaultIndexOption() {
-            return IndexOptions.DOCS;
-        }
-
         @Override
         public IdFieldMapper build(BuilderContext context) {
             setupFieldType(context);

@@ -203,40 +197,13 @@ public class IdFieldMapper extends MetadataFieldMapper {
     }

     @Override
-    public void preParse(ParseContext context) throws IOException {
-        if (context.sourceToParse().id() != null) {
-            context.id(context.sourceToParse().id());
-            super.parse(context);
-        }
-    }
+    public void preParse(ParseContext context) throws IOException {}

     @Override
-    public void postParse(ParseContext context) throws IOException {
-        if (context.id() == null) {
-            throw new MapperParsingException("No id found while parsing the content source");
-        }
-        // it either get built in the preParse phase, or get parsed...
-    }
+    public void postParse(ParseContext context) throws IOException {}

     @Override
-    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
-        XContentParser parser = context.parser();
-        if (parser.currentName() != null && parser.currentName().equals(Defaults.NAME) && parser.currentToken().isValue()) {
-            // we are in the parse Phase
-            String id = parser.text();
-            if (context.id() != null && !context.id().equals(id)) {
-                throw new MapperParsingException("Provided id [" + context.id() + "] does not match the content one [" + id + "]");
-            }
-            context.id(id);
-        } // else we are in the pre/post parse phase
-
-        if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
-            fields.add(new Field(fieldType().name(), context.id(), fieldType()));
-        }
-        if (fieldType().hasDocValues()) {
-            fields.add(new BinaryDocValuesField(fieldType().name(), new BytesRef(context.id())));
-        }
-    }
+    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {}

     @Override
     protected String contentType() {
@@ -19,11 +19,11 @@

 package org.elasticsearch.index.mapper.internal;

-import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.search.Queries;
@@ -58,7 +58,7 @@ public class IndexFieldMapper extends MetadataFieldMapper {
         public static final MappedFieldType FIELD_TYPE = new IndexFieldType();

         static {
-            FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
+            FIELD_TYPE.setIndexOptions(IndexOptions.NONE);
             FIELD_TYPE.setTokenized(false);
             FIELD_TYPE.setStored(false);
             FIELD_TYPE.setOmitNorms(true);
@@ -67,35 +67,28 @@ public class IndexFieldMapper extends MetadataFieldMapper {
             FIELD_TYPE.setName(NAME);
             FIELD_TYPE.freeze();
         }
-
-        public static final EnabledAttributeMapper ENABLED_STATE = EnabledAttributeMapper.UNSET_DISABLED;
     }

     public static class Builder extends MetadataFieldMapper.Builder<Builder, IndexFieldMapper> {

-        private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;
-
         public Builder(MappedFieldType existing) {
             super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
             indexName = Defaults.NAME;
         }

-        public Builder enabled(EnabledAttributeMapper enabledState) {
-            this.enabledState = enabledState;
-            return this;
-        }
-
         @Override
         public IndexFieldMapper build(BuilderContext context) {
             setupFieldType(context);
             fieldType.setHasDocValues(false);
-            return new IndexFieldMapper(fieldType, enabledState, context.indexSettings());
+            return new IndexFieldMapper(fieldType, context.indexSettings());
         }
     }

     public static class TypeParser implements MetadataFieldMapper.TypeParser {
         @Override
-        public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
+        public MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
+            if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0)) {
+                throw new MapperParsingException(NAME + " is not configurable");
+            }
             return new Builder(parserContext.mapperService().fullName(NAME));
         }
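Note: the `TypeParser` above gates configurability on the version the index was created with: 5.0+ indices reject any `_index` configuration, while older indices keep the lenient behavior so existing mappings still parse. The shape of that pattern, reduced to a standalone sketch (names hypothetical):

    // Version-gated rejection: new indices throw, old indices stay lenient.
    final class ConfigGate {
        record IndexVersion(int major) {
            boolean onOrAfter(IndexVersion other) { return major >= other.major; }
        }

        static void parseIndexField(IndexVersion createdWith, boolean hasConfig) {
            IndexVersion cutoff = new IndexVersion(5); // stands in for Version.V_5_0_0
            if (hasConfig && createdWith.onOrAfter(cutoff)) {
                throw new IllegalArgumentException("_index is not configurable");
            }
            // pre-5.0 indices: configuration was ignored before, keep accepting it
        }
    }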
@@ -179,43 +172,22 @@ public class IndexFieldMapper extends MetadataFieldMapper {
         }
     }

-    private EnabledAttributeMapper enabledState;
-
     private IndexFieldMapper(Settings indexSettings, MappedFieldType existing) {
-        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing, Defaults.ENABLED_STATE, indexSettings);
+        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing, indexSettings);
     }

-    private IndexFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabledState, Settings indexSettings) {
+    private IndexFieldMapper(MappedFieldType fieldType, Settings indexSettings) {
         super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
-        this.enabledState = enabledState;
-    }
-
-    public boolean enabled() {
-        return this.enabledState.enabled;
     }

     @Override
-    public void preParse(ParseContext context) throws IOException {
-        // we pre parse it and not in parse, since its not part of the root object
-        super.parse(context);
-    }
+    public void preParse(ParseContext context) throws IOException {}

     @Override
-    public void postParse(ParseContext context) throws IOException {
-    }
+    public void postParse(ParseContext context) throws IOException {}

     @Override
     public Mapper parse(ParseContext context) throws IOException {
         return null;
     }

     @Override
-    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
-        if (!enabledState.enabled) {
-            return;
-        }
-        fields.add(new Field(fieldType().name(), context.index(), fieldType()));
-    }
+    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {}

     @Override
     protected String contentType() {

@@ -224,26 +196,12 @@ public class IndexFieldMapper extends MetadataFieldMapper {

     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
-
-        // if all defaults, no need to write it at all
-        if (includeDefaults == false && enabledState == Defaults.ENABLED_STATE) {
-            return builder;
-        }
-        builder.startObject(CONTENT_TYPE);
-        if (includeDefaults || enabledState != Defaults.ENABLED_STATE) {
-            builder.field("enabled", enabledState.enabled);
-        }
-        builder.endObject();
         return builder;
     }

     @Override
     protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
-        IndexFieldMapper indexFieldMapperMergeWith = (IndexFieldMapper) mergeWith;
-        if (indexFieldMapperMergeWith.enabledState != enabledState && !indexFieldMapperMergeWith.enabledState.unset()) {
-            this.enabledState = indexFieldMapperMergeWith.enabledState;
-        }
+        // nothing to do
     }

 }
@@ -238,9 +238,9 @@ public class ParentFieldMapper extends MetadataFieldMapper {

     @Override
     protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
-        boolean parent = context.docMapper().isParent(context.type());
+        boolean parent = context.docMapper().isParent(context.sourceToParse().type());
         if (parent) {
-            fields.add(new SortedDocValuesField(parentJoinField.fieldType().name(), new BytesRef(context.id())));
+            fields.add(new SortedDocValuesField(parentJoinField.fieldType().name(), new BytesRef(context.sourceToParse().id())));
         }

         if (!active()) {
@@ -165,12 +165,10 @@ public class RoutingFieldMapper extends MetadataFieldMapper {

     @Override
     protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
-        if (context.sourceToParse().routing() != null) {
-            String routing = context.sourceToParse().routing();
-            if (routing != null) {
-                if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
-                    fields.add(new Field(fieldType().name(), routing, fieldType()));
-                }
-            }
+        String routing = context.sourceToParse().routing();
+        if (routing != null) {
+            if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
+                fields.add(new Field(fieldType().name(), routing, fieldType()));
+            }
         }
     }
@@ -229,7 +229,7 @@ public class SourceFieldMapper extends MetadataFieldMapper {
         if (!fieldType().stored()) {
             return;
         }
-        BytesReference source = context.source();
+        BytesReference source = context.sourceToParse().source();
         // Percolate and tv APIs may not set the source and that is ok, because these APIs will not index any data
         if (source == null) {
             return;
@@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal;

 import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexOptions;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
@@ -223,7 +222,8 @@ public class TTLFieldMapper extends MetadataFieldMapper {
                 long now = System.currentTimeMillis();
                 // there is not point indexing already expired doc
                 if (context.sourceToParse().origin() == SourceToParse.Origin.PRIMARY && now >= expire) {
-                    throw new AlreadyExpiredException(context.index(), context.type(), context.id(), timestamp, ttl, now);
+                    throw new AlreadyExpiredException(context.sourceToParse().index(),
+                        context.sourceToParse().type(), context.sourceToParse().id(), timestamp, ttl, now);
                 }
                 // the expiration timestamp (timestamp + ttl) is set as field
                 fields.add(new LegacyLongFieldMapper.CustomLongNumericField(expire, fieldType()));
@@ -208,9 +208,9 @@ public class TypeFieldMapper extends MetadataFieldMapper {
         if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored()) {
             return;
         }
-        fields.add(new Field(fieldType().name(), context.type(), fieldType()));
+        fields.add(new Field(fieldType().name(), context.sourceToParse().type(), fieldType()));
         if (fieldType().hasDocValues()) {
-            fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(context.type())));
+            fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(context.sourceToParse().type())));
         }
     }
@@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.internal;
 import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;

@@ -34,7 +33,6 @@ import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
 import org.elasticsearch.index.mapper.ParseContext;
-import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.index.mapper.core.TextFieldMapper;
 import org.elasticsearch.index.mapper.Uid;
@@ -138,46 +136,21 @@ public class UidFieldMapper extends MetadataFieldMapper {

     @Override
     public void preParse(ParseContext context) throws IOException {
-        // if we have the id provided, fill it, and parse now
-        if (context.sourceToParse().id() != null) {
-            context.id(context.sourceToParse().id());
-            super.parse(context);
-        }
+        super.parse(context);
     }

     @Override
-    public void postParse(ParseContext context) throws IOException {
-        if (context.id() == null) {
-            throw new MapperParsingException("No id found while parsing the content source");
-        }
-        // if we did not have the id as part of the sourceToParse, then we need to parse it here
-        // it would have been filled in the _id parse phase
-        if (context.sourceToParse().id() == null) {
-            super.parse(context);
-            // since we did not have the uid in the pre phase, we did not add it automatically to the nested docs
-            // as they were created we need to make sure we add it to all the nested docs...
-            if (context.docs().size() > 1) {
-                final IndexableField uidField = context.rootDoc().getField(UidFieldMapper.NAME);
-                assert uidField != null;
-                // we need to go over the docs and add it...
-                for (int i = 1; i < context.docs().size(); i++) {
-                    final Document doc = context.docs().get(i);
-                    doc.add(new Field(UidFieldMapper.NAME, uidField.stringValue(), Defaults.NESTED_FIELD_TYPE));
-                }
-            }
-        }
-    }
+    public void postParse(ParseContext context) throws IOException {}

     @Override
     public Mapper parse(ParseContext context) throws IOException {
-        // nothing to do here, we either do it in post parse, or in pre parse.
+        // nothing to do here, we do everything in preParse
         return null;
     }

     @Override
     protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
-        Field uid = new Field(NAME, Uid.createUid(context.type(), context.id()), Defaults.FIELD_TYPE);
-        context.uid(uid);
+        Field uid = new Field(NAME, Uid.createUid(context.sourceToParse().type(), context.sourceToParse().id()), Defaults.FIELD_TYPE);
         fields.add(uid);
         if (fieldType().hasDocValues()) {
             fields.add(new BinaryDocValuesField(NAME, new BytesRef(uid.stringValue())));
@@ -172,7 +172,7 @@ public class PercolatorFieldMapper extends FieldMapper {
     @Override
     public Mapper parse(ParseContext context) throws IOException {
         QueryShardContext queryShardContext = new QueryShardContext(this.queryShardContext);
-        DocumentMapper documentMapper = queryShardContext.getMapperService().documentMapper(context.type());
+        DocumentMapper documentMapper = queryShardContext.getMapperService().documentMapper(context.sourceToParse().type());
         for (FieldMapper fieldMapper : documentMapper.mappers()) {
             if (fieldMapper instanceof PercolatorFieldMapper) {
                 PercolatorFieldType fieldType = (PercolatorFieldType) fieldMapper.fieldType();
@@ -362,10 +362,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
         DocumentMapperForType docMapperForType = mapperService.documentMapperWithAutoCreate(documentType);
         DocumentMapper docMapper = docMapperForType.getDocumentMapper();

-        ParsedDocument doc = docMapper.parse(source(document)
-            .index(context.index().getName())
-            .id("_temp_id")
-            .type(documentType));
+        ParsedDocument doc = docMapper.parse(source(context.index().getName(), documentType, "_temp_id", document));

         FieldNameAnalyzer fieldNameAnalyzer = (FieldNameAnalyzer) docMapper.mappers().indexAnalyzer();
         // Need to this custom impl because FieldNameAnalyzer is strict and the percolator sometimes isn't when
@@ -85,7 +85,6 @@ import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.SourceToParse;
-import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.merge.MergeStats;
 import org.elasticsearch.index.percolator.PercolatorFieldMapper;
 import org.elasticsearch.index.recovery.RecoveryStats;
 import org.elasticsearch.index.refresh.RefreshStats;
 import org.elasticsearch.index.search.stats.SearchStats;
@@ -478,7 +477,7 @@ public class IndexShard extends AbstractIndexShardComponent {
             doc.addDynamicMappingsUpdate(docMapper.getMapping());
         }
         MappedFieldType uidFieldType = docMapper.getDocumentMapper().uidMapper().fieldType();
-        Query uidQuery = uidFieldType.termQuery(doc.uid().stringValue(), null);
+        Query uidQuery = uidFieldType.termQuery(doc.uid(), null);
         Term uid = MappedFieldType.extractTerm(uidQuery);
         return new Engine.Index(uid, doc, version, versionType, origin, startTime);
     }
@@ -151,7 +151,7 @@ public class TranslogRecoveryPerformer {
         switch (operation.opType()) {
             case INDEX:
                 Translog.Index index = (Translog.Index) operation;
-                Engine.Index engineIndex = IndexShard.prepareIndex(docMapper(index.type()), source(index.source()).type(index.type()).id(index.id())
+                Engine.Index engineIndex = IndexShard.prepareIndex(docMapper(index.type()), source(shardId.getIndexName(), index.type(), index.id(), index.source())
                     .routing(index.routing()).parent(index.parent()).timestamp(index.timestamp()).ttl(index.ttl()),
                     index.version(), index.versionType().versionTypeForReplicationAndRecovery(), Engine.Operation.Origin.RECOVERY);
                 maybeAddMappingUpdate(engineIndex.type(), engineIndex.parsedDoc().dynamicMappingsUpdate(), engineIndex.id(), allowMappingUpdates);
@@ -290,7 +290,7 @@ public class TermVectorsService {
     private static ParsedDocument parseDocument(IndexShard indexShard, String index, String type, BytesReference doc) throws Throwable {
         MapperService mapperService = indexShard.mapperService();
         DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(type);
-        ParsedDocument parsedDocument = docMapper.getDocumentMapper().parse(source(doc).index(index).type(type).id("_id_for_tv_api"));
+        ParsedDocument parsedDocument = docMapper.getDocumentMapper().parse(source(index, type, "_id_for_tv_api", doc));
         if (docMapper.getMapping() != null) {
             parsedDocument.addDynamicMappingsUpdate(docMapper.getMapping());
         }
@@ -19,9 +19,7 @@

 package org.elasticsearch.index;

-import org.apache.lucene.document.Field.Store;
-import org.apache.lucene.document.LegacyIntField;
-import org.apache.lucene.document.StringField;
+import org.apache.lucene.document.NumericDocValuesField;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.bytes.BytesReference;

@@ -41,7 +39,7 @@ import static org.hamcrest.Matchers.startsWith;
 public class IndexingSlowLogTests extends ESTestCase {
     public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException {
         BytesReference source = JsonXContent.contentBuilder().startObject().field("foo", "bar").endObject().bytes();
-        ParsedDocument pd = new ParsedDocument(new StringField("uid", "test:id", Store.YES), new LegacyIntField("version", 1, Store.YES), "id",
+        ParsedDocument pd = new ParsedDocument(new NumericDocValuesField("version", 1), "id",
             "test", null, 0, -1, null, source, null);
         Index index = new Index("foo", "123");
         // Turning off document logging doesn't log source[]
@@ -229,7 +229,7 @@ public class InternalEngineTests extends ESTestCase {
         Field versionField = new NumericDocValuesField("_version", 0);
         document.add(uidField);
         document.add(versionField);
-        return new ParsedDocument(uidField, versionField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingUpdate);
+        return new ParsedDocument(versionField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingUpdate);
     }

     protected Store createStore() throws IOException {
@@ -172,7 +172,7 @@ public class ShadowEngineTests extends ESTestCase {
         document.add(uidField);
         document.add(versionField);
         document.add(new LongPoint("point_field", 42)); // so that points report memory/disk usage
-        return new ParsedDocument(uidField, versionField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingsUpdate);
+        return new ParsedDocument(versionField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingsUpdate);
     }

     protected Store createStore(Path p) throws IOException {
@@ -171,13 +171,12 @@ public class DocumentParserTests extends ESSingleNodeTestCase {

     // creates an object mapper, which is about 100x harder than it should be....
     ObjectMapper createObjectMapper(MapperService mapperService, String name) throws Exception {
-        String[] nameParts = name.split("\\.");
-        ContentPath path = new ContentPath();
-        for (int i = 0; i < nameParts.length - 1; ++i) {
-            path.add(nameParts[i]);
-        }
         ParseContext context = new ParseContext.InternalParseContext(Settings.EMPTY,
-            mapperService.documentMapperParser(), mapperService.documentMapper("type"), path);
+            mapperService.documentMapperParser(), mapperService.documentMapper("type"), null, null);
+        String[] nameParts = name.split("\\.");
+        for (int i = 0; i < nameParts.length - 1; ++i) {
+            context.path().add(nameParts[i]);
+        }
         Mapper.Builder builder = new ObjectMapper.Builder(nameParts[nameParts.length - 1]).enabled(true);
         Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path());
         return (ObjectMapper)builder.build(builderContext);
@@ -206,9 +206,8 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {

     private Mapper parse(DocumentMapper mapper, DocumentMapperParser parser, XContentBuilder builder) throws Exception {
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
-        ParseContext.InternalParseContext ctx = new ParseContext.InternalParseContext(settings, parser, mapper, new ContentPath(0));
-        SourceToParse source = SourceToParse.source(builder.bytes());
-        ctx.reset(XContentHelper.createParser(source.source()), new ParseContext.Document(), source);
+        SourceToParse source = SourceToParse.source("test", mapper.type(), "some_id", builder.bytes());
+        ParseContext.InternalParseContext ctx = new ParseContext.InternalParseContext(settings, parser, mapper, source, XContentHelper.createParser(source.source()));
         assertEquals(XContentParser.Token.START_OBJECT, ctx.parser().nextToken());
         ctx.parser().nextToken();
         DocumentParser.parseObjectOrNested(ctx, mapper.root(), true);

@@ -558,7 +557,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
         XContentBuilder json = XContentFactory.jsonBuilder().startObject()
             .field("field", "foo")
             .endObject();
-        SourceToParse source = SourceToParse.source(json.bytes()).id("1");
+        SourceToParse source = SourceToParse.source("test", "type1", "1", json.bytes());
         DocumentMapper mapper = indexService.mapperService().documentMapper("type1");
         assertNull(mapper.mappers().getMapper("field.raw"));
         ParsedDocument parsed = mapper.parse(source);
@@ -19,23 +19,16 @@

 package org.elasticsearch.index.mapper.id;

-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.SourceToParse;
-import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.mapper.internal.IdFieldMapper;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.test.ESSingleNodeTestCase;

-import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
@@ -53,16 +46,6 @@ public class IdMappingTests extends ESSingleNodeTestCase {

         assertThat(doc.rootDoc().get(UidFieldMapper.NAME), notNullValue());
         assertThat(doc.rootDoc().get(IdFieldMapper.NAME), nullValue());
-
-        try {
-            docMapper.parse("test", "type", null, XContentFactory.jsonBuilder()
-                .startObject()
-                .endObject()
-                .bytes());
-            fail("expect missing id");
-        } catch (MapperParsingException e) {
-            assertTrue(e.getMessage().contains("No id found"));
-        }
     }

     public void testIncludeInObjectNotAllowed() throws Exception {
@@ -70,8 +53,8 @@ public class IdMappingTests extends ESSingleNodeTestCase {
         DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));

         try {
-            docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder()
-                .startObject().field("_id", "1").endObject().bytes()).type("type"));
+            docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
+                .startObject().field("_id", "1").endObject().bytes()));
             fail("Expected failure to parse metadata field");
         } catch (MapperParsingException e) {
             assertTrue(e.getMessage(), e.getMessage().contains("Field [_id] is a metadata field and cannot be added inside a document"));
@@ -19,24 +19,36 @@

 package org.elasticsearch.index.mapper.index;

+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.compress.CompressedXContent;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.DocumentMapperParser;
+import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
+import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.test.InternalSettingsPlugin;

 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.nullValue;

+import java.io.IOException;
+import java.util.Collection;
+
 public class IndexTypeMapperTests extends ESSingleNodeTestCase {

+    @Override
+    protected Collection<Class<? extends Plugin>> getPlugins() {
+        return pluginList(InternalSettingsPlugin.class);
+    }
+
     public void testDefaultDisabledIndexMapper() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
             .endObject().endObject().string();
         DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
-        IndexFieldMapper indexMapper = docMapper.metadataMapper(IndexFieldMapper.class);
-        assertThat(indexMapper.enabled(), equalTo(false));
-
         ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
             .startObject()
@@ -47,4 +59,24 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase {
         assertThat(doc.rootDoc().get("_index"), nullValue());
         assertThat(doc.rootDoc().get("field"), equalTo("value"));
     }
+
+    public void testIndexNotConfigurable() throws IOException {
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+            .startObject("_index").endObject()
+            .endObject().endObject().string();
+        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
+        MapperParsingException e = expectThrows(MapperParsingException.class,
+            () -> parser.parse("type", new CompressedXContent(mapping)));
+        assertEquals("_index is not configurable", e.getMessage());
+    }
+
+    public void testBwCompatIndexNotConfigurable() throws IOException {
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+            .startObject("_index").endObject()
+            .endObject().endObject().string();
+        DocumentMapperParser parser = createIndex("test",
+            Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build())
+            .mapperService().documentMapperParser();
+        parser.parse("type", new CompressedXContent(mapping)); // no exception
+    }
 }
@@ -24,7 +24,6 @@ import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.SourceToParse;
-import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.test.ESSingleNodeTestCase;

 public class ParentMappingTests extends ESSingleNodeTestCase {

@@ -35,8 +34,8 @@ public class ParentMappingTests extends ESSingleNodeTestCase {
         DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));

         try {
-            docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder()
-                .startObject().field("_parent", "1122").endObject().bytes()).type("type").id("1"));
+            docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
+                .startObject().field("_parent", "1122").endObject().bytes()));
             fail("Expected failure to parse metadata field");
         } catch (MapperParsingException e) {
             assertTrue(e.getMessage(), e.getMessage().contains("Field [_parent] is a metadata field and cannot be added inside a document"));
@@ -49,11 +48,11 @@ public class ParentMappingTests extends ESSingleNodeTestCase {
             .endObject().endObject().string();
         DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));

-        ParsedDocument doc = docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder()
+        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
             .startObject()
             .field("x_field", "x_value")
             .endObject()
-            .bytes()).type("type").id("1").parent("1122"));
+            .bytes()).parent("1122"));

         assertEquals("1122", doc.rootDoc().getBinaryValue("_parent#p_type").utf8ToString());
     }
@@ -36,11 +36,11 @@ public class RoutingTypeMapperTests extends ESSingleNodeTestCase {
             .endObject().endObject().string();
         DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));

-        ParsedDocument doc = docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder()
+        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
             .startObject()
             .field("field", "value")
             .endObject()
-            .bytes()).type("type").id("1").routing("routing_value"));
+            .bytes()).routing("routing_value"));

         assertThat(doc.rootDoc().get("_routing"), equalTo("routing_value"));
         assertThat(doc.rootDoc().get("field"), equalTo("value"));
@@ -76,7 +76,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
             .field("field", "value")
             .endObject()
             .bytes();
-        ParsedDocument doc = docMapper.parse(SourceToParse.source(source).type("type").id("1").timestamp(1));
+        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source).timestamp(1));

         assertThat(doc.rootDoc().getField("_timestamp"), equalTo(null));
     }

@@ -91,7 +91,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
             .field("field", "value")
             .endObject()
             .bytes();
-        ParsedDocument doc = docMapper.parse(SourceToParse.source(source).type("type").id("1").timestamp(1));
+        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source).timestamp(1));

         assertThat(doc.rootDoc().getField("_timestamp").fieldType().stored(), equalTo(true));
         assertNotSame(IndexOptions.NONE, doc.rootDoc().getField("_timestamp").fieldType().indexOptions());
@@ -20,7 +20,6 @@
 package org.elasticsearch.index.mapper.ttl;

-import org.apache.lucene.index.IndexOptions;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
@@ -52,7 +51,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
             .field("field", "value")
             .endObject()
             .bytes();
-        ParsedDocument doc = docMapper.parse(SourceToParse.source(source).type("type").id("1").ttl(Long.MAX_VALUE));
+        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source).ttl(Long.MAX_VALUE));

         assertThat(doc.rootDoc().getField("_ttl"), equalTo(null));
     }

@@ -67,7 +66,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
             .field("field", "value")
             .endObject()
             .bytes();
-        ParsedDocument doc = docMapper.parse(SourceToParse.source(source).type("type").id("1").ttl(Long.MAX_VALUE));
+        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source).ttl(Long.MAX_VALUE));

         assertThat(doc.rootDoc().getField("_ttl").fieldType().stored(), equalTo(true));
         assertNotSame(IndexOptions.NONE, doc.rootDoc().getField("_ttl").fieldType().indexOptions());
@@ -674,7 +674,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
         Field versionField = new NumericDocValuesField("_version", 0);
         document.add(uidField);
         document.add(versionField);
-        return new ParsedDocument(uidField, versionField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingUpdate);
+        return new ParsedDocument(versionField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingUpdate);
     }

     public void testIndexingOperationsListeners() throws IOException {
@@ -156,10 +156,7 @@ public class SizeFieldMapper extends MetadataFieldMapper {
         if (!enabledState.enabled) {
             return;
         }
-        if (context.source() == null) {
-            return;
-        }
-        final int value = context.source().length();
+        final int value = context.sourceToParse().source().length();
         if (Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha2)) {
             fields.add(new LegacyIntegerFieldMapper.CustomIntegerNumericField(value, fieldType()));
         } else {
@@ -29,14 +29,11 @@ import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.SourceToParse;
 import org.elasticsearch.indices.IndicesModule;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.junit.Before;

-
 import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;

 import org.apache.lucene.index.IndexableField;
@@ -67,7 +64,7 @@ public class SizeMappingTests extends ESSingleNodeTestCase {
             .field("field", "value")
             .endObject()
             .bytes();
-        ParsedDocument doc = docMapper.parse(SourceToParse.source(source).type("type").id("1"));
+        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source));

         boolean stored = false;
         boolean points = false;

@@ -90,7 +87,7 @@ public class SizeMappingTests extends ESSingleNodeTestCase {
             .field("field", "value")
             .endObject()
             .bytes();
-        ParsedDocument doc = docMapper.parse(SourceToParse.source(source).type("type").id("1"));
+        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source));

         assertThat(doc.rootDoc().getField("_size"), nullValue());
     }

@@ -105,7 +102,7 @@ public class SizeMappingTests extends ESSingleNodeTestCase {
             .field("field", "value")
             .endObject()
             .bytes();
-        ParsedDocument doc = docMapper.parse(SourceToParse.source(source).type("type").id("1"));
+        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source));

         assertThat(doc.rootDoc().getField("_size"), nullValue());
     }