mappings: remove flyweight

Martijn van Groningen 2016-01-28 10:15:04 +01:00
parent 9ec1b11148
commit f5e89f7242
11 changed files with 22 additions and 49 deletions
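In short: `flyweight` marked transient documents (percolator and term vectors requests) that are parsed but never indexed. This commit removes the flag everywhere: callers that used to set `flyweight(true)` now pass a placeholder id, and mappers that need the raw source check for a null `source()` instead. A minimal before/after sketch of the calling convention, assembled from the hunks below:

    // before: transient documents were flagged as flyweight
    docMapper.getDocumentMapper().parse(source(doc).index(index).type(type).flyweight(true));

    // after: the flag is gone; callers supply a placeholder id instead,
    // and mappers treat a null source() as "nothing to index"
    docMapper.getDocumentMapper().parse(source(doc).index(index).type(type).id("_id_for_tv_api"));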

ParseContext.java

@@ -181,11 +181,6 @@ public abstract class ParseContext {
         this.in = in;
     }
 
-    @Override
-    public boolean flyweight() {
-        return in.flyweight();
-    }
-
     @Override
     public DocumentMapperParser docMapperParser() {
         return in.docMapperParser();
@@ -411,11 +406,6 @@ public abstract class ParseContext {
         this.dynamicMappingsUpdate = null;
     }
 
-    @Override
-    public boolean flyweight() {
-        return sourceToParse.flyweight();
-    }
-
     @Override
     public DocumentMapperParser docMapperParser() {
         return this.docMapperParser;
@@ -580,8 +570,6 @@ public abstract class ParseContext {
         }
     }
 
-    public abstract boolean flyweight();
-
     public abstract DocumentMapperParser docMapperParser();
 
     /**
@@ -658,6 +646,7 @@ public abstract class ParseContext {
     public abstract SourceToParse sourceToParse();
 
+    @Nullable
     public abstract BytesReference source();
 
     // only should be used by SourceFieldMapper to update with a compressed source

SourceToParse.java

@@ -46,8 +46,6 @@ public class SourceToParse {
 
     private final XContentParser parser;
 
-    private boolean flyweight = false;
-
     private String index;
 
     private String type;
@@ -106,15 +104,6 @@ public class SourceToParse {
         return this;
     }
 
-    public SourceToParse flyweight(boolean flyweight) {
-        this.flyweight = flyweight;
-        return this;
-    }
-
-    public boolean flyweight() {
-        return this.flyweight;
-    }
-
     public String id() {
         return this.id;
     }

IdFieldMapper.java

@@ -220,7 +220,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
 
     @Override
     public void postParse(ParseContext context) throws IOException {
-        if (context.id() == null && !context.sourceToParse().flyweight()) {
+        if (context.id() == null) {
             throw new MapperParsingException("No id found while parsing the content source");
         }
         // it either get built in the preParse phase, or get parsed...

ParentFieldMapper.java

@@ -228,9 +228,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
 
     @Override
     public void postParse(ParseContext context) throws IOException {
-        if (context.sourceToParse().flyweight() == false) {
-            parse(context);
-        }
+        parse(context);
     }
 
     @Override
@Override

SourceFieldMapper.java

@@ -251,10 +251,11 @@ public class SourceFieldMapper extends MetadataFieldMapper {
         if (!fieldType().stored()) {
             return;
         }
-        if (context.flyweight()) {
+        BytesReference source = context.source();
+        // Percolate and tv APIs may not set the source and that is ok, because these APIs will not index any data
+        if (source == null) {
             return;
         }
-        BytesReference source = context.source();
 
         boolean filtered = (includes != null && includes.length > 0) || (excludes != null && excludes.length > 0);
         if (filtered) {

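The hunk above replaces the capability flag with a null check: a transient document simply has no source to store, so the guard is derived from the data itself rather than from a flyweight flag. The same idiom reappears in SizeFieldMapper at the end of this commit. As a sketch:

    // absence of source, rather than a flyweight flag, now signals "do not index"
    BytesReference source = context.source(); // may be null for percolate/tv requests
    if (source == null) {
        return; // nothing to store for a transient document
    }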
TTLFieldMapper.java

@@ -212,7 +212,7 @@ public class TTLFieldMapper extends MetadataFieldMapper {
 
     @Override
     protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException, AlreadyExpiredException {
-        if (enabledState.enabled && !context.sourceToParse().flyweight()) {
+        if (enabledState.enabled) {
             long ttl = context.sourceToParse().ttl();
             if (ttl <= 0 && defaultTTL > 0) { // no ttl provided so we use the default value
                 ttl = defaultTTL;

UidFieldMapper.java

@@ -149,7 +149,7 @@ public class UidFieldMapper extends MetadataFieldMapper {
 
     @Override
     public void postParse(ParseContext context) throws IOException {
-        if (context.id() == null && !context.sourceToParse().flyweight()) {
+        if (context.id() == null) {
             throw new MapperParsingException("No id found while parsing the content source");
         }
         // if we did not have the id as part of the sourceToParse, then we need to parse it here

PercolatorFieldMapper.java

@@ -126,9 +126,7 @@ public class PercolatorFieldMapper extends FieldMapper {
     @Override
     public Mapper parse(ParseContext context) throws IOException {
         QueryShardContext queryShardContext = new QueryShardContext(this.queryShardContext);
         Query query = PercolatorQueriesRegistry.parseQuery(queryShardContext, mapUnmappedFieldAsString, context.parser());
-        if (context.flyweight() == false) {
-            ExtractQueryTermsService.extractQueryTerms(query, context.doc(), queryTermsField.name(), unknownQueryField.name(), queryTermsField.fieldType());
-        }
+        ExtractQueryTermsService.extractQueryTerms(query, context.doc(), queryTermsField.name(), unknownQueryField.name(), queryTermsField.fieldType());
         return null;
     }

TermVectorsService.java

@@ -292,7 +292,7 @@ public class TermVectorsService {
 
     private ParsedDocument parseDocument(IndexShard indexShard, String index, String type, BytesReference doc) throws Throwable {
         MapperService mapperService = indexShard.mapperService();
         DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(type);
-        ParsedDocument parsedDocument = docMapper.getDocumentMapper().parse(source(doc).index(index).type(type).flyweight(true));
+        ParsedDocument parsedDocument = docMapper.getDocumentMapper().parse(source(doc).index(index).type(type).id("_id_for_tv_api"));
         if (docMapper.getMapping() != null) {
             parsedDocument.addDynamicMappingsUpdate(docMapper.getMapping());
         }

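The placeholder id here is required because IdFieldMapper and UidFieldMapper (above) now throw for any document without an id; the flyweight exemption that used to bypass the check is gone. The parsed document is only inspected for term vectors and never indexed, so a constant id is safe:

    // with the flyweight exemption removed, parsing fails on a missing id:
    if (context.id() == null) {
        throw new MapperParsingException("No id found while parsing the content source");
    }
    // hence the term vectors path sets a throwaway id up front:
    source(doc).index(index).type(type).id("_id_for_tv_api");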
PercolateDocumentParser.java

@@ -24,6 +24,7 @@ import org.apache.lucene.search.Query;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.percolate.PercolateShardRequest;
 import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
+import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
@@ -34,6 +35,7 @@ import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.mapper.DocumentMapperForType;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.ParsedDocument;
+import org.elasticsearch.index.mapper.SourceToParse;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.SearchParseElement;
 import org.elasticsearch.search.aggregations.AggregationPhase;
@@ -93,7 +95,7 @@ public class PercolateDocumentParser {
 
             DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(request.documentType());
             String index = context.shardTarget().index();
-            doc = docMapper.getDocumentMapper().parse(source(parser).index(index).type(request.documentType()).flyweight(true));
+            doc = docMapper.getDocumentMapper().parse(source(parser).index(index).type(request.documentType()).id("_id_for_percolate_api"));
             if (docMapper.getMapping() != null) {
                 doc.addDynamicMappingsUpdate(docMapper.getMapping());
             }
@@ -202,19 +204,15 @@ public class PercolateDocumentParser {
     }
 
     private ParsedDocument parseFetchedDoc(PercolateContext context, BytesReference fetchedDoc, MapperService mapperService, String index, String type) {
-        try (XContentParser parser = XContentFactory.xContent(fetchedDoc).createParser(fetchedDoc)) {
-            DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(type);
-            ParsedDocument doc = docMapper.getDocumentMapper().parse(source(parser).index(index).type(type).flyweight(true));
-            if (doc == null) {
-                throw new ElasticsearchParseException("No doc to percolate in the request");
-            }
-            if (context.highlight() != null) {
-                doc.setSource(fetchedDoc);
-            }
-            return doc;
-        } catch (Throwable e) {
-            throw new ElasticsearchParseException("failed to parse request", e);
-        }
+        DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(type);
+        ParsedDocument doc = docMapper.getDocumentMapper().parse(source(fetchedDoc).index(index).type(type).id("_id_for_percolate_api"));
+        if (doc == null) {
+            throw new ElasticsearchParseException("No doc to percolate in the request");
+        }
+        if (context.highlight() != null) {
+            doc.setSource(fetchedDoc);
+        }
+        return doc;
     }

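Note that parseFetchedDoc no longer opens an XContentParser over the fetched bytes; it hands the BytesReference straight to the document mapper, which is why the try-with-resources block and the catch-and-rewrap ("failed to parse request") disappear and parse errors now propagate unwrapped. Both SourceToParse.source overloads appear in this file's hunks; a sketch of the switch:

    // before: a parser was opened over the bytes and closed via try-with-resources
    doc = docMapper.getDocumentMapper().parse(source(parser).index(index).type(type).id("_id_for_percolate_api"));
    // after: the raw bytes are passed directly, with no parser lifecycle to manage
    doc = docMapper.getDocumentMapper().parse(source(fetchedDoc).index(index).type(type).id("_id_for_percolate_api"));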
SizeFieldMapper.java

@@ -150,7 +150,7 @@ public class SizeFieldMapper extends MetadataFieldMapper {
         if (!enabledState.enabled) {
             return;
         }
-        if (context.flyweight()) {
+        if (context.source() == null) {
             return;
         }
         fields.add(new IntegerFieldMapper.CustomIntegerNumericField(context.source().length(), fieldType()));