Merge pull request #11243 from rjernst/remove/type-listener

Mappings: Remove document parse listener
Ryan Ernst 2015-05-20 00:00:57 -07:00
commit eaf35c4e4a
6 changed files with 8 additions and 56 deletions
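For callers, the net effect is that DocumentMapper keeps a single parse entry point and the listener-accepting overload goes away. A minimal before/after sketch, where docMapper and source stand for an existing DocumentMapper and SourceToParse (illustrative names, not part of this commit):

    // Before this commit: a ParseListener (or null) could be passed along with the source.
    ParsedDocument oldStyle = docMapper.parse(source, DocumentMapper.ParseListener.EMPTY);

    // After this commit: the only remaining overload; parsed fields are always added to the document.
    ParsedDocument newStyle = docMapper.parse(source);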

DocumentMapper.java

@@ -82,28 +82,6 @@ import java.util.concurrent.CopyOnWriteArrayList;
  */
 public class DocumentMapper implements ToXContent {
 
-    /**
-     * A listener to be called during the parse process.
-     */
-    public static interface ParseListener<ParseContext> {
-
-        public static final ParseListener EMPTY = new ParseListenerAdapter();
-
-        /**
-         * Called before a field is added to the document. Return <tt>true</tt> to include
-         * it in the document.
-         */
-        boolean beforeFieldAdded(FieldMapper fieldMapper, Field fieldable, ParseContext parseContent);
-    }
-
-    public static class ParseListenerAdapter implements ParseListener {
-
-        @Override
-        public boolean beforeFieldAdded(FieldMapper fieldMapper, Field fieldable, Object parseContext) {
-            return true;
-        }
-    }
-
     public static class Builder {
 
         private Map<Class<? extends RootMapper>, RootMapper> rootMappers = new LinkedHashMap<>();
@@ -341,13 +319,7 @@ public class DocumentMapper implements ToXContent {
     }
 
     public ParsedDocument parse(SourceToParse source) throws MapperParsingException {
-        return documentParser.parseDocument(source, null);
-    }
-
-    // NOTE: do not use this method, it will be removed in the future once
-    // https://github.com/elastic/elasticsearch/issues/10736 is done (MLT api is the only user of this listener)
-    public ParsedDocument parse(SourceToParse source, @Nullable ParseListener listener) throws MapperParsingException {
-        return documentParser.parseDocument(source, listener);
+        return documentParser.parseDocument(source);
     }
 
     /**
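For context on what the deleted extension point allowed: a ParseListener could veto individual fields before they were added to the Lucene document. A hypothetical listener that suppressed every field, built on the removed adapter, would have looked roughly like this (illustrative only; it has no counterpart in the codebase after this commit):

    // Hypothetical listener: reject every field during parsing.
    DocumentMapper.ParseListener dropAllFields = new DocumentMapper.ParseListenerAdapter() {
        @Override
        public boolean beforeFieldAdded(FieldMapper fieldMapper, Field field, Object parseContext) {
            return false; // false meant "do not add this field to the document"
        }
    };
    ParsedDocument filtered = docMapper.parse(source, dropAllFields);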

DocumentParser.java

@@ -67,7 +67,7 @@ class DocumentParser implements Closeable {
         this.docMapper = docMapper;
     }
 
-    public ParsedDocument parseDocument(SourceToParse source, @Nullable DocumentMapper.ParseListener listener) throws MapperParsingException {
+    public ParsedDocument parseDocument(SourceToParse source) throws MapperParsingException {
         ParseContext.InternalParseContext context = cache.get();
 
         final Mapping mapping = docMapper.mapping();
@@ -84,7 +84,7 @@ class DocumentParser implements Closeable {
         if (mapping.sourceTransforms.length > 0) {
             parser = transform(mapping, parser);
         }
-        context.reset(parser, new ParseContext.Document(), source, listener);
+        context.reset(parser, new ParseContext.Document(), source);
 
         // will result in START_OBJECT
         int countDownTokens = 0;
@@ -166,7 +166,7 @@ class DocumentParser implements Closeable {
         ParsedDocument doc = new ParsedDocument(context.uid(), context.version(), context.id(), context.type(), source.routing(), source.timestamp(), source.ttl(), context.docs(),
                 context.source(), update).parent(source.parent());
 
         // reset the context to free up memory
-        context.reset(null, null, null, null);
+        context.reset(null, null, null);
         return doc;
     }
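With the listener parameter gone, priming and clearing the recycled parse context reduces to the three-argument reset seen in the hunks above; sketched here with the same names DocumentParser uses:

    // Prime the per-thread context for a new document...
    ParseContext.InternalParseContext context = cache.get();
    context.reset(parser, new ParseContext.Document(), source);
    // ...parse the source into context.doc()...
    // ...then clear the context to free memory once the ParsedDocument is built.
    context.reset(null, null, null);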

ParseContext.java

@@ -33,7 +33,6 @@ import org.elasticsearch.common.lucene.all.AllEntries;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.analysis.AnalysisService;
-import org.elasticsearch.index.mapper.DocumentMapper.ParseListener;
 import org.elasticsearch.index.mapper.object.RootObjectMapper;
 
 import java.util.ArrayList;
@@ -246,11 +245,6 @@ public abstract class ParseContext {
             return in.parser();
         }
 
-        @Override
-        public ParseListener listener() {
-            return in.listener();
-        }
-
         @Override
         public Document rootDoc() {
             return in.rootDoc();
@@ -391,8 +385,6 @@ public abstract class ParseContext {
         private String id;
 
-        private DocumentMapper.ParseListener listener;
-
         private Field uid, version;
 
         private StringBuilder stringBuilder = new StringBuilder();
@@ -413,7 +405,7 @@ public abstract class ParseContext {
             this.path = path;
         }
 
-        public void reset(XContentParser parser, Document document, SourceToParse source, DocumentMapper.ParseListener listener) {
+        public void reset(XContentParser parser, Document document, SourceToParse source) {
             this.parser = parser;
             this.document = document;
             if (document != null) {
@@ -428,7 +420,6 @@ public abstract class ParseContext {
             this.sourceToParse = source;
             this.source = source == null ? null : sourceToParse.source();
             this.path.reset();
-            this.listener = listener == null ? DocumentMapper.ParseListener.EMPTY : listener;
             this.allEntries = new AllEntries();
             this.ignoredValues.clear();
             this.docBoost = 1.0f;
@@ -487,11 +478,6 @@ public abstract class ParseContext {
             return this.parser;
         }
 
-        @Override
-        public DocumentMapper.ParseListener listener() {
-            return this.listener;
-        }
-
         @Override
         public Document rootDoc() {
             return documents.get(0);
@@ -701,8 +687,6 @@ public abstract class ParseContext {
     public abstract XContentParser parser();
 
-    public abstract DocumentMapper.ParseListener listener();
-
     public abstract Document rootDoc();
 
     public abstract List<Document> docs();

AbstractFieldMapper.java

@@ -425,9 +425,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
                 if (!customBoost()) {
                     field.setBoost(boost);
                 }
-                if (context.listener().beforeFieldAdded(this, field, context)) {
-                    context.doc().add(field);
-                }
+                context.doc().add(field);
             }
         } catch (Exception e) {
             throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e);

GeoShapeFieldMapper.java

@@ -269,9 +269,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper<String> {
                 if (!customBoost()) {
                     field.setBoost(boost);
                 }
-                if (context.listener().beforeFieldAdded(this, field, context)) {
-                    context.doc().add(field);
-                }
+                context.doc().add(field);
             }
         } catch (Exception e) {
             throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e);

DynamicMappingTests.java

@@ -198,7 +198,7 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
         Settings settings = ImmutableSettings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
         ParseContext.InternalParseContext ctx = new ParseContext.InternalParseContext("test", settings, parser, mapper, new ContentPath(0));
         SourceToParse source = SourceToParse.source(builder.bytes());
-        ctx.reset(XContentHelper.createParser(source.source()), new ParseContext.Document(), source, null);
+        ctx.reset(XContentHelper.createParser(source.source()), new ParseContext.Document(), source);
         assertEquals(XContentParser.Token.START_OBJECT, ctx.parser().nextToken());
         ctx.parser().nextToken();
         return DocumentParser.parseObject(ctx, mapper.root());