Merge pull request #11243 from rjernst/remove/type-listener
Mappings: Remove document parse listener
This commit is contained in:
commit eaf35c4e4a
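In short: this commit removes the DocumentMapper.ParseListener interface and its ParseListenerAdapter, the listener-carrying overloads of DocumentMapper.parse, DocumentParser.parseDocument and ParseContext.reset, and the context.listener().beforeFieldAdded(...) check in the field mappers, so every field produced by the mappers is now added to the document unconditionally. A minimal usage sketch of the remaining entry point (the helper class and variable names below are illustrative, not part of this commit):

import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;

// Illustrative helper: after this commit there is a single, listener-free parse entry point.
public class ParseSketch {

    static ParsedDocument parse(DocumentMapper mapper, SourceToParse source) {
        // The removed overload parse(source, ParseListener) was only used by the MLT API.
        return mapper.parse(source);
    }
}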

DocumentMapper.java
@@ -82,28 +82,6 @@ import java.util.concurrent.CopyOnWriteArrayList;
  */
 public class DocumentMapper implements ToXContent {
 
-    /**
-     * A listener to be called during the parse process.
-     */
-    public static interface ParseListener<ParseContext> {
-
-        public static final ParseListener EMPTY = new ParseListenerAdapter();
-
-        /**
-         * Called before a field is added to the document. Return <tt>true</tt> to include
-         * it in the document.
-         */
-        boolean beforeFieldAdded(FieldMapper fieldMapper, Field fieldable, ParseContext parseContent);
-    }
-
-    public static class ParseListenerAdapter implements ParseListener {
-
-        @Override
-        public boolean beforeFieldAdded(FieldMapper fieldMapper, Field fieldable, Object parseContext) {
-            return true;
-        }
-    }
-
     public static class Builder {
 
         private Map<Class<? extends RootMapper>, RootMapper> rootMappers = new LinkedHashMap<>();
@@ -341,13 +319,7 @@ public class DocumentMapper implements ToXContent {
     }
 
     public ParsedDocument parse(SourceToParse source) throws MapperParsingException {
-        return documentParser.parseDocument(source, null);
-    }
-
-    // NOTE: do not use this method, it will be removed in the future once
-    // https://github.com/elastic/elasticsearch/issues/10736 is done (MLT api is the only user of this listener)
-    public ParsedDocument parse(SourceToParse source, @Nullable ParseListener listener) throws MapperParsingException {
-        return documentParser.parseDocument(source, listener);
+        return documentParser.parseDocument(source);
     }
 
     /**
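For reference, a sketch of how the hook removed above could have been implemented before this change (FieldCollectingListener is a hypothetical example, not code from the repository); an instance like this would have been passed to the now-removed parse(SourceToParse, ParseListener) overload:

import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.document.Field;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;

// Hypothetical pre-change listener: records each Lucene field the mappers produce and lets it
// through unchanged, following the beforeFieldAdded() contract in the removed Javadoc above.
public class FieldCollectingListener extends DocumentMapper.ParseListenerAdapter {

    private final List<String> seenFields = new ArrayList<>();

    @Override
    public boolean beforeFieldAdded(FieldMapper fieldMapper, Field field, Object parseContext) {
        seenFields.add(field.name()); // inspect the field before it reaches the document
        return true;                  // returning false would have kept the field out of the document
    }

    public List<String> seenFields() {
        return seenFields;
    }
}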

DocumentParser.java
@@ -67,7 +67,7 @@ class DocumentParser implements Closeable {
         this.docMapper = docMapper;
     }
 
-    public ParsedDocument parseDocument(SourceToParse source, @Nullable DocumentMapper.ParseListener listener) throws MapperParsingException {
+    public ParsedDocument parseDocument(SourceToParse source) throws MapperParsingException {
         ParseContext.InternalParseContext context = cache.get();
 
         final Mapping mapping = docMapper.mapping();
@@ -84,7 +84,7 @@ class DocumentParser implements Closeable {
         if (mapping.sourceTransforms.length > 0) {
             parser = transform(mapping, parser);
         }
-        context.reset(parser, new ParseContext.Document(), source, listener);
+        context.reset(parser, new ParseContext.Document(), source);
 
         // will result in START_OBJECT
         int countDownTokens = 0;
@@ -166,7 +166,7 @@ class DocumentParser implements Closeable {
         ParsedDocument doc = new ParsedDocument(context.uid(), context.version(), context.id(), context.type(), source.routing(), source.timestamp(), source.ttl(), context.docs(),
                 context.source(), update).parent(source.parent());
         // reset the context to free up memory
-        context.reset(null, null, null, null);
+        context.reset(null, null, null);
         return doc;
     }
 

ParseContext.java
@@ -33,7 +33,6 @@ import org.elasticsearch.common.lucene.all.AllEntries;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.analysis.AnalysisService;
-import org.elasticsearch.index.mapper.DocumentMapper.ParseListener;
 import org.elasticsearch.index.mapper.object.RootObjectMapper;
 
 import java.util.ArrayList;
@@ -246,11 +245,6 @@ public abstract class ParseContext {
             return in.parser();
         }
 
-        @Override
-        public ParseListener listener() {
-            return in.listener();
-        }
-
         @Override
         public Document rootDoc() {
             return in.rootDoc();
@@ -391,8 +385,6 @@ public abstract class ParseContext {
 
         private String id;
 
-        private DocumentMapper.ParseListener listener;
-
         private Field uid, version;
 
         private StringBuilder stringBuilder = new StringBuilder();
@@ -413,7 +405,7 @@ public abstract class ParseContext {
             this.path = path;
         }
 
-        public void reset(XContentParser parser, Document document, SourceToParse source, DocumentMapper.ParseListener listener) {
+        public void reset(XContentParser parser, Document document, SourceToParse source) {
             this.parser = parser;
             this.document = document;
             if (document != null) {
@@ -428,7 +420,6 @@ public abstract class ParseContext {
             this.sourceToParse = source;
             this.source = source == null ? null : sourceToParse.source();
             this.path.reset();
-            this.listener = listener == null ? DocumentMapper.ParseListener.EMPTY : listener;
             this.allEntries = new AllEntries();
             this.ignoredValues.clear();
             this.docBoost = 1.0f;
@@ -487,11 +478,6 @@ public abstract class ParseContext {
             return this.parser;
         }
 
-        @Override
-        public DocumentMapper.ParseListener listener() {
-            return this.listener;
-        }
-
         @Override
         public Document rootDoc() {
             return documents.get(0);
@@ -701,8 +687,6 @@ public abstract class ParseContext {
 
     public abstract XContentParser parser();
 
-    public abstract DocumentMapper.ParseListener listener();
-
     public abstract Document rootDoc();
 
     public abstract List<Document> docs();

AbstractFieldMapper.java
@@ -425,9 +425,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
                 if (!customBoost()) {
                     field.setBoost(boost);
                 }
-                if (context.listener().beforeFieldAdded(this, field, context)) {
-                    context.doc().add(field);
-                }
+                context.doc().add(field);
             }
         } catch (Exception e) {
             throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e);

GeoShapeFieldMapper.java
@@ -269,9 +269,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper<String> {
                 if (!customBoost()) {
                     field.setBoost(boost);
                 }
-                if (context.listener().beforeFieldAdded(this, field, context)) {
-                    context.doc().add(field);
-                }
+                context.doc().add(field);
             }
         } catch (Exception e) {
             throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e);

DynamicMappingTests.java
@@ -198,7 +198,7 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
         Settings settings = ImmutableSettings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
         ParseContext.InternalParseContext ctx = new ParseContext.InternalParseContext("test", settings, parser, mapper, new ContentPath(0));
         SourceToParse source = SourceToParse.source(builder.bytes());
-        ctx.reset(XContentHelper.createParser(source.source()), new ParseContext.Document(), source, null);
+        ctx.reset(XContentHelper.createParser(source.source()), new ParseContext.Document(), source);
         assertEquals(XContentParser.Token.START_OBJECT, ctx.parser().nextToken());
         ctx.parser().nextToken();
         return DocumentParser.parseObject(ctx, mapper.root());