[percolator] Removed old 2.x bwc logic.

Martijn van Groningen committed 2017-02-10 16:03:42 +01:00
parent e178dc5493
commit cab43707dc

2 changed files with 12 additions and 97 deletions

org/elasticsearch/index/mapper/MapperService.java

@@ -21,16 +21,13 @@ package org.elasticsearch.index.mapper;
import com.carrotsearch.hppc.ObjectHashSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Setting;
@@ -100,8 +97,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
            "_uid", "_id", "_type", "_all", "_parent", "_routing", "_index",
            "_size", "_timestamp", "_ttl"
    );

    @Deprecated
    public static final String PERCOLATOR_LEGACY_TYPE_NAME = ".percolator";

    private final IndexAnalyzers indexAnalyzers;
@@ -494,13 +489,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
    }

    private boolean typeNameStartsWithIllegalDot(DocumentMapper mapper) {
        // Pre-5.0 indices were allowed exactly one dot-prefixed type: the reserved
        // ".percolator" type that held percolator queries as ordinary documents.
        boolean legacyIndex = getIndexSettings().getIndexVersionCreated().before(Version.V_5_0_0_alpha1);
        if (legacyIndex) {
            return mapper.type().startsWith(".") && !PERCOLATOR_LEGACY_TYPE_NAME.equals(mapper.type());
        } else {
            return mapper.type().startsWith(".");
        }
    }
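With the version-gated branch gone, the method presumably collapses to the
unconditional check from the else arm; a minimal sketch of the post-commit state:

    // Post-commit sketch (reconstructed from the removed else branch): dot-prefixed
    // type names are rejected regardless of the version the index was created with.
    private boolean typeNameStartsWithIllegalDot(DocumentMapper mapper) {
        return mapper.type().startsWith(".");
    }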
    private boolean assertSerialization(DocumentMapper mapper) {
        // capture the source now, it may change due to concurrent parsing

org/elasticsearch/percolator/PercolateQueryBuilder.java

@@ -23,19 +23,14 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.StoredFieldVisitor;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Bits;
@@ -47,7 +42,6 @@ import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@@ -55,7 +49,6 @@ import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.analysis.FieldNameAnalyzer;
@@ -64,8 +57,6 @@ import org.elasticsearch.index.mapper.DocumentMapperForType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
@@ -428,7 +419,6 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
        // We have to make a copy of the QueryShardContext here so we can have an unfrozen version
        // for parsing the legacy percolator queries
        QueryShardContext percolateShardContext = new QueryShardContext(context);
        if (indexVersionCreated.onOrAfter(Version.V_5_0_0_alpha1)) {
            MappedFieldType fieldType = context.fieldMapper(field);
            if (fieldType == null) {
                throw new QueryShardException(context, "field [" + field + "] does not exist");
            }
@@ -441,12 +431,6 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
            PercolatorFieldMapper.FieldType pft = (PercolatorFieldMapper.FieldType) fieldType;
            PercolateQuery.QueryStore queryStore = createStore(pft, percolateShardContext, mapUnmappedFieldsAsString);
            return pft.percolateQuery(documentType, queryStore, document, docSearcher);
        } else {
            // Legacy path, removed by this commit: pre-5.0 indices stored percolator
            // queries as documents of the reserved ".percolator" type, so they are
            // matched by _type; no match can be verified up front, hence MatchNoDocsQuery.
            Query percolateTypeQuery = new TermQuery(new Term(TypeFieldMapper.NAME, MapperService.PERCOLATOR_LEGACY_TYPE_NAME));
            PercolateQuery.QueryStore queryStore = createLegacyStore(percolateShardContext, mapUnmappedFieldsAsString);
            return new PercolateQuery(documentType, queryStore, document, percolateTypeQuery, docSearcher,
                    new MatchNoDocsQuery("pre 5.0.0-alpha1 index, no verified matches"));
        }
    }
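For context on the surviving path: a percolate query now always targets a field
mapped as type "percolator". A hypothetical request sketch (field and type names
are illustrative, and the builder's constructor shape varied across 5.x, so treat
the signature as an assumption):

    // Percolate a candidate document against queries stored in the "query" field.
    BytesReference document = new BytesArray("{\"message\":\"hello world\"}");
    QueryBuilder percolate = new PercolateQueryBuilder("query", "doctype", document);
    // Running a search with this builder returns each stored query matching the doc.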
    public String getField() {
@@ -521,63 +505,4 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
        };
    }
    private static PercolateQuery.QueryStore createLegacyStore(QueryShardContext context, boolean mapUnmappedFieldsAsString) {
        return ctx -> {
            LeafReader leafReader = ctx.reader();
            return docId -> {
                // Load only the _source stored field of the legacy ".percolator" document.
                LegacyQueryFieldVisitor visitor = new LegacyQueryFieldVisitor();
                leafReader.document(docId, visitor);
                if (visitor.source == null) {
                    throw new IllegalStateException("No source found for document with docid [" + docId + "]");
                }

                // Stream through the source and parse the top-level "query" object;
                // every other field in the source is skipped.
                try (XContentParser sourceParser = XContentHelper.createParser(context.getXContentRegistry(), visitor.source)) {
                    String currentFieldName = null;
                    XContentParser.Token token = sourceParser.nextToken(); // move to the START_OBJECT
                    if (token != XContentParser.Token.START_OBJECT) {
                        throw new ElasticsearchException("failed to parse query [" + docId + "], not starting with OBJECT");
                    }
                    while ((token = sourceParser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        if (token == XContentParser.Token.FIELD_NAME) {
                            currentFieldName = sourceParser.currentName();
                        } else if (token == XContentParser.Token.START_OBJECT) {
                            if ("query".equals(currentFieldName)) {
                                QueryParseContext queryParseContext = context.newParseContext(sourceParser);
                                return parseQuery(context, mapUnmappedFieldsAsString, queryParseContext, sourceParser);
                            } else {
                                sourceParser.skipChildren();
                            }
                        } else if (token == XContentParser.Token.START_ARRAY) {
                            sourceParser.skipChildren();
                        }
                    }
                }
                return null;
            };
        };
    }
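A note on the shape returned here: the store is a two-level factory, resolved
once per index segment (ctx) and then per document (docId). Reconstructed from
the nested lambdas (a sketch; the real functional interfaces live in
PercolateQuery and their names may differ):

    // Inferred shape only; member names are assumptions, the structure is from the code.
    interface QueryStoreShape {
        PerDocument openLeaf(LeafReaderContext ctx) throws IOException; // once per segment
        interface PerDocument {
            Query getQuery(int docId) throws IOException; // lazily parse one stored query
        }
    }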
    private static final class LegacyQueryFieldVisitor extends StoredFieldVisitor {

        private BytesArray source;

        @Override
        public void binaryField(FieldInfo fieldInfo, byte[] bytes) throws IOException {
            source = new BytesArray(bytes);
        }

        @Override
        public Status needsField(FieldInfo fieldInfo) throws IOException {
            if (source != null) {
                return Status.STOP; // _source already captured, stop scanning fields
            }
            if (SourceFieldMapper.NAME.equals(fieldInfo.name)) {
                return Status.YES;  // load the _source bytes via binaryField
            } else {
                return Status.NO;   // skip every other stored field
            }
        }
    }
}
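For readers unfamiliar with Lucene stored-field visitors: the reader calls
needsField once per stored field and only decodes the fields answered with YES,
so this visitor loads exactly the _source bytes and then stops the scan. Driving
it mirrors the call already shown in createLegacyStore:

    // Minimal driving sketch, using only calls that appear in this diff.
    LegacyQueryFieldVisitor visitor = new LegacyQueryFieldVisitor();
    leafReader.document(docId, visitor); // Lucene routes stored fields through needsField
    BytesArray querySource = visitor.source; // null if the document carried no _source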