Merge pull request #11272 from rjernst/refactor/mapper-names

Mappings: Cleanup names handling
This commit is contained in:
Ryan Ernst 2015-05-21 00:36:14 -07:00
commit ea3c5d5820
19 changed files with 57 additions and 104 deletions

View File

@@ -201,8 +201,8 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleCustomO
}
for (Iterator<FieldMapper<?>> it = remainingFieldMappers.iterator(); it.hasNext(); ) {
final FieldMapper<?> fieldMapper = it.next();
if (Regex.simpleMatch(field, fieldMapper.names().name())) {
addFieldMapper(fieldMapper.names().name(), fieldMapper, fieldMappings, request.includeDefaults());
if (Regex.simpleMatch(field, fieldMapper.names().shortName())) {
addFieldMapper(fieldMapper.names().shortName(), fieldMapper, fieldMappings, request.includeDefaults());
it.remove();
}
}

View File

@@ -61,7 +61,7 @@ public final class DisabledIndexFieldData extends AbstractIndexFieldData<AtomicF
}
private IllegalStateException fail() {
return new IllegalStateException("Field data loading is forbidden on " + getFieldNames().name());
return new IllegalStateException("Field data loading is forbidden on " + getFieldNames().fullName());
}
}

View File

@@ -97,7 +97,7 @@ public abstract class DocValuesIndexFieldData {
final Settings fdSettings = mapper.fieldDataType().getSettings();
final Map<String, Settings> filter = fdSettings.getGroups("filter");
if (filter != null && !filter.isEmpty()) {
throw new IllegalArgumentException("Doc values field data doesn't support filters [" + fieldNames.name() + "]");
throw new IllegalArgumentException("Doc values field data doesn't support filters [" + fieldNames.fullName() + "]");
}
if (BINARY_INDEX_FIELD_NAMES.contains(fieldNames.indexName())) {

View File

@@ -19,12 +19,9 @@
package org.elasticsearch.index.mapper;
/**
*
*/
public class ContentPath {
public static enum Type {
public enum Type {
JUST_NAME,
FULL,
}
@@ -41,8 +38,6 @@ public class ContentPath {
private String[] path = new String[10];
private String sourcePath;
public ContentPath() {
this(0);
}
@@ -60,7 +55,6 @@ public class ContentPath {
public void reset() {
this.index = 0;
this.sourcePath = null;
}
public void add(String name) {
@@ -99,14 +93,4 @@ public class ContentPath {
public void pathType(Type type) {
this.pathType = type;
}
public String sourcePath(String sourcePath) {
String orig = this.sourcePath;
this.sourcePath = sourcePath;
return orig;
}
public String sourcePath() {
return this.sourcePath;
}
}

View File

@@ -44,41 +44,34 @@ import java.util.List;
*/
public interface FieldMapper<T> extends Mapper {
public static final String DOC_VALUES_FORMAT = "doc_values_format";
String DOC_VALUES_FORMAT = "doc_values_format";
public static class Names {
class Names {
private final String name;
private final String shortName;
private final String indexName;
private final String indexNameClean;
private final String originalIndexName;
private final String fullName;
private final String sourcePath;
public Names(String name) {
this(name, name, name, name);
}
public Names(String name, String indexName, String indexNameClean, String fullName) {
this(name, indexName, indexNameClean, fullName, fullName);
}
public Names(String name, String indexName, String indexNameClean, String fullName, @Nullable String sourcePath) {
this.name = name;
public Names(String shortName, String indexName, String originalIndexName, String fullName) {
this.shortName = shortName;
this.indexName = indexName;
this.indexNameClean = indexNameClean;
this.originalIndexName = originalIndexName;
this.fullName = fullName;
this.sourcePath = sourcePath == null ? this.fullName : sourcePath;
}
/**
* The logical name of the field.
*/
public String name() {
return name;
public String shortName() {
return shortName;
}
/**
@@ -90,10 +83,10 @@ public interface FieldMapper<T> extends Mapper {
}
/**
* The cleaned index name, before any "path" modifications performed on it.
* The original index name, before any "path" modifications performed on it.
*/
public String indexNameClean() {
return indexNameClean;
public String originalIndexName() {
return originalIndexName;
}
/**
@@ -103,27 +96,6 @@ public interface FieldMapper<T> extends Mapper {
return fullName;
}
/**
* The dot path notation to extract the value from source.
*/
public String sourcePath() {
return sourcePath;
}
/**
* Creates a new index term based on the provided value.
*/
public Term createIndexNameTerm(String value) {
return new Term(indexName, value);
}
/**
* Creates a new index term based on the provided value.
*/
public Term createIndexNameTerm(BytesRef value) {
return new Term(indexName, value);
}
@Override
public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
@@ -132,25 +104,23 @@ public interface FieldMapper<T> extends Mapper {
if (!fullName.equals(names.fullName)) return false;
if (!indexName.equals(names.indexName)) return false;
if (!indexNameClean.equals(names.indexNameClean)) return false;
if (!name.equals(names.name)) return false;
if (!sourcePath.equals(names.sourcePath)) return false;
if (!originalIndexName.equals(names.originalIndexName)) return false;
if (!shortName.equals(names.shortName)) return false;
return true;
}
@Override
public int hashCode() {
int result = name.hashCode();
int result = shortName.hashCode();
result = 31 * result + indexName.hashCode();
result = 31 * result + indexNameClean.hashCode();
result = 31 * result + originalIndexName.hashCode();
result = 31 * result + fullName.hashCode();
result = 31 * result + sourcePath.hashCode();
return result;
}
}
public static enum Loading {
enum Loading {
LAZY {
@Override
public String toString() {
@@ -220,7 +190,7 @@ public interface FieldMapper<T> extends Mapper {
/**
* List of fields where this field should be copied to
*/
public AbstractFieldMapper.CopyTo copyTo();
AbstractFieldMapper.CopyTo copyTo();
/**
* Returns the actual value of the field.
@@ -285,7 +255,7 @@ public interface FieldMapper<T> extends Mapper {
*
* @return If the field is available before indexing or not.
* */
public boolean isGenerated();
boolean isGenerated();
/**
* Parse using the provided {@link ParseContext} and return a mapping

View File

@@ -19,7 +19,6 @@
package org.elasticsearch.index.mapper.core;
import com.carrotsearch.hppc.ObjectHashSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import com.google.common.base.Function;
@@ -33,7 +32,6 @@ import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PrefixQuery;
@@ -261,7 +259,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
}
protected Names buildNames(BuilderContext context) {
return new Names(name, buildIndexName(context), buildIndexNameClean(context), buildFullName(context), context.path().sourcePath());
return new Names(name, buildIndexName(context), buildIndexNameClean(context), buildFullName(context));
}
protected String buildIndexName(BuilderContext context) {
@@ -364,7 +362,8 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
@Override
public String name() {
return names.name();
// TODO: cleanup names so Mapper knows about paths, so that it is always clear whether we are using short or full name
return names.shortName();
}
@Override
@@ -475,7 +474,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
return new TermQuery(names().createIndexNameTerm(indexedValueForSearch(value)));
return new TermQuery(createTerm(value));
}
@Override
@@ -509,12 +508,12 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
return new FuzzyQuery(names.createIndexNameTerm(indexedValueForSearch(value)), fuzziness.asDistance(value), prefixLength, maxExpansions, transpositions);
return new FuzzyQuery(createTerm(value), fuzziness.asDistance(value), prefixLength, maxExpansions, transpositions);
}
@Override
public Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
PrefixQuery query = new PrefixQuery(names().createIndexNameTerm(indexedValueForSearch(value)));
PrefixQuery query = new PrefixQuery(createTerm(value));
if (method != null) {
query.setRewriteMethod(method);
}
@@ -523,13 +522,17 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
@Override
public Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
RegexpQuery query = new RegexpQuery(names().createIndexNameTerm(indexedValueForSearch(value)), flags, maxDeterminizedStates);
RegexpQuery query = new RegexpQuery(createTerm(value), flags, maxDeterminizedStates);
if (method != null) {
query.setRewriteMethod(method);
}
return query;
}
protected Term createTerm(Object value) {
return new Term(names.indexName(), indexedValueForSearch(value));
}
@Override
public Query nullValueFilter() {
return null;
@@ -629,7 +632,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(names.name());
builder.startObject(names.shortName());
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
doXContentBody(builder, includeDefaults, params);
return builder.endObject();
@@ -638,8 +641,8 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
builder.field("type", contentType());
if (indexCreatedBefore2x && (includeDefaults || !names.name().equals(names.indexNameClean()))) {
builder.field("index_name", names.indexNameClean());
if (indexCreatedBefore2x && (includeDefaults || !names.shortName().equals(names.originalIndexName()))) {
builder.field("index_name", names.originalIndexName());
}
if (includeDefaults || boost != 1.0f) {
@@ -864,7 +867,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
context.path().add(mainField.name());
context.path().add(mainField.names().shortName());
for (ObjectCursor<FieldMapper> cursor : mappers.values()) {
cursor.value.parse(context);
}
@@ -881,7 +884,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
for (ObjectCursor<FieldMapper> cursor : mergeWithMultiField.multiFields.mappers.values()) {
FieldMapper mergeWithMapper = cursor.value;
Mapper mergeIntoMapper = mappers.get(mergeWithMapper.name());
Mapper mergeIntoMapper = mappers.get(mergeWithMapper.names().shortName());
if (mergeIntoMapper == null) {
// no mapping, simply add it if not simulating
if (!mergeResult.simulate()) {
@@ -892,7 +895,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
if (newMappersBuilder == null) {
newMappersBuilder = ImmutableOpenMap.builder(mappers);
}
newMappersBuilder.put(mergeWithMapper.name(), mergeWithMapper);
newMappersBuilder.put(mergeWithMapper.names().shortName(), mergeWithMapper);
if (mergeWithMapper instanceof AbstractFieldMapper) {
if (newFieldMappers == null) {
newFieldMappers = new ArrayList<>(2);

View File

@@ -461,7 +461,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper<String> {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name())
builder.startObject(names().shortName())
.field(Fields.TYPE, CONTENT_TYPE);
builder.field(Fields.ANALYZER, indexAnalyzer.name());

View File

@@ -144,7 +144,7 @@ public class TokenCountFieldMapper extends IntegerFieldMapper {
if (valueAndBoost.value() == null) {
count = nullValue();
} else {
count = countPositions(analyzer.analyzer().tokenStream(name(), valueAndBoost.value()));
count = countPositions(analyzer.analyzer().tokenStream(names().shortName(), valueAndBoost.value()));
}
addIntegerFields(context, fields, count, valueAndBoost.boost());
}

View File

@@ -515,7 +515,7 @@ public class GeoPointFieldMapper extends AbstractFieldMapper<GeoPoint> implement
public Mapper parse(ParseContext context) throws IOException {
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
context.path().add(name());
context.path().add(names().shortName());
GeoPoint sparse = context.parseExternalValue(GeoPoint.class);

View File

@@ -193,7 +193,7 @@ public class AllFieldMapper extends AbstractFieldMapper<String> implements RootM
@Override
public Query termQuery(Object value, QueryParseContext context) {
return queryStringTermQuery(names().createIndexNameTerm(indexedValueForSearch(value)));
return queryStringTermQuery(createTerm(value));
}
@Override

View File

@@ -133,7 +133,7 @@ public class TypeFieldMapper extends AbstractFieldMapper<String> implements Root
if (fieldType.indexOptions() == IndexOptions.NONE) {
return new ConstantScoreQuery(new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.typePrefixAsBytes(BytesRefs.toBytesRef(value)))));
}
return new ConstantScoreQuery(new TermQuery(names().createIndexNameTerm(BytesRefs.toBytesRef(value))));
return new ConstantScoreQuery(new TermQuery(createTerm(value)));
}
@Override

View File

@@ -184,12 +184,8 @@ public class UidFieldMapper extends AbstractFieldMapper<Uid> implements RootMapp
return Uid.createUid(value.toString());
}
public Term term(String type, String id) {
return term(Uid.createUid(type, id));
}
public Term term(String uid) {
return names().createIndexNameTerm(uid);
return createTerm(uid);
}
@Override

View File

@@ -534,7 +534,7 @@ public class IndexShard extends AbstractIndexShardComponent {
public Engine.Delete prepareDelete(String type, String id, long version, VersionType versionType, Engine.Operation.Origin origin) {
long startTime = System.nanoTime();
final DocumentMapper documentMapper = docMapper(type).v1();
return new Engine.Delete(type, id, documentMapper.uidMapper().term(type, id), version, versionType, origin, startTime, false);
return new Engine.Delete(type, id, documentMapper.uidMapper().term(Uid.createUid(type, id)), version, versionType, origin, startTime, false);
}
public void delete(Engine.Delete delete) {

View File

@@ -924,10 +924,10 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
final long start = System.nanoTime();
indexFieldDataService.getForField(fieldMapper).load(ctx);
if (indexShard.warmerService().logger().isTraceEnabled()) {
indexShard.warmerService().logger().trace("warmed fielddata for [{}], took [{}]", fieldMapper.names().name(), TimeValue.timeValueNanos(System.nanoTime() - start));
indexShard.warmerService().logger().trace("warmed fielddata for [{}], took [{}]", fieldMapper.names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
}
} catch (Throwable t) {
indexShard.warmerService().logger().warn("failed to warm-up fielddata for [{}]", t, fieldMapper.names().name());
indexShard.warmerService().logger().warn("failed to warm-up fielddata for [{}]", t, fieldMapper.names().fullName());
} finally {
latch.countDown();
}
@@ -976,10 +976,10 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
IndexFieldData.Global ifd = indexFieldDataService.getForField(fieldMapper);
ifd.loadGlobal(context.reader());
if (indexShard.warmerService().logger().isTraceEnabled()) {
indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldMapper.names().name(), TimeValue.timeValueNanos(System.nanoTime() - start));
indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldMapper.names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
}
} catch (Throwable t) {
indexShard.warmerService().logger().warn("failed to warm-up global ordinals for [{}]", t, fieldMapper.names().name());
indexShard.warmerService().logger().warn("failed to warm-up global ordinals for [{}]", t, fieldMapper.names().fullName());
} finally {
latch.countDown();
}

View File

@@ -57,7 +57,7 @@ public final class HighlightUtils {
} else {
SourceLookup sourceLookup = searchContext.lookup().source();
sourceLookup.setSegmentAndDocument(hitContext.readerContext(), hitContext.docId());
textsToHighlight = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.names().sourcePath()));
textsToHighlight = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.names().fullName()));
}
assert textsToHighlight != null;
return textsToHighlight;

View File

@@ -60,7 +60,7 @@ public class SourceScoreOrderFragmentsBuilder extends ScoreOrderFragmentsBuilder
SourceLookup sourceLookup = searchContext.lookup().source();
sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId);
List<Object> values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.names().sourcePath()));
List<Object> values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.names().fullName()));
Field[] fields = new Field[values.size()];
for (int i = 0; i < values.size(); i++) {
fields[i] = new Field(mapper.names().indexName(), values.get(i).toString(), TextField.TYPE_NOT_STORED);

View File

@@ -56,7 +56,7 @@ public class SourceSimpleFragmentsBuilder extends SimpleFragmentsBuilder {
SourceLookup sourceLookup = searchContext.lookup().source();
sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId);
List<Object> values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.names().sourcePath()));
List<Object> values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.names().fullName()));
if (values.isEmpty()) {
return EMPTY_FIELDS;
}

View File

@@ -236,7 +236,7 @@ public class ExternalMapper extends AbstractFieldMapper<Object> {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name());
builder.startObject(names().shortName());
builder.field("type", mapperName);
multiFields.toXContent(builder, params);
builder.endObject();

View File

@@ -306,7 +306,7 @@ public class CompletionPostingsFormatTest extends ElasticsearchTestCase {
assertThat(reader.leaves().size(), equalTo(1));
assertThat(reader.leaves().get(0).reader().numDocs(), equalTo(weights.length));
LeafReaderContext atomicReaderContext = reader.leaves().get(0);
Terms luceneTerms = atomicReaderContext.reader().terms(mapper.name());
Terms luceneTerms = atomicReaderContext.reader().terms(mapper.names().fullName());
Lookup lookup = ((Completion090PostingsFormat.CompletionTerms) luceneTerms).getLookup(mapper, new CompletionSuggestionContext(null));
reader.close();
writer.close();