Mirror of https://github.com/honeymoose/OpenSearch.git, synced 2025-02-26 06:46:10 +00:00
Merge pull request #16589 from jpountz/feature/keyword
Add a new `keyword` field.
Commit 70f19b2c64
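The new `keyword` type is an alternative to the old `string` + `"index": "not_analyzed"` mapping: values are indexed verbatim as single terms (not tokenized, norms omitted, postings limited to DOCS) and get sorted-set doc values by default. Below is a minimal integration-test style sketch of how the type is used, modelled on the test changes in this diff; the index, type, and field names are illustrative only and are not part of this commit.

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.test.ESIntegTestCase;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;

public class KeywordFieldUsageSketchIT extends ESIntegTestCase {
    public void testKeywordFieldSketch() throws Exception {
        // "type=keyword" replaces the former "type=string,index=not_analyzed" shorthand
        assertAcked(prepareCreate("test").addMapping("doc", "city", "type=keyword"));
        ensureGreen("test");

        client().prepareIndex("test", "doc", "1").setSource("city", "Memphis").get();
        client().admin().indices().prepareRefresh("test").get();

        // keyword values are single, un-analyzed terms, so an exact term query matches as-is
        SearchResponse response = client().prepareSearch("test")
                .setQuery(QueryBuilders.termQuery("city", "Memphis"))
                .get();
        assertEquals(1L, response.getHits().getTotalHits());
    }
}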
@ -36,6 +36,7 @@ import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.shard.ShardId;
@ -94,6 +95,7 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
static {
Map<String, IndexFieldData.Builder> buildersByTypeBuilder = new HashMap<>();
buildersByTypeBuilder.put("string", new PagedBytesIndexFieldData.Builder());
buildersByTypeBuilder.put(KeywordFieldMapper.CONTENT_TYPE, MISSING_DOC_VALUES_BUILDER);
buildersByTypeBuilder.put("float", MISSING_DOC_VALUES_BUILDER);
buildersByTypeBuilder.put("double", MISSING_DOC_VALUES_BUILDER);
buildersByTypeBuilder.put("byte", MISSING_DOC_VALUES_BUILDER);
@ -110,6 +112,7 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo

docValuesBuildersByType = MapBuilder.<String, IndexFieldData.Builder>newMapBuilder()
.put("string", new DocValuesIndexFieldData.Builder())
.put(KeywordFieldMapper.CONTENT_TYPE, new DocValuesIndexFieldData.Builder())
.put("float", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.FLOAT))
.put("double", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.DOUBLE))
.put("byte", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.BYTE))
@ -126,6 +129,9 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
.put(Tuple.tuple("string", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder())
.put(Tuple.tuple("string", DISABLED_FORMAT), DISABLED_BUILDER)

.put(Tuple.tuple(KeywordFieldMapper.CONTENT_TYPE, DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder())
.put(Tuple.tuple(KeywordFieldMapper.CONTENT_TYPE, DISABLED_FORMAT), DISABLED_BUILDER)

.put(Tuple.tuple("float", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.FLOAT))
.put(Tuple.tuple("float", DISABLED_FORMAT), DISABLED_BUILDER)
@ -0,0 +1,274 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper.core;

import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;

/**
 * A field mapper for keywords. This mapper accepts strings and indexes them as-is.
 */
public final class KeywordFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll {

public static final String CONTENT_TYPE = "keyword";

public static class Defaults {
public static final MappedFieldType FIELD_TYPE = new KeywordFieldType();

static {
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.freeze();
}

public static final String NULL_VALUE = null;
public static final int IGNORE_ABOVE = Integer.MAX_VALUE;
}

public static class Builder extends FieldMapper.Builder<Builder, KeywordFieldMapper> {

protected String nullValue = Defaults.NULL_VALUE;
protected int ignoreAbove = Defaults.IGNORE_ABOVE;

public Builder(String name) {
super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
builder = this;
}

public Builder ignoreAbove(int ignoreAbove) {
if (ignoreAbove < 0) {
throw new IllegalArgumentException("[ignore_above] must be positive, got " + ignoreAbove);
}
this.ignoreAbove = ignoreAbove;
return this;
}

@Override
public Builder indexOptions(IndexOptions indexOptions) {
if (indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) > 0) {
throw new IllegalArgumentException("The [keyword] field does not support positions, got [index_options]="
+ indexOptionToString(indexOptions));
}
return super.indexOptions(indexOptions);
}

@Override
public KeywordFieldMapper build(BuilderContext context) {
setupFieldType(context);
KeywordFieldMapper fieldMapper = new KeywordFieldMapper(
name, fieldType, defaultFieldType, ignoreAbove,
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
return fieldMapper.includeInAll(includeInAll);
}
}

public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
KeywordFieldMapper.Builder builder = new KeywordFieldMapper.Builder(name);
parseField(builder, name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
if (propNode == null) {
throw new MapperParsingException("Property [null_value] cannot be null.");
}
builder.nullValue(propNode.toString());
iterator.remove();
} else if (propName.equals("ignore_above")) {
builder.ignoreAbove(XContentMapValues.nodeIntegerValue(propNode, -1));
iterator.remove();
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
iterator.remove();
}
}
return builder;
}
}

public static final class KeywordFieldType extends MappedFieldType {

public KeywordFieldType() {}

protected KeywordFieldType(KeywordFieldType ref) {
super(ref);
}

public KeywordFieldType clone() {
return new KeywordFieldType(this);
}

@Override
public String typeName() {
return CONTENT_TYPE;
}

@Override
public String value(Object value) {
if (value == null) {
return null;
}
return value.toString();
}

@Override
public Query nullValueQuery() {
if (nullValue() == null) {
return null;
}
return termQuery(nullValue(), null);
}
}

private Boolean includeInAll;
private int ignoreAbove;

protected KeywordFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
int ignoreAbove, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
assert fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) <= 0;
this.ignoreAbove = ignoreAbove;
}

@Override
protected KeywordFieldMapper clone() {
return (KeywordFieldMapper) super.clone();
}

@Override
public KeywordFieldMapper includeInAll(Boolean includeInAll) {
if (includeInAll != null) {
KeywordFieldMapper clone = clone();
clone.includeInAll = includeInAll;
return clone;
} else {
return this;
}
}

@Override
public KeywordFieldMapper includeInAllIfNotSet(Boolean includeInAll) {
if (includeInAll != null && this.includeInAll == null) {
KeywordFieldMapper clone = clone();
clone.includeInAll = includeInAll;
return clone;
} else {
return this;
}
}

@Override
public KeywordFieldMapper unsetIncludeInAll() {
if (includeInAll != null) {
KeywordFieldMapper clone = clone();
clone.includeInAll = null;
return clone;
} else {
return this;
}
}

@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
final String value;
if (context.externalValueSet()) {
value = context.externalValue().toString();
} else {
XContentParser parser = context.parser();
if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
value = fieldType().nullValueAsString();
} else {
value = parser.textOrNull();
}
}

if (value == null || value.length() > ignoreAbove) {
return;
}

if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType().name(), value, fieldType().boost());
}

if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
Field field = new Field(fieldType().name(), value, fieldType());
fields.add(field);
}
if (fieldType().hasDocValues()) {
fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(value)));
}
}

@Override
protected String contentType() {
return CONTENT_TYPE;
}

@Override
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
super.doMerge(mergeWith, updateAllTypes);
this.includeInAll = ((KeywordFieldMapper) mergeWith).includeInAll;
this.ignoreAbove = ((KeywordFieldMapper) mergeWith).ignoreAbove;
}

@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);

if (includeDefaults || fieldType().nullValue() != null) {
builder.field("null_value", fieldType().nullValue());
}

if (includeInAll != null) {
builder.field("include_in_all", includeInAll);
} else if (includeDefaults) {
builder.field("include_in_all", true);
}

if (includeDefaults || ignoreAbove != Defaults.IGNORE_ABOVE) {
builder.field("ignore_above", ignoreAbove);
}
}
}
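For orientation (not part of the change itself): when a keyword field is both indexed and doc-values enabled, `parseCreateField` above contributes two Lucene fields per value, an untokenized DOCS-only postings entry plus a `SortedSetDocValuesField`. A rough plain-Lucene sketch of that output follows; the field name and value are made up for illustration.

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.util.BytesRef;

public class KeywordLuceneFieldsSketch {
    public static void main(String[] args) {
        // Mirrors Defaults.FIELD_TYPE above: one term per value, no norms, postings without freqs/positions.
        FieldType keywordType = new FieldType();
        keywordType.setTokenized(false);
        keywordType.setOmitNorms(true);
        keywordType.setIndexOptions(IndexOptions.DOCS);
        keywordType.freeze();

        Document doc = new Document();
        doc.add(new Field("city", "Memphis", keywordType));                    // inverted index entry
        doc.add(new SortedSetDocValuesField("city", new BytesRef("Memphis"))); // doc values entry
        System.out.println(doc);
    }
}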
@ -42,7 +42,7 @@ import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;

import java.io.IOException;
@ -131,15 +131,15 @@ public class ParentFieldMapper extends MetadataFieldMapper {

@Override
public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String typeName) {
StringFieldMapper parentJoinField = createParentJoinFieldMapper(typeName, new BuilderContext(indexSettings, new ContentPath(0)));
KeywordFieldMapper parentJoinField = createParentJoinFieldMapper(typeName, new BuilderContext(indexSettings, new ContentPath(0)));
MappedFieldType childJoinFieldType = Defaults.FIELD_TYPE.clone();
childJoinFieldType.setName(joinField(null));
return new ParentFieldMapper(parentJoinField, childJoinFieldType, null, indexSettings);
}
}

static StringFieldMapper createParentJoinFieldMapper(String docType, BuilderContext context) {
StringFieldMapper.Builder parentJoinField = new StringFieldMapper.Builder(joinField(docType));
static KeywordFieldMapper createParentJoinFieldMapper(String docType, BuilderContext context) {
KeywordFieldMapper.Builder parentJoinField = new KeywordFieldMapper.Builder(joinField(docType));
parentJoinField.indexOptions(IndexOptions.NONE);
parentJoinField.docValues(true);
parentJoinField.fieldType().setDocValuesType(DocValuesType.SORTED);
@ -205,9 +205,9 @@ public class ParentFieldMapper extends MetadataFieldMapper {
private final String parentType;
// has no impact on field data settings, is just here for creating a join field,
// the parent field mapper in the child type pointing to this type determines the field data settings for this join field
private final StringFieldMapper parentJoinField;
private final KeywordFieldMapper parentJoinField;

private ParentFieldMapper(StringFieldMapper parentJoinField, MappedFieldType childJoinFieldType, String parentType, Settings indexSettings) {
private ParentFieldMapper(KeywordFieldMapper parentJoinField, MappedFieldType childJoinFieldType, String parentType, Settings indexSettings) {
super(NAME, childJoinFieldType, Defaults.FIELD_TYPE, indexSettings);
this.parentType = parentType;
this.parentJoinField = parentJoinField;
@ -28,7 +28,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;

import java.io.IOException;
@ -60,17 +60,16 @@ public class PercolatorFieldMapper extends FieldMapper {
@Override
public PercolatorFieldMapper build(BuilderContext context) {
context.path().add(name);
StringFieldMapper extractedTermsField = createStringFieldBuilder(EXTRACTED_TERMS_FIELD_NAME).build(context);
StringFieldMapper unknownQueryField = createStringFieldBuilder(UNKNOWN_QUERY_FIELD_NAME).build(context);
KeywordFieldMapper extractedTermsField = createStringFieldBuilder(EXTRACTED_TERMS_FIELD_NAME).build(context);
KeywordFieldMapper unknownQueryField = createStringFieldBuilder(UNKNOWN_QUERY_FIELD_NAME).build(context);
context.path().remove();
return new PercolatorFieldMapper(name(), fieldType, defaultFieldType, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo, queryShardContext, extractedTermsField, unknownQueryField);
}

static StringFieldMapper.Builder createStringFieldBuilder(String name) {
StringFieldMapper.Builder queryMetaDataFieldBuilder = new StringFieldMapper.Builder(name);
static KeywordFieldMapper.Builder createStringFieldBuilder(String name) {
KeywordFieldMapper.Builder queryMetaDataFieldBuilder = new KeywordFieldMapper.Builder(name);
queryMetaDataFieldBuilder.docValues(false);
queryMetaDataFieldBuilder.store(false);
queryMetaDataFieldBuilder.tokenized(false);
queryMetaDataFieldBuilder.indexOptions(IndexOptions.DOCS);
return queryMetaDataFieldBuilder;
}
@ -110,10 +109,10 @@ public class PercolatorFieldMapper extends FieldMapper {

private final boolean mapUnmappedFieldAsString;
private final QueryShardContext queryShardContext;
private final StringFieldMapper queryTermsField;
private final StringFieldMapper unknownQueryField;
private final KeywordFieldMapper queryTermsField;
private final KeywordFieldMapper unknownQueryField;

public PercolatorFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo, QueryShardContext queryShardContext, StringFieldMapper queryTermsField, StringFieldMapper unknownQueryField) {
public PercolatorFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo, QueryShardContext queryShardContext, KeywordFieldMapper queryTermsField, KeywordFieldMapper unknownQueryField) {
super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
this.queryShardContext = queryShardContext;
this.queryTermsField = queryTermsField;
@ -44,6 +44,7 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.shard.IndexShard;
@ -158,7 +159,8 @@ public class TermVectorsService {

private static boolean isValidField(MappedFieldType fieldType) {
// must be a string
if (!(fieldType instanceof StringFieldMapper.StringFieldType)) {
if (fieldType instanceof StringFieldMapper.StringFieldType == false
&& fieldType instanceof KeywordFieldMapper.KeywordFieldType == false) {
return false;
}
// and must be indexed
@ -34,6 +34,7 @@ import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
import org.elasticsearch.index.mapper.core.FloatFieldMapper;
import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.ShortFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
@ -96,6 +97,7 @@ public class IndicesModule extends AbstractModule {
registerMapper(DateFieldMapper.CONTENT_TYPE, new DateFieldMapper.TypeParser());
registerMapper(IpFieldMapper.CONTENT_TYPE, new IpFieldMapper.TypeParser());
registerMapper(StringFieldMapper.CONTENT_TYPE, new StringFieldMapper.TypeParser());
registerMapper(KeywordFieldMapper.CONTENT_TYPE, new KeywordFieldMapper.TypeParser());
registerMapper(TokenCountFieldMapper.CONTENT_TYPE, new TokenCountFieldMapper.TypeParser());
registerMapper(ObjectMapper.CONTENT_TYPE, new ObjectMapper.TypeParser());
registerMapper(ObjectMapper.NESTED_CONTENT_TYPE, new ObjectMapper.TypeParser());
@ -50,10 +50,12 @@ public class IndicesStatsTests extends ESSingleNodeTestCase {
.startObject("doc")
.startObject("properties")
.startObject("foo")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.field("doc_values", true)
.field("store", true)
.endObject()
.startObject("bar")
.field("type", "string")
.field("term_vector", "with_positions_offsets_payloads")
.endObject()
.endObject()
@ -61,7 +63,7 @@ public class IndicesStatsTests extends ESSingleNodeTestCase {
.endObject();
assertAcked(client().admin().indices().prepareCreate("test").addMapping("doc", mapping));
ensureGreen("test");
client().prepareIndex("test", "doc", "1").setSource("foo", "bar").get();
client().prepareIndex("test", "doc", "1").setSource("foo", "bar", "bar", "baz").get();
client().admin().indices().prepareRefresh("test").get();

IndicesStatsResponse rsp = client().admin().indices().prepareStats("test").get();
@ -73,7 +75,7 @@ public class IndicesStatsTests extends ESSingleNodeTestCase {
assertThat(stats.getDocValuesMemoryInBytes(), greaterThan(0L));

// now check multiple segments stats are merged together
client().prepareIndex("test", "doc", "2").setSource("foo", "bar").get();
client().prepareIndex("test", "doc", "2").setSource("foo", "bar", "bar", "baz").get();
client().admin().indices().prepareRefresh("test").get();

rsp = client().admin().indices().prepareStats("test").get();
@ -153,7 +153,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
"field1", "type=string,index=no", // no tvs
"field2", "type=string,index=no,store=true", // no tvs
"field3", "type=string,index=no,term_vector=yes", // no tvs
"field4", "type=string,index=not_analyzed", // yes tvs
"field4", "type=keyword", // yes tvs
"field5", "type=string,index=analyzed")); // yes tvs

ensureYellow();
@ -91,7 +91,7 @@ public class SimpleClusterStateIT extends ESIntegTestCase {
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1").field("type", "string").field("store", true).endObject()
.startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject()
.startObject("field2").field("type", "keyword").field("store", true).endObject()
.endObject().endObject().endObject())
.get();
@ -270,7 +270,7 @@ public class AckIT extends ESIntegTestCase {
createIndex("test");
ensureGreen();

assertAcked(client().admin().indices().preparePutMapping("test").setType("test").setSource("field", "type=string,index=not_analyzed"));
assertAcked(client().admin().indices().preparePutMapping("test").setType("test").setSource("field", "type=keyword"));

for (Client client : clients()) {
assertThat(getLocalClusterState(client).metaData().indices().get("test").mapping("test"), notNullValue());
@ -281,7 +281,7 @@ public class AckIT extends ESIntegTestCase {
createIndex("test");
ensureGreen();

PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test").setType("test").setSource("field", "type=string,index=not_analyzed").setTimeout("0s").get();
PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test").setType("test").setSource("field", "type=keyword").setTimeout("0s").get();
assertThat(putMappingResponse.isAcknowledged(), equalTo(false));
}
@ -301,8 +301,8 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase {
.setTemplate("te*")
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1").field("type", "string").field("store", "yes").endObject()
.startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject()
.startObject("field1").field("type", "string").field("store", true).endObject()
.startObject("field2").field("type", "keyword").field("store", true).endObject()
.endObject().endObject().endObject())
.execute().actionGet();
client.admin().indices().prepareAliases().addAlias("test", "test_alias", QueryBuilders.termQuery("field", "value")).execute().actionGet();
@ -643,7 +643,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
.put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
.build();

prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=string,index=not_analyzed").get();
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=keyword").get();
ensureGreen(IDX);

client().prepareIndex(IDX, "doc", "1").setSource("foo", "foo").get();
@ -725,7 +725,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
.build();

// only one node, so all primaries will end up on node1
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=string,index=not_analyzed").get();
prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=keyword").get();
ensureGreen(IDX);

// Index some documents
@ -473,8 +473,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.field("type", "string")
.startObject("fields")
.startObject("raw")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.endObject()
.endObject()
.endObject()
@ -96,7 +96,7 @@ public class CopyToMapperIntegrationIT extends ESIntegTestCase {
.startObject().startObject("template_raw")
.field("match", "*_raw")
.field("match_mapping_type", "string")
.startObject("mapping").field("type", "string").field("index", "not_analyzed").endObject()
.startObject("mapping").field("type", "keyword").endObject()
.endObject().endObject()

.startObject().startObject("template_all")
@ -0,0 +1,203 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper.core;

import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.junit.Before;

import java.io.IOException;

import static org.hamcrest.Matchers.equalTo;

public class KeywordFieldMapperTests extends ESSingleNodeTestCase {

IndexService indexService;
DocumentMapperParser parser;

@Before
public void before() {
indexService = createIndex("test");
parser = indexService.mapperService().documentMapperParser();
}

public void testDefaults() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "keyword").endObject().endObject()
.endObject().endObject().string();

DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

assertEquals(mapping, mapper.mappingSource().toString());

ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()
.bytes());

IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);

assertEquals("1234", fields[0].stringValue());
IndexableFieldType fieldType = fields[0].fieldType();
assertThat(fieldType.omitNorms(), equalTo(true));
assertFalse(fieldType.tokenized());
assertFalse(fieldType.stored());
assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS));
assertThat(fieldType.storeTermVectors(), equalTo(false));
assertThat(fieldType.storeTermVectorOffsets(), equalTo(false));
assertThat(fieldType.storeTermVectorPositions(), equalTo(false));
assertThat(fieldType.storeTermVectorPayloads(), equalTo(false));
assertEquals(DocValuesType.NONE, fieldType.docValuesType());

assertEquals(new BytesRef("1234"), fields[1].binaryValue());
fieldType = fields[1].fieldType();
assertThat(fieldType.indexOptions(), equalTo(IndexOptions.NONE));
assertEquals(DocValuesType.SORTED_SET, fieldType.docValuesType());
}

public void testIgnoreAbove() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "keyword").field("ignore_above", 5).endObject().endObject()
.endObject().endObject().string();

DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

assertEquals(mapping, mapper.mappingSource().toString());

ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "elk")
.endObject()
.bytes());

IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);

doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "elasticsearch")
.endObject()
.bytes());

fields = doc.rootDoc().getFields("field");
assertEquals(0, fields.length);
}

public void testNullValue() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "keyword").field("null_value", "uri").endObject().endObject()
.endObject().endObject().string();

DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

assertEquals(mapping, mapper.mappingSource().toString());

ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.endObject()
.bytes());

IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(0, fields.length);

doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.nullField("field")
.endObject()
.bytes());

fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
assertEquals("uri", fields[0].stringValue());
}

public void testEnableStore() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "keyword").field("store", true).endObject().endObject()
.endObject().endObject().string();

DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

assertEquals(mapping, mapper.mappingSource().toString());

ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()
.bytes());

IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
assertTrue(fields[0].fieldType().stored());
}

public void testDisableIndex() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "keyword").field("index", false).endObject().endObject()
.endObject().endObject().string();

DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

assertEquals(mapping, mapper.mappingSource().toString());

ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()
.bytes());

IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
assertEquals(IndexOptions.NONE, fields[0].fieldType().indexOptions());
assertEquals(DocValuesType.SORTED_SET, fields[0].fieldType().docValuesType());
}

public void testDisableDocValues() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "keyword").field("doc_values", false).endObject().endObject()
.endObject().endObject().string();

DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

assertEquals(mapping, mapper.mappingSource().toString());

ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("field", "1234")
.endObject()
.bytes());

IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.length);
assertEquals(DocValuesType.NONE, fields[0].fieldType().docValuesType());
}
}
@ -0,0 +1,29 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;

public class KeywordFieldTypeTests extends FieldTypeTestCase {
@Override
protected MappedFieldType createDefaultFieldType() {
return new KeywordFieldMapper.KeywordFieldType();
}
}
@ -92,8 +92,7 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase {
.field("store", true)
.startObject("fields")
.startObject("raw")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.field("store", true)
.endObject()
.endObject()
@ -30,6 +30,7 @@ import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.indices.mapper.MapperRegistry;
import org.elasticsearch.plugins.Plugin;
@ -106,6 +107,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
Map<String, Mapper.TypeParser> mapperParsers = new HashMap<>();
mapperParsers.put(ExternalMapperPlugin.EXTERNAL, new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "foo"));
mapperParsers.put(StringFieldMapper.CONTENT_TYPE, new StringFieldMapper.TypeParser());
mapperParsers.put(KeywordFieldMapper.CONTENT_TYPE, new KeywordFieldMapper.TypeParser());
MapperRegistry mapperRegistry = new MapperRegistry(mapperParsers, Collections.emptyMap());

DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
@ -121,8 +123,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
.field("store", true)
.startObject("fields")
.startObject("raw")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.field("store", true)
.endObject()
.endObject()
@ -57,7 +57,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
Map titleFields = ((Map) XContentMapValues.extractValue("properties.title.fields", mappingSource));
assertThat(titleFields.size(), equalTo(1));
assertThat(titleFields.get("not_analyzed"), notNullValue());
assertThat(((Map)titleFields.get("not_analyzed")).get("index").toString(), equalTo("not_analyzed"));
assertThat(((Map)titleFields.get("not_analyzed")).get("type").toString(), equalTo("keyword"));

client().prepareIndex("my-index", "my-type", "1")
.setSource("title", "Multi fields")
@ -86,7 +86,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
titleFields = ((Map) XContentMapValues.extractValue("properties.title.fields", mappingSource));
assertThat(titleFields.size(), equalTo(2));
assertThat(titleFields.get("not_analyzed"), notNullValue());
assertThat(((Map)titleFields.get("not_analyzed")).get("index").toString(), equalTo("not_analyzed"));
assertThat(((Map)titleFields.get("not_analyzed")).get("type").toString(), equalTo("keyword"));
assertThat(titleFields.get("uncased"), notNullValue());
assertThat(((Map)titleFields.get("uncased")).get("analyzer").toString(), equalTo("whitespace"));

@ -118,9 +118,8 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
assertThat(aField.get("fields"), notNullValue());

Map bField = ((Map) XContentMapValues.extractValue("properties.a.fields.b", mappingSource));
assertThat(bField.size(), equalTo(2));
assertThat(bField.get("type").toString(), equalTo("string"));
assertThat(bField.get("index").toString(), equalTo("not_analyzed"));
assertThat(bField.size(), equalTo(1));
assertThat(bField.get("type").toString(), equalTo("keyword"));

GeoPoint point = new GeoPoint(51, 19);
client().prepareIndex("my-index", "my-type", "1").setSource("a", point.toString()).setRefresh(true).get();
@ -142,8 +141,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
.field("analyzer", "simple")
.startObject("fields")
.startObject("b")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.endObject()
.endObject()
.endObject()
@ -161,9 +159,8 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
assertThat(aField.get("fields"), notNullValue());

Map bField = ((Map) XContentMapValues.extractValue("properties.a.fields.b", mappingSource));
assertThat(bField.size(), equalTo(2));
assertThat(bField.get("type").toString(), equalTo("string"));
assertThat(bField.get("index").toString(), equalTo("not_analyzed"));
assertThat(bField.size(), equalTo(1));
assertThat(bField.get("type").toString(), equalTo("keyword"));

client().prepareIndex("my-index", "my-type", "1").setSource("a", "my tokens").setRefresh(true).get();
SearchResponse countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "my tokens")).get();
@ -186,9 +183,8 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
assertThat(aField.get("fields"), notNullValue());

Map bField = ((Map) XContentMapValues.extractValue("properties.a.fields.b", mappingSource));
assertThat(bField.size(), equalTo(2));
assertThat(bField.get("type").toString(), equalTo("string"));
assertThat(bField.get("index").toString(), equalTo("not_analyzed"));
assertThat(bField.size(), equalTo(1));
assertThat(bField.get("type").toString(), equalTo("keyword"));

client().prepareIndex("my-index", "my-type", "1").setSource("a", "complete me").setRefresh(true).get();
SearchResponse countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "complete me")).get();
@ -211,9 +207,8 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
assertThat(aField.get("fields"), notNullValue());

Map bField = ((Map) XContentMapValues.extractValue("properties.a.fields.b", mappingSource));
assertThat(bField.size(), equalTo(2));
assertThat(bField.get("type").toString(), equalTo("string"));
assertThat(bField.get("index").toString(), equalTo("not_analyzed"));
assertThat(bField.size(), equalTo(1));
assertThat(bField.get("type").toString(), equalTo("keyword"));

client().prepareIndex("my-index", "my-type", "1").setSource("a", "127.0.0.1").setRefresh(true).get();
SearchResponse countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "127.0.0.1")).get();
@ -227,8 +222,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
.field("type", fieldType)
.startObject("fields")
.startObject("b")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.endObject()
.endObject()
.endObject()
@ -243,8 +237,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
.field("type", "string")
.startObject("fields")
.startObject("not_analyzed")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.endObject()
.endObject()
.endObject()
@ -152,8 +152,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
.field("index", "analyzed")
.startObject("fields")
.startObject("raw")
.field("type", "string")
.field("index","not_analyzed")
.field("type", "keyword")
.endObject()
.endObject()
.endObject()
@ -60,7 +60,7 @@ public class SimpleGetFieldMappingsIT extends ESIntegTestCase {
private XContentBuilder getMappingForType(String type) throws IOException {
return jsonBuilder().startObject().startObject(type).startObject("properties")
.startObject("field1").field("type", "string").endObject()
.startObject("obj").startObject("properties").startObject("subfield").field("type", "string").field("index", "not_analyzed").endObject().endObject().endObject()
.startObject("obj").startObject("properties").startObject("subfield").field("type", "keyword").endObject().endObject().endObject()
.endObject().endObject().endObject();
}

@ -147,8 +147,7 @@ public class SimpleGetFieldMappingsIT extends ESIntegTestCase {
assertThat((Map<String, Object>) response.fieldMappings("test", "type", "num").sourceAsMap().get("num"), hasEntry("type", (Object) "long"));
assertThat((Map<String, Object>) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), hasEntry("index", (Object) "analyzed"));
assertThat((Map<String, Object>) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), hasEntry("type", (Object) "string"));
assertThat((Map<String, Object>) response.fieldMappings("test", "type", "obj.subfield").sourceAsMap().get("subfield"), hasEntry("index", (Object) "not_analyzed"));
assertThat((Map<String, Object>) response.fieldMappings("test", "type", "obj.subfield").sourceAsMap().get("subfield"), hasEntry("type", (Object) "string"));
assertThat((Map<String, Object>) response.fieldMappings("test", "type", "obj.subfield").sourceAsMap().get("subfield"), hasEntry("type", (Object) "keyword"));

}
@ -229,7 +229,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase {
logger.info("Changing _default_ mappings field from analyzed to non-analyzed");
putResponse = client().admin().indices().preparePutMapping("test").setType(MapperService.DEFAULT_MAPPING).setSource(
JsonXContent.contentBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING)
.startObject("properties").startObject("f").field("type", "string").field("index", "not_analyzed").endObject().endObject()
.startObject("properties").startObject("f").field("type", "keyword").endObject().endObject()
.endObject().endObject()
).get();
assertThat(putResponse.isAcknowledged(), equalTo(true));
@ -238,7 +238,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase {
getResponse = client().admin().indices().prepareGetMappings("test").addTypes(MapperService.DEFAULT_MAPPING).get();
defaultMapping = getResponse.getMappings().get("test").get(MapperService.DEFAULT_MAPPING).sourceAsMap();
Map<String, Object> fieldSettings = (Map<String, Object>) ((Map) defaultMapping.get("properties")).get("f");
assertThat(fieldSettings, hasEntry("index", (Object) "not_analyzed"));
assertThat(fieldSettings, hasEntry("type", (Object) "keyword"));

// but we still validate the _default_ type
logger.info("Confirming _default_ mappings validation");
@ -75,8 +75,7 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {
.startObject("type")
.startObject("properties")
.startObject("test-str")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.field("doc_values", randomBoolean())
.endObject() // test-str
.startObject("test-num")
@ -332,8 +332,7 @@ public class OpenCloseIndexIT extends ESIntegTestCase {
startObject("type").
startObject("properties").
startObject("test")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.endObject().
endObject().
endObject()
@ -39,7 +39,7 @@ public class IndexTemplateBlocksIT extends ESIntegTestCase {
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1").field("type", "string").field("store", true).endObject()
.startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject()
.startObject("field2").field("type", "keyword").field("store", true).endObject()
.endObject().endObject().endObject())
.execute().actionGet();

@ -79,7 +79,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1").field("type", "string").field("store", true).endObject()
.startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject()
.startObject("field2").field("type", "keyword").field("store", true).endObject()
.endObject().endObject().endObject())
.get();

@ -146,7 +146,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1").field("type", "string").field("store", true).endObject()
.startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject()
.startObject("field2").field("type", "string").field("store", true).endObject()
.endObject().endObject().endObject())
.execute().actionGet();

@ -171,7 +171,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1").field("type", "string").field("store", true).endObject()
.startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject()
.startObject("field2").field("type", "keyword").field("store", true).endObject()
.endObject().endObject().endObject())
.execute().actionGet();

@ -191,7 +191,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1").field("type", "string").field("store", true).endObject()
.startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject()
.startObject("field2").field("type", "keyword").field("store", true).endObject()
.endObject().endObject().endObject())
.execute().actionGet();

@ -214,7 +214,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1").field("type", "string").field("store", true).endObject()
.startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject()
.startObject("field2").field("type", "keyword").field("store", true).endObject()
.endObject().endObject().endObject())
.execute().actionGet();

@ -224,7 +224,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1").field("type", "string").field("store", true).endObject()
.startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject()
.startObject("field2").field("type", "keyword").field("store", true).endObject()
.endObject().endObject().endObject())
.execute().actionGet();

@ -234,7 +234,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1").field("type", "string").field("store", true).endObject()
.startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject()
.startObject("field2").field("type", "keyword").field("store", true).endObject()
.endObject().endObject().endObject())
.execute().actionGet();
@ -861,7 +861,7 @@ public class PercolatorIT extends ESIntegTestCase {

public void testPercolateWithAliasFilter() throws Exception {
assertAcked(prepareCreate("my-index")
.addMapping(PercolatorService.TYPE_NAME, "a", "type=string,index=not_analyzed")
.addMapping(PercolatorService.TYPE_NAME, "a", "type=keyword")
.addAlias(new Alias("a").filter(QueryBuilders.termQuery("a", "a")))
.addAlias(new Alias("b").filter(QueryBuilders.termQuery("a", "b")))
.addAlias(new Alias("c").filter(QueryBuilders.termQuery("a", "c")))
@ -402,8 +402,8 @@ public class ChildrenIT extends ESIntegTestCase {

assertAcked(
prepareCreate("index")
.addMapping("parentType", "name", "type=string,index=not_analyzed", "town", "type=string,index=not_analyzed")
.addMapping("childType", "_parent", "type=parentType", "name", "type=string,index=not_analyzed", "age", "type=integer")
.addMapping("parentType", "name", "type=keyword", "town", "type=keyword")
.addMapping("childType", "_parent", "type=parentType", "name", "type=keyword", "age", "type=integer")
);
List<IndexRequestBuilder> requests = new ArrayList<>();
requests.add(client().prepareIndex("index", "parentType", "1").setSource("name", "Bob", "town", "Memphis"));
@ -83,11 +83,11 @@ public class GeoDistanceIT extends ESIntegTestCase {
public void setupSuiteScopeCluster() throws Exception {
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
prepareCreate("idx").setSettings(settings)
.addMapping("type", "location", "type=geo_point", "city", "type=string,index=not_analyzed")
.addMapping("type", "location", "type=geo_point", "city", "type=keyword")
.execute().actionGet();

prepareCreate("idx-multi")
.addMapping("type", "location", "type=geo_point", "city", "type=string,index=not_analyzed")
.addMapping("type", "location", "type=geo_point", "city", "type=keyword")
.execute().actionGet();

createIndex("idx_unmapped");
@ -90,7 +90,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();

assertAcked(prepareCreate("idx").setSettings(settings)
.addMapping("type", "location", "type=geo_point", "city", "type=string,index=not_analyzed"));
.addMapping("type", "location", "type=geo_point", "city", "type=keyword"));

List<IndexRequestBuilder> cities = new ArrayList<>();
Random random = getRandom();
@ -115,7 +115,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
indexRandom(true, cities);

assertAcked(prepareCreate("multi_valued_idx").setSettings(settings)
.addMapping("type", "location", "type=geo_point", "city", "type=string,index=not_analyzed"));
.addMapping("type", "location", "type=geo_point", "city", "type=keyword"));

cities = new ArrayList<>();
multiValuedExpectedDocCountsForGeoHash = new ObjectIntHashMap<>(numDocs * 2);
@ -367,12 +367,12 @@ public class NestedIT extends ESIntegTestCase {
.field("type", "nested")
.startObject("properties")
.startObject("cid").field("type", "long").endObject()
.startObject("identifier").field("type", "string").field("index", "not_analyzed").endObject()
.startObject("identifier").field("type", "keyword").endObject()
.startObject("tags")
.field("type", "nested")
.startObject("properties")
.startObject("tid").field("type", "long").endObject()
.startObject("name").field("type", "string").field("index", "not_analyzed").endObject()
.startObject("name").field("type", "keyword").endObject()
.endObject()
.endObject()
.endObject()

@ -386,7 +386,7 @@ public class NestedIT extends ESIntegTestCase {
.startObject("properties")
.startObject("end").field("type", "date").field("format", "dateOptionalTime").endObject()
.startObject("start").field("type", "date").field("format", "dateOptionalTime").endObject()
.startObject("label").field("type", "string").field("index", "not_analyzed").endObject()
.startObject("label").field("type", "keyword").endObject()
.endObject()
.endObject()
.endObject()
@ -61,12 +61,12 @@ public class SamplerIT extends ESIntegTestCase {
@Override
public void setupSuiteScopeCluster() throws Exception {
assertAcked(prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS, SETTING_NUMBER_OF_REPLICAS, 0).addMapping(
"book", "author", "type=string,index=not_analyzed", "name", "type=string,index=analyzed", "genre",
"type=string,index=not_analyzed", "price", "type=float"));
"book", "author", "type=keyword", "name", "type=string,index=analyzed", "genre",
"type=keyword", "price", "type=float"));
createIndex("idx_unmapped");
// idx_unmapped_author is same as main index but missing author field
assertAcked(prepareCreate("idx_unmapped_author").setSettings(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS, SETTING_NUMBER_OF_REPLICAS, 0)
.addMapping("book", "name", "type=string,index=analyzed", "genre", "type=string,index=not_analyzed", "price", "type=float"));
.addMapping("book", "name", "type=string,index=analyzed", "genre", "type=keyword", "price", "type=float"));

ensureGreen();
String data[] = {
@ -32,7 +32,7 @@ import static org.hamcrest.Matchers.equalTo;

public class ShardSizeTermsIT extends ShardSizeTestCase {
public void testNoShardSizeString() throws Exception {
createIdx("type=string,index=not_analyzed");
createIdx("type=keyword");

indexData();

@ -55,7 +55,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
}

public void testShardSizeEqualsSizeString() throws Exception {
createIdx("type=string,index=not_analyzed");
createIdx("type=keyword");

indexData();

@ -79,7 +79,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {

public void testWithShardSizeString() throws Exception {

createIdx("type=string,index=not_analyzed");
createIdx("type=keyword");

indexData();

@ -103,7 +103,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {

public void testWithShardSizeStringSingleShard() throws Exception {

createIdx("type=string,index=not_analyzed");
createIdx("type=keyword");

indexData();

@ -126,7 +126,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
}

public void testNoShardSizeTermOrderString() throws Exception {
createIdx("type=string,index=not_analyzed");
createIdx("type=keyword");

indexData();
@ -75,7 +75,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
@Override
public void setupSuiteScopeCluster() throws Exception {
assertAcked(prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, 5, SETTING_NUMBER_OF_REPLICAS, 0).addMapping("fact",
"_routing", "required=true", "routing_id", "type=string,index=not_analyzed", "fact_category",
"_routing", "required=true", "routing_id", "type=keyword", "fact_category",
"type=integer,index=true", "description", "type=string,index=analyzed"));
createIndex("idx_unmapped");

@ -282,7 +282,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {

public void testDeletesIssue7951() throws Exception {
String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}";
String mappings = "{\"doc\": {\"properties\":{\"text\": {\"type\":\"string\",\"index\":\"not_analyzed\"}}}}";
String mappings = "{\"doc\": {\"properties\":{\"text\": {\"type\":\"keyword\"}}}}";
assertAcked(prepareCreate(INDEX_NAME).setSettings(settings).addMapping("doc", mappings));
String[] cat1v1 = {"constant", "one"};
String[] cat1v2 = {"constant", "uno"};
@ -75,7 +75,7 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase {
createIndex(UNMAPPED_IDX_NAME);
assertAcked(prepareCreate(IDX_NAME)
.addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=geo_point,geohash_prefix=true,geohash_precision=12",
MULTI_VALUED_FIELD_NAME, "type=geo_point", NUMBER_FIELD_NAME, "type=long", "tag", "type=string,index=not_analyzed"));
MULTI_VALUED_FIELD_NAME, "type=geo_point", NUMBER_FIELD_NAME, "type=long", "tag", "type=keyword"));

singleTopLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
singleBottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);

@ -136,7 +136,7 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase {
assertAcked(prepareCreate(EMPTY_IDX_NAME).addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=geo_point"));

assertAcked(prepareCreate(DATELINE_IDX_NAME)
.addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=geo_point", MULTI_VALUED_FIELD_NAME, "type=geo_point", NUMBER_FIELD_NAME, "type=long", "tag", "type=string,index=not_analyzed"));
.addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=geo_point", MULTI_VALUED_FIELD_NAME, "type=geo_point", NUMBER_FIELD_NAME, "type=long", "tag", "type=keyword"));

GeoPoint[] geoValues = new GeoPoint[5];
geoValues[0] = new GeoPoint(38, 178);

@ -154,7 +154,7 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase {
.endObject()));
}
assertAcked(prepareCreate(HIGH_CARD_IDX_NAME).setSettings(Settings.builder().put("number_of_shards", 2))
.addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=geo_point", MULTI_VALUED_FIELD_NAME, "type=geo_point", NUMBER_FIELD_NAME, "type=long,store=true", "tag", "type=string,index=not_analyzed"));
.addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=geo_point", MULTI_VALUED_FIELD_NAME, "type=geo_point", NUMBER_FIELD_NAME, "type=long,store=true", "tag", "type=keyword"));

for (int i = 0; i < 2000; i++) {
singleVal = singleValues[i % numUniqueGeoPoints];
@ -120,8 +120,7 @@ public class TopHitsIT extends ESIntegTestCase {
.field("type", "nested")
.startObject("properties")
.startObject("name")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.endObject()
.endObject()
.endObject()

@ -63,8 +63,7 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase {
startObject("type").
startObject("properties").
startObject("test")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.endObject().
endObject().
endObject()

@ -57,8 +57,7 @@ public class SearchWithRandomIOExceptionsIT extends ESIntegTestCase {
startObject("type").
startObject("properties").
startObject("test")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.endObject().
endObject().
endObject()
@ -1524,8 +1524,7 @@ public class SearchQueryIT extends ESIntegTestCase {
.field("format", "epoch_millis")
.endObject()
.startObject("bs")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.endObject()
.endObject()
.endObject()

@ -295,7 +295,7 @@ public class SearchAfterIT extends ESIntegTestCase {
mappings.add("type=boolean");
} else if (types.get(i) instanceof Text) {
mappings.add("field" + Integer.toString(i));
mappings.add("type=string,index=not_analyzed");
mappings.add("type=keyword");
} else {
fail("Can't match type [" + type + "]");
}
@ -187,8 +187,7 @@ public class FieldSortIT extends ESIntegTestCase {
"{\"$type\": "
+ " {\"properties\": "
+ " {\"grantee\": "
+ " {\"index\": "
+ " \"not_analyzed\", "
+ " { \"index\": \"not_analyzed\", "
+ " \"term_vector\": \"with_positions_offsets\", "
+ " \"type\": \"string\", "
+ " \"analyzer\": \"snowball\", "

@ -265,12 +264,10 @@ public class FieldSortIT extends ESIntegTestCase {
.startObject("type")
.startObject("properties")
.startObject("sparse_bytes")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.endObject()
.startObject("dense_bytes")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.endObject()
.endObject()
.endObject()

@ -518,7 +515,7 @@ public class FieldSortIT extends ESIntegTestCase {
assertAcked(prepareCreate("test")
.addMapping("type1",
XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("str_value")
.field("type", "string").field("index", "not_analyzed").startObject("fielddata")
.field("type", "keyword").startObject("fielddata")
.field("format", random().nextBoolean() ? "doc_values" : null).endObject().endObject().startObject("boolean_value")
.field("type", "boolean").endObject().startObject("byte_value").field("type", "byte").startObject("fielddata")
.field("format", random().nextBoolean() ? "doc_values" : null).endObject().endObject().startObject("short_value")
@ -826,8 +823,7 @@ public class FieldSortIT extends ESIntegTestCase {
.startObject("type1")
.startObject("properties")
.startObject("value")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.endObject()
.endObject()
.endObject()

@ -950,7 +946,7 @@ public class FieldSortIT extends ESIntegTestCase {
.field("type", "float").startObject("fielddata").field("format", random().nextBoolean() ? "doc_values" : null)
.endObject().endObject().startObject("double_values").field("type", "double").startObject("fielddata")
.field("format", random().nextBoolean() ? "doc_values" : null).endObject().endObject().startObject("string_values")
.field("type", "string").field("index", "not_analyzed").startObject("fielddata")
.field("type", "keyword").startObject("fielddata")
.field("format", random().nextBoolean() ? "doc_values" : null).endObject().endObject().endObject().endObject()
.endObject()));
ensureGreen();

@ -1259,7 +1255,7 @@ public class FieldSortIT extends ESIntegTestCase {
public void testSortOnRareField() throws IOException {
assertAcked(prepareCreate("test").addMapping("type1",
XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("string_values")
.field("type", "string").field("index", "not_analyzed").startObject("fielddata")
.field("type", "keyword").startObject("fielddata")
.field("format", random().nextBoolean() ? "doc_values" : null).endObject().endObject().endObject().endObject()
.endObject()));
ensureGreen();

@ -1437,8 +1433,7 @@ public class FieldSortIT extends ESIntegTestCase {
.field("type", "string")
.startObject("fields")
.startObject("sub")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.endObject()
.endObject()
.endObject()
@ -188,13 +188,11 @@ public class EquivalenceTests extends ESIntegTestCase {
.startObject("type")
.startObject("properties")
.startObject("string_values")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.startObject("fields")
.startObject("doc_values")
.field("type", "string")
.field("index", "no")
.field("doc_values", true)
.field("type", "keyword")
.field("index", false)
.endObject()
.endObject()
.endObject()
@ -614,10 +614,10 @@ public class SearchFieldsTests extends ESIntegTestCase {

public void testScriptFields() throws Exception {
assertAcked(prepareCreate("index").addMapping("type",
"s", "type=string,index=not_analyzed",
"s", "type=keyword",
"l", "type=long",
"d", "type=double",
"ms", "type=string,index=not_analyzed",
"ms", "type=keyword",
"ml", "type=long",
"md", "type=double").get());
final int numDocs = randomIntBetween(3, 8);
@ -65,7 +65,7 @@ public class SimpleSortTests extends ESIntegTestCase {
Random random = random();
assertAcked(prepareCreate("test")
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("str_value").field("type", "string").field("index", "not_analyzed").startObject("fielddata").field("format", random().nextBoolean() ? "doc_values" : null).endObject().endObject()
.startObject("str_value").field("type", "keyword").startObject("fielddata").field("format", random().nextBoolean() ? "doc_values" : null).endObject().endObject()
.startObject("boolean_value").field("type", "boolean").endObject()
.startObject("byte_value").field("type", "byte").startObject("fielddata").field("format", random().nextBoolean() ? "doc_values" : null).endObject().endObject()
.startObject("short_value").field("type", "short").startObject("fielddata").field("format", random().nextBoolean() ? "doc_values" : null).endObject().endObject()

@ -226,7 +226,7 @@ public class SimpleSortTests extends ESIntegTestCase {
// We have to specify mapping explicitly because by the time search is performed dynamic mapping might not
// be propagated to all nodes yet and sort operation fail when the sort field is not defined
String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("svalue").field("type", "string").field("index", "not_analyzed").startObject("fielddata").field("format", random().nextBoolean() ? "doc_values" : null).endObject().endObject()
.startObject("svalue").field("type", "keyword").startObject("fielddata").field("format", random().nextBoolean() ? "doc_values" : null).endObject().endObject()
.endObject().endObject().endObject().string();
assertAcked(prepareCreate("test").addMapping("type1", mapping));
ensureGreen();
@ -39,8 @@
analyzer: whitespace
fields:
text_raw:
type: string
index: not_analyzed
type: keyword

- do:

@ -48,4 +47,4 @@
index: test_index

- match: {test_index.mappings.test_type.properties.text1.type: string}
- match: {test_index.mappings.test_type.properties.text1.fields.text_raw.index: not_analyzed}
- match: {test_index.mappings.test_type.properties.text1.fields.text_raw.type: keyword}
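For the YAML REST test above, the multi-field mapping it ends up asserting on could be sketched in builder form as follows. This is a hedged illustration, not part of the diff: only the field names, analyzer, and types come from the test itself, while the class name and import path are assumptions.

// Hedged sketch: a whitespace-analyzed string field with a keyword sub-field,
// mirroring the text1/text_raw mapping that the REST test above checks.
import org.elasticsearch.common.xcontent.XContentFactory;

public class MultiFieldKeywordSketch {
    public static void main(String[] args) throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject()
                .startObject("test_type").startObject("properties")
                    .startObject("text1")
                        .field("type", "string")
                        .field("analyzer", "whitespace")
                        .startObject("fields")
                            .startObject("text_raw").field("type", "keyword").endObject()
                        .endObject()
                    .endObject()
                .endObject().endObject()
                .endObject().string();
        System.out.println(mapping);
    }
}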