Add a text field.
This new field is intended to replace analyzed string fields.
parent d313cdd7d0
commit 4f8895eae3
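Illustration only, not part of the diff: after this change a field that used to be mapped as an analyzed string can be declared with the new type. The sketch below uses the jsonBuilder style of the tests in this commit; the field name is made up, and the options shown (analyzer, search_analyzer, position_increment_gap) are ones the new mapper understands. It would live inside any test-style method that may throw IOException.

    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;

    // Hypothetical mapping for one full-text field ("body" is a made-up name).
    XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
        .startObject("properties")
            .startObject("body")
                .field("type", "text")                    // was: "string" (analyzed)
                .field("analyzer", "standard")
                .field("search_analyzer", "whitespace")
                .field("position_increment_gap", 100)
            .endObject()
        .endObject()
    .endObject().endObject();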
@@ -25,7 +25,7 @@ import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.AbstractIndexComponent;
 import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.mapper.core.StringFieldMapper;
+import org.elasticsearch.index.mapper.core.TextFieldMapper;

 import java.io.Closeable;
 import java.util.HashMap;
@@ -78,7 +78,7 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable
  * and 100 afterwards so we override the positionIncrementGap if it
  * doesn't match here.
  */
-int overridePositionIncrementGap = StringFieldMapper.Defaults.POSITION_INCREMENT_GAP;
+int overridePositionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP;
 if (analyzerFactory instanceof CustomAnalyzerProvider) {
 ((CustomAnalyzerProvider) analyzerFactory).build(this);
 /*

@@ -22,7 +22,7 @@ package org.elasticsearch.index.analysis;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.mapper.core.StringFieldMapper;
+import org.elasticsearch.index.mapper.core.TextFieldMapper;

 import java.util.ArrayList;
 import java.util.List;
@@ -74,7 +74,7 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Custom
 tokenFilters.add(tokenFilter);
 }

-int positionIncrementGap = StringFieldMapper.Defaults.POSITION_INCREMENT_GAP;
+int positionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP;

 if (analyzerSettings.getAsMap().containsKey("position_offset_gap")){
 if (indexSettings.getIndexVersionCreated().before(Version.V_2_0_0)){

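Reviewer note, not part of the diff: both call sites above now read the shared default from the new mapper's Defaults class, which is added later in this commit. Per its javadoc, the gap of 100 exists so that phrase queries of reasonably high slop cannot match across two values of a multi-valued field. A trivial sketch of the lookup:

    // The constant that AnalysisService and CustomAnalyzerProvider now rely on.
    int positionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP; // 100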
@@ -37,6 +37,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
 import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
+import org.elasticsearch.index.mapper.core.TextFieldMapper;
 import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
 import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
 import org.elasticsearch.index.shard.ShardId;
@@ -95,6 +96,7 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
 static {
 Map<String, IndexFieldData.Builder> buildersByTypeBuilder = new HashMap<>();
 buildersByTypeBuilder.put("string", new PagedBytesIndexFieldData.Builder());
+buildersByTypeBuilder.put(TextFieldMapper.CONTENT_TYPE, new PagedBytesIndexFieldData.Builder());
 buildersByTypeBuilder.put(KeywordFieldMapper.CONTENT_TYPE, MISSING_DOC_VALUES_BUILDER);
 buildersByTypeBuilder.put("float", MISSING_DOC_VALUES_BUILDER);
 buildersByTypeBuilder.put("double", MISSING_DOC_VALUES_BUILDER);
@@ -129,6 +131,9 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
 .put(Tuple.tuple("string", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder())
 .put(Tuple.tuple("string", DISABLED_FORMAT), DISABLED_BUILDER)

+.put(Tuple.tuple(TextFieldMapper.CONTENT_TYPE, PAGED_BYTES_FORMAT), new PagedBytesIndexFieldData.Builder())
+.put(Tuple.tuple(TextFieldMapper.CONTENT_TYPE, DISABLED_FORMAT), DISABLED_BUILDER)
+
 .put(Tuple.tuple(KeywordFieldMapper.CONTENT_TYPE, DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder())
 .put(Tuple.tuple(KeywordFieldMapper.CONTENT_TYPE, DISABLED_FORMAT), DISABLED_BUILDER)

@@ -35,9 +35,13 @@ import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType;
 import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
 import org.elasticsearch.index.mapper.core.FloatFieldMapper;
 import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
+import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
+import org.elasticsearch.index.mapper.core.KeywordFieldMapper.KeywordFieldType;
 import org.elasticsearch.index.mapper.core.LongFieldMapper;
 import org.elasticsearch.index.mapper.core.StringFieldMapper;
 import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType;
+import org.elasticsearch.index.mapper.core.TextFieldMapper;
+import org.elasticsearch.index.mapper.core.TextFieldMapper.TextFieldType;
 import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
@@ -452,6 +456,16 @@ class DocumentParser implements Closeable {
 if (builder == null) {
 builder = new StringFieldMapper.Builder(currentFieldName);
 }
+} else if (fieldType instanceof TextFieldType) {
+builder = context.root().findTemplateBuilder(context, currentFieldName, "string");
+if (builder == null) {
+builder = new TextFieldMapper.Builder(currentFieldName);
+}
+} else if (fieldType instanceof KeywordFieldType) {
+builder = context.root().findTemplateBuilder(context, currentFieldName, "string");
+if (builder == null) {
+builder = new KeywordFieldMapper.Builder(currentFieldName);
+}
 } else if (fieldType instanceof DateFieldType) {
 builder = context.root().findTemplateBuilder(context, currentFieldName, "date");
 if (builder == null) {

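Illustration only, not part of the commit: the branches added above keep using "string" as the dynamic-template lookup key, so a template keyed on that type should still be found when the detected field type is text or keyword; without a template, an unmapped string value now gets a TextFieldMapper.Builder (or KeywordFieldMapper.Builder) instead of the string builder. A hypothetical template such a lookup could resolve to, in the same jsonBuilder style:

    // Hypothetical dynamic template (names are made up).
    XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
        .startArray("dynamic_templates")
            .startObject().startObject("strings_as_text")
                .field("match_mapping_type", "string")
                .startObject("mapping").field("type", "text").endObject()
            .endObject().endObject()
        .endArray()
    .endObject().endObject();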
@@ -185,6 +185,11 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
 return builder;
 }

+public T searchQuoteAnalyzer(NamedAnalyzer searchQuoteAnalyzer) {
+this.fieldType.setSearchQuoteAnalyzer(searchQuoteAnalyzer);
+return builder;
+}
+
 public T includeInAll(Boolean includeInAll) {
 this.includeInAll = includeInAll;
 return builder;
@@ -293,7 +298,9 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
 try {
 parseCreateField(context, fields);
 for (Field field : fields) {
-if (!customBoost()) {
+if (!customBoost()
+// don't set boosts eg. on dv fields
+&& field.fieldType().indexOptions() != IndexOptions.NONE) {
 field.setBoost(fieldType().boost());
 }
 context.doc().add(field);

@@ -92,6 +92,14 @@ public final class KeywordFieldMapper extends FieldMapper implements AllFieldMap
 return super.indexOptions(indexOptions);
 }

+@Override
+protected void setupFieldType(BuilderContext context) {
+if (!omitNormsSet && fieldType.boost() != 1.0f) {
+fieldType.setOmitNorms(false);
+}
+super.setupFieldType(context);
+}
+
 @Override
 public KeywordFieldMapper build(BuilderContext context) {
 setupFieldType(context);

@@ -31,7 +31,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
-import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
@@ -63,13 +62,6 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
 // NOTE, when adding defaults here, make sure you add them in the builder
 public static final String NULL_VALUE = null;

-/**
- * Post 2.0 default for position_increment_gap. Set to 100 so that
- * phrase queries of reasonably high slop will not match across field
- * values.
- */
-public static final int POSITION_INCREMENT_GAP = 100;
-
 public static final int IGNORE_ABOVE = -1;
 }

@@ -102,11 +94,6 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
 return this;
 }

-public Builder searchQuotedAnalyzer(NamedAnalyzer analyzer) {
-this.fieldType.setSearchQuoteAnalyzer(analyzer);
-return builder;
-}
-
 public Builder ignoreAbove(int ignoreAbove) {
 this.ignoreAbove = ignoreAbove;
 return this;
@@ -167,6 +154,9 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
 throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true], [false], [no], [not_analyzed] or [analyzed]");
 }
 }
+builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
+builder.fieldType().setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer());
+builder.fieldType().setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer());
 parseTextField(builder, fieldName, node, parserContext);
 for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
 Map.Entry<String, Object> entry = iterator.next();
@@ -178,30 +168,12 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
 }
 builder.nullValue(propNode.toString());
 iterator.remove();
-} else if (propName.equals("search_quote_analyzer")) {
-NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
-if (analyzer == null) {
-throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + fieldName + "]");
-}
-builder.searchQuotedAnalyzer(analyzer);
-iterator.remove();
 } else if (propName.equals("position_increment_gap")) {
 int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1);
 if (newPositionIncrementGap < 0) {
 throw new MapperParsingException("positions_increment_gap less than 0 aren't allowed.");
 }
 builder.positionIncrementGap(newPositionIncrementGap);
-// we need to update to actual analyzers if they are not set in this case...
-// so we can inject the position increment gap...
-if (builder.fieldType().indexAnalyzer() == null) {
-builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
-}
-if (builder.fieldType().searchAnalyzer() == null) {
-builder.fieldType().setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer());
-}
-if (builder.fieldType().searchQuoteAnalyzer() == null) {
-builder.fieldType().setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer());
-}
 iterator.remove();
 } else if (propName.equals("ignore_above")) {
 builder.ignoreAbove(XContentMapValues.nodeIntegerValue(propNode, -1));

@@ -0,0 +1,267 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.mapper.core;
+
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.search.Query;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.support.XContentMapValues;
+import org.elasticsearch.index.analysis.NamedAnalyzer;
+import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.Mapper;
+import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.ParseContext;
+import org.elasticsearch.index.mapper.internal.AllFieldMapper;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
+import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField;
+
+/** A {@link FieldMapper} for full-text fields. */
+public class TextFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll {
+
+    public static final String CONTENT_TYPE = "text";
+    private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1;
+
+    public static class Defaults {
+        public static final MappedFieldType FIELD_TYPE = new TextFieldType();
+
+        static {
+            FIELD_TYPE.setTokenized(true);
+            FIELD_TYPE.freeze();
+        }
+
+        /**
+         * The default position_increment_gap is set to 100 so that phrase
+         * queries of reasonably high slop will not match across field values.
+         */
+        public static final int POSITION_INCREMENT_GAP = 100;
+    }
+
+    public static class Builder extends FieldMapper.Builder<Builder, TextFieldMapper> {
+
+        private int positionIncrementGap = POSITION_INCREMENT_GAP_USE_ANALYZER;
+
+        public Builder(String name) {
+            super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
+            builder = this;
+        }
+
+        public Builder positionIncrementGap(int positionIncrementGap) {
+            if (positionIncrementGap < 0) {
+                throw new MapperParsingException("[positions_increment_gap] must be positive, got " + positionIncrementGap);
+            }
+            this.positionIncrementGap = positionIncrementGap;
+            return this;
+        }
+
+        @Override
+        public Builder docValues(boolean docValues) {
+            if (docValues) {
+                throw new IllegalArgumentException("[text] fields do not support doc values");
+            }
+            return super.docValues(docValues);
+        }
+
+        @Override
+        public TextFieldMapper build(BuilderContext context) {
+            if (positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) {
+                fieldType.setIndexAnalyzer(new NamedAnalyzer(fieldType.indexAnalyzer(), positionIncrementGap));
+                fieldType.setSearchAnalyzer(new NamedAnalyzer(fieldType.searchAnalyzer(), positionIncrementGap));
+                fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(), positionIncrementGap));
+            }
+            setupFieldType(context);
+            TextFieldMapper fieldMapper = new TextFieldMapper(
+                    name, fieldType, defaultFieldType, positionIncrementGap,
+                    context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
+            return fieldMapper.includeInAll(includeInAll);
+        }
+    }
+
+    public static class TypeParser implements Mapper.TypeParser {
+        @Override
+        public Mapper.Builder parse(String fieldName, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
+            TextFieldMapper.Builder builder = new TextFieldMapper.Builder(fieldName);
+            builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
+            builder.fieldType().setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer());
+            builder.fieldType().setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer());
+            parseTextField(builder, fieldName, node, parserContext);
+            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
+                Map.Entry<String, Object> entry = iterator.next();
+                String propName = Strings.toUnderscoreCase(entry.getKey());
+                Object propNode = entry.getValue();
+                if (propName.equals("position_increment_gap")) {
+                    int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1);
+                    builder.positionIncrementGap(newPositionIncrementGap);
+                    iterator.remove();
+                } else if (parseMultiField(builder, fieldName, parserContext, propName, propNode)) {
+                    iterator.remove();
+                }
+            }
+            return builder;
+        }
+    }
+
+    public static final class TextFieldType extends MappedFieldType {
+
+        public TextFieldType() {}
+
+        protected TextFieldType(TextFieldType ref) {
+            super(ref);
+        }
+
+        public TextFieldType clone() {
+            return new TextFieldType(this);
+        }
+
+        @Override
+        public String typeName() {
+            return CONTENT_TYPE;
+        }
+
+        @Override
+        public String value(Object value) {
+            if (value == null) {
+                return null;
+            }
+            return value.toString();
+        }
+
+        @Override
+        public Query nullValueQuery() {
+            if (nullValue() == null) {
+                return null;
+            }
+            return termQuery(nullValue(), null);
+        }
+    }
+
+    private Boolean includeInAll;
+    private int positionIncrementGap;
+
+    protected TextFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
+                              int positionIncrementGap,
+                              Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
+        super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
+        assert fieldType.tokenized();
+        assert fieldType.hasDocValues() == false;
+        this.positionIncrementGap = positionIncrementGap;
+    }
+
+    @Override
+    protected TextFieldMapper clone() {
+        return (TextFieldMapper) super.clone();
+    }
+
+    @Override
+    public TextFieldMapper includeInAll(Boolean includeInAll) {
+        if (includeInAll != null) {
+            TextFieldMapper clone = clone();
+            clone.includeInAll = includeInAll;
+            return clone;
+        } else {
+            return this;
+        }
+    }
+
+    @Override
+    public TextFieldMapper includeInAllIfNotSet(Boolean includeInAll) {
+        if (includeInAll != null && this.includeInAll == null) {
+            TextFieldMapper clone = clone();
+            clone.includeInAll = includeInAll;
+            return clone;
+        } else {
+            return this;
+        }
+    }
+
+    @Override
+    public TextFieldMapper unsetIncludeInAll() {
+        if (includeInAll != null) {
+            TextFieldMapper clone = clone();
+            clone.includeInAll = null;
+            return clone;
+        } else {
+            return this;
+        }
+    }
+
+    public int getPositionIncrementGap() {
+        return this.positionIncrementGap;
+    }
+
+    @Override
+    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
+        final String value;
+        if (context.externalValueSet()) {
+            value = context.externalValue().toString();
+        } else {
+            value = context.parser().textOrNull();
+        }
+
+        if (value == null) {
+            return;
+        }
+
+        if (context.includeInAll(includeInAll, this)) {
+            context.allEntries().addText(fieldType().name(), value, fieldType().boost());
+        }
+
+        if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
+            Field field = new Field(fieldType().name(), value, fieldType());
+            fields.add(field);
+        }
+    }
+
+    @Override
+    protected String contentType() {
+        return CONTENT_TYPE;
+    }
+
+    @Override
+    protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
+        super.doMerge(mergeWith, updateAllTypes);
+        this.includeInAll = ((TextFieldMapper) mergeWith).includeInAll;
+    }
+
+    @Override
+    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
+        super.doXContentBody(builder, includeDefaults, params);
+        doXContentAnalyzers(builder, includeDefaults);
+
+        if (includeInAll != null) {
+            builder.field("include_in_all", includeInAll);
+        } else if (includeDefaults) {
+            builder.field("include_in_all", true);
+        }
+
+        if (includeDefaults || positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) {
+            builder.field("position_increment_gap", positionIncrementGap);
+        }
+    }
+}

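Illustrative usage only, not part of the commit: with this mapper registered, a field of type text accepts position_increment_gap (routed through TypeParser and Builder above) and rejects doc_values. A hypothetical mapping fragment in the jsonBuilder style of the tests below:

    // "message" is a made-up field name; adding .field("doc_values", true) would fail with
    // "[text] fields do not support doc values" per Builder#docValues above.
    XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
        .startObject("properties")
            .startObject("message")
                .field("type", "text")
                .field("position_increment_gap", 50)
            .endObject()
        .endObject()
    .endObject().endObject();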
@@ -100,8 +100,9 @@ public class TypeParsers {
 }

 private static void parseAnalyzersAndTermVectors(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
-NamedAnalyzer indexAnalyzer = builder.fieldType().indexAnalyzer();
-NamedAnalyzer searchAnalyzer = builder.fieldType().searchAnalyzer();
+NamedAnalyzer indexAnalyzer = null;
+NamedAnalyzer searchAnalyzer = null;
+NamedAnalyzer searchQuoteAnalyzer = null;

 for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
 Map.Entry<String, Object> entry = iterator.next();
@@ -136,19 +137,42 @@ public class TypeParsers {
 }
 searchAnalyzer = analyzer;
 iterator.remove();
+} else if (propName.equals("search_quote_analyzer")) {
+NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
+if (analyzer == null) {
+throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
+}
+searchQuoteAnalyzer = analyzer;
+iterator.remove();
 }
 }

-if (indexAnalyzer == null) {
-if (searchAnalyzer != null) {
+if (indexAnalyzer == null && searchAnalyzer != null) {
 throw new MapperParsingException("analyzer on field [" + name + "] must be set when search_analyzer is set");
 }
-} else if (searchAnalyzer == null) {
+
+if (searchAnalyzer == null && searchQuoteAnalyzer != null) {
+throw new MapperParsingException("analyzer and search_analyzer on field [" + name + "] must be set when search_quote_analyzer is set");
+}
+
+if (searchAnalyzer == null) {
 searchAnalyzer = indexAnalyzer;
 }
+
+if (searchQuoteAnalyzer == null) {
+searchQuoteAnalyzer = searchAnalyzer;
+}
+
+if (indexAnalyzer != null) {
 builder.indexAnalyzer(indexAnalyzer);
+}
+if (searchAnalyzer != null) {
 builder.searchAnalyzer(searchAnalyzer);
+}
+if (searchQuoteAnalyzer != null) {
+builder.searchQuoteAnalyzer(searchQuoteAnalyzer);
+}
 }

 /**
  * Parse text field attributes. In addition to {@link #parseField common attributes}

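Reviewer note, not in the diff: the reworked checks above mean search_analyzer requires analyzer, search_quote_analyzer requires both, and unset analyzers fall back in the order index -> search -> search_quote. A hypothetical field definition that passes all three checks, in the same jsonBuilder style:

    // Omitting "analyzer" here while keeping "search_quote_analyzer" would now
    // raise the MapperParsingException added above.
    XContentBuilder field = XContentFactory.jsonBuilder().startObject()
        .field("type", "text")
        .field("analyzer", "standard")
        .field("search_analyzer", "whitespace")
        .field("search_quote_analyzer", "keyword")
    .endObject();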
@@ -45,6 +45,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.core.StringFieldMapper;
+import org.elasticsearch.index.mapper.core.TextFieldMapper;
 import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
 import org.elasticsearch.index.query.support.NestedScope;
@@ -254,7 +255,7 @@ public class QueryShardContext extends QueryRewriteContext {
 if (fieldMapping != null || allowUnmappedFields) {
 return fieldMapping;
 } else if (mapUnmappedFieldAsString) {
-StringFieldMapper.Builder builder = new StringFieldMapper.Builder(name);
+TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name);
 return builder.build(new Mapper.BuilderContext(indexSettings.getSettings(), new ContentPath(1))).fieldType();
 } else {
 throw new QueryShardException(this, "No field mapping can be found for the field with name [{}]", name);

@@ -46,6 +46,7 @@ import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
 import org.elasticsearch.index.mapper.core.StringFieldMapper;
+import org.elasticsearch.index.mapper.core.TextFieldMapper;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.search.dfs.AggregatedDfs;
@@ -160,7 +161,8 @@ public class TermVectorsService {
 private static boolean isValidField(MappedFieldType fieldType) {
 // must be a string
 if (fieldType instanceof StringFieldMapper.StringFieldType == false
-&& fieldType instanceof KeywordFieldMapper.KeywordFieldType == false) {
+&& fieldType instanceof KeywordFieldMapper.KeywordFieldType == false
+&& fieldType instanceof TextFieldMapper.TextFieldType == false) {
 return false;
 }
 // and must be indexed

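Side note, illustration only: with TextFieldType accepted by isValidField, term vectors can be requested for text fields just as the integration tests below do. A minimal sketch in the shorthand mapping syntax those tests use (field name made up, inside an ESIntegTestCase-style test):

    assertAcked(prepareCreate("test")
        .addMapping("type1", "field1", "type=text,term_vector=with_positions_offsets_payloads"));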
@@ -38,6 +38,7 @@ import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
 import org.elasticsearch.index.mapper.core.LongFieldMapper;
 import org.elasticsearch.index.mapper.core.ShortFieldMapper;
 import org.elasticsearch.index.mapper.core.StringFieldMapper;
+import org.elasticsearch.index.mapper.core.TextFieldMapper;
 import org.elasticsearch.index.mapper.core.TokenCountFieldMapper;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
@@ -97,6 +98,7 @@ public class IndicesModule extends AbstractModule {
 registerMapper(DateFieldMapper.CONTENT_TYPE, new DateFieldMapper.TypeParser());
 registerMapper(IpFieldMapper.CONTENT_TYPE, new IpFieldMapper.TypeParser());
 registerMapper(StringFieldMapper.CONTENT_TYPE, new StringFieldMapper.TypeParser());
+registerMapper(TextFieldMapper.CONTENT_TYPE, new TextFieldMapper.TypeParser());
 registerMapper(KeywordFieldMapper.CONTENT_TYPE, new KeywordFieldMapper.TypeParser());
 registerMapper(TokenCountFieldMapper.CONTENT_TYPE, new TokenCountFieldMapper.TypeParser());
 registerMapper(ObjectMapper.CONTENT_TYPE, new ObjectMapper.TypeParser());

@@ -247,13 +247,13 @@ public class CreateIndexIT extends ESIntegTestCase {
 CreateIndexRequestBuilder b = prepareCreate("test");
 b.addMapping("type1", jsonBuilder().startObject().startObject("properties")
 .startObject("text")
-.field("type", "string")
+.field("type", "text")
 .field("analyzer", "standard")
 .field("search_analyzer", "whitespace")
 .endObject().endObject().endObject());
 b.addMapping("type2", jsonBuilder().humanReadable(true).startObject().startObject("properties")
 .startObject("text")
-.field("type", "string")
+.field("type", "text")
 .endObject().endObject().endObject());
 try {
 b.get();

@@ -55,7 +55,7 @@ public class IndicesStatsTests extends ESSingleNodeTestCase {
 .field("store", true)
 .endObject()
 .startObject("bar")
-.field("type", "string")
+.field("type", "text")
 .field("term_vector", "with_positions_offsets_payloads")
 .endObject()
 .endObject()

@@ -61,7 +61,7 @@ public class GetTermVectorsCheckDocFreqIT extends ESIntegTestCase {
 XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
 .startObject("properties")
 .startObject("field")
-.field("type", "string")
+.field("type", "text")
 .field("term_vector", "with_positions_offsets_payloads")
 .field("analyzer", "tv_test")
 .endObject()

@@ -66,7 +66,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
 XContentBuilder mapping = jsonBuilder().startObject().startObject("type1")
 .startObject("properties")
 .startObject("field")
-.field("type", "string")
+.field("type", "text")
 .field("term_vector", "with_positions_offsets_payloads")
 .endObject()
 .endObject()
@@ -92,7 +92,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
 XContentBuilder mapping = jsonBuilder().startObject().startObject("type1")
 .startObject("properties")
 .startObject("existingfield")
-.field("type", "string")
+.field("type", "text")
 .field("term_vector", "with_positions_offsets_payloads")
 .endObject()
 .endObject()
@@ -119,7 +119,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
 XContentBuilder mapping = jsonBuilder().startObject().startObject("type1")
 .startObject("properties")
 .startObject("existingfield")
-.field("type", "string")
+.field("type", "text")
 .field("term_vector", "with_positions_offsets_payloads")
 .endObject()
 .endObject()
@@ -150,11 +150,11 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
 .addAlias(new Alias("alias"))
 .addMapping("type1",
 "field0", "type=integer,", // no tvs
-"field1", "type=string,index=no", // no tvs
-"field2", "type=string,index=no,store=true", // no tvs
-"field3", "type=string,index=no,term_vector=yes", // no tvs
+"field1", "type=text,index=false", // no tvs
+"field2", "type=text,index=false,store=true", // no tvs
+"field3", "type=text,index=false,term_vector=yes", // no tvs
 "field4", "type=keyword", // yes tvs
-"field5", "type=string,index=analyzed")); // yes tvs
+"field5", "type=text,index=true")); // yes tvs

 ensureYellow();

@@ -190,7 +190,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
 XContentBuilder mapping = jsonBuilder().startObject().startObject("type1")
 .startObject("properties")
 .startObject("field")
-.field("type", "string")
+.field("type", "text")
 .field("term_vector", "with_positions_offsets_payloads")
 .field("analyzer", "tv_test")
 .endObject()
@@ -278,7 +278,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
 XContentBuilder mapping = jsonBuilder().startObject().startObject("type1")
 .startObject("properties")
 .startObject("field")
-.field("type", "string")
+.field("type", "text")
 .field("term_vector", optionString)
 .field("analyzer", "tv_test")
 .endObject()
@@ -428,7 +428,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
 String queryString = createString(tokens, payloads, encoding, delimiter.charAt(0));
 //create the mapping
 XContentBuilder mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
-.startObject("field").field("type", "string").field("term_vector", "with_positions_offsets_payloads")
+.startObject("field").field("type", "text").field("term_vector", "with_positions_offsets_payloads")
 .field("analyzer", "payload_test").endObject().endObject().endObject().endObject();
 assertAcked(prepareCreate("test").addMapping("type1", mapping).setSettings(
 settingsBuilder()
@@ -586,7 +586,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
 XContentBuilder source = jsonBuilder().startObject();
 for (String field : fieldNames) {
 mapping.startObject(field)
-.field("type", "string")
+.field("type", "text")
 .field("term_vector", randomBoolean() ? "with_positions_offsets_payloads" : "no")
 .field("analyzer", "tv_test")
 .endObject();
@@ -672,9 +672,9 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
 // setup indices
 String[] indexNames = new String[] {"with_tv", "without_tv"};
 assertAcked(prepareCreate(indexNames[0])
-.addMapping("type1", "field1", "type=string,term_vector=with_positions_offsets,analyzer=keyword"));
+.addMapping("type1", "field1", "type=text,term_vector=with_positions_offsets,analyzer=keyword"));
 assertAcked(prepareCreate(indexNames[1])
-.addMapping("type1", "field1", "type=string,term_vector=no,analyzer=keyword"));
+.addMapping("type1", "field1", "type=text,term_vector=no,analyzer=keyword"));
 ensureGreen();

 // index documents with and without term vectors
@@ -763,7 +763,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
 XContentBuilder source = jsonBuilder().startObject();
 for (int i = 0; i < numFields; i++) {
 mapping.startObject("field" + i)
-.field("type", "string")
+.field("type", "text")
 .field("term_vector", randomBoolean() ? "yes" : "no")
 .endObject();
 source.field("field" + i, "some text here");
@@ -790,7 +790,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
 .put("index.analysis.analyzer", "standard");
 assertAcked(prepareCreate("test")
 .setSettings(settings)
-.addMapping("type1", "field1", "type=string,term_vector=with_positions_offsets"));
+.addMapping("type1", "field1", "type=text,term_vector=with_positions_offsets"));
 ensureGreen();

 // index documents existing document
@@ -848,7 +848,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
 .put("index.analysis.analyzer", "standard");
 assertAcked(prepareCreate("test")
 .setSettings(settings)
-.addMapping("type1", "field1", "type=string"));
+.addMapping("type1", "field1", "type=text"));
 ensureGreen();

 // request tvs from artificial document
@@ -882,7 +882,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
 withTermVectors.add(fieldName);
 }
 mapping.startObject(fieldName)
-.field("type", "string")
+.field("type", "text")
 .field("term_vector", withTermVectors.contains(fieldName) ? "yes" : "no")
 .endObject();
 source.field(fieldName, "some text here");
@@ -1090,7 +1090,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
 .put("index.analysis.analyzer", "keyword");
 assertAcked(prepareCreate("test")
 .setSettings(settings)
-.addMapping("type1", "tags", "type=string"));
+.addMapping("type1", "tags", "type=text"));
 ensureYellow();

 int numTerms = scaledRandomIntBetween(10, 50);
@@ -1128,7 +1128,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
 .put("index.analysis.analyzer", "keyword");
 assertAcked(prepareCreate("test")
 .setSettings(settings)
-.addMapping("type1", "tags", "type=string"));
+.addMapping("type1", "tags", "type=text"));
 ensureYellow();

 logger.info("Indexing one document with tags of increasing frequencies ...");
@@ -1169,7 +1169,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
 .put("index.number_of_shards", 1); // no dfs
 assertAcked(prepareCreate("test")
 .setSettings(settings)
-.addMapping("type1", "tags", "type=string"));
+.addMapping("type1", "tags", "type=text"));
 ensureYellow();

 int numDocs = scaledRandomIntBetween(10, 50); // as many terms as there are docs

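Worth spelling out from the hunks above (illustration only; the field names come straight from the test): for the new type, the old string index values map onto booleans.

    // was: "field1", "type=string,index=no"  and  "field5", "type=string,index=analyzed"
    assertAcked(prepareCreate("test").addMapping("type1",
        "field1", "type=text,index=false",
        "field5", "type=text,index=true"));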
@@ -136,7 +136,7 @@ public class IndexAliasesIT extends ESIntegTestCase {

 public void testFilteringAliases() throws Exception {
 logger.info("--> creating index [test]");
-assertAcked(prepareCreate("test").addMapping("type", "user", "type=string"));
+assertAcked(prepareCreate("test").addMapping("type", "user", "type=text"));

 ensureGreen();

@@ -163,7 +163,7 @@ public class IndexAliasesIT extends ESIntegTestCase {

 public void testSearchingFilteringAliasesSingleIndex() throws Exception {
 logger.info("--> creating index [test]");
-assertAcked(prepareCreate("test").addMapping("type1", "id", "type=string", "name", "type=string"));
+assertAcked(prepareCreate("test").addMapping("type1", "id", "type=text", "name", "type=text"));

 ensureGreen();

@@ -243,9 +243,9 @@ public class IndexAliasesIT extends ESIntegTestCase {

 public void testSearchingFilteringAliasesTwoIndices() throws Exception {
 logger.info("--> creating index [test1]");
-assertAcked(prepareCreate("test1").addMapping("type1", "name", "type=string"));
+assertAcked(prepareCreate("test1").addMapping("type1", "name", "type=text"));
 logger.info("--> creating index [test2]");
-assertAcked(prepareCreate("test2").addMapping("type1", "name", "type=string"));
+assertAcked(prepareCreate("test2").addMapping("type1", "name", "type=text"));
 ensureGreen();

 logger.info("--> adding filtering aliases to index [test1]");
@@ -310,7 +310,7 @@ public class IndexAliasesIT extends ESIntegTestCase {

 assertAcked(client().admin().indices().preparePutMapping("test1", "test2", "test3")
 .setType("type1")
-.setSource("name", "type=string"));
+.setSource("name", "type=text"));

 ensureGreen();

@@ -370,8 +370,8 @@ public class IndexAliasesIT extends ESIntegTestCase {

 public void testDeletingByQueryFilteringAliases() throws Exception {
 logger.info("--> creating index [test1] and [test2");
-assertAcked(prepareCreate("test1").addMapping("type1", "name", "type=string"));
-assertAcked(prepareCreate("test2").addMapping("type1", "name", "type=string"));
+assertAcked(prepareCreate("test1").addMapping("type1", "name", "type=text"));
+assertAcked(prepareCreate("test2").addMapping("type1", "name", "type=text"));
 ensureGreen();

 logger.info("--> adding filtering aliases to index [test1]");
@@ -407,8 +407,8 @@ public class IndexAliasesIT extends ESIntegTestCase {

 public void testDeleteAliases() throws Exception {
 logger.info("--> creating index [test1] and [test2]");
-assertAcked(prepareCreate("test1").addMapping("type", "name", "type=string"));
-assertAcked(prepareCreate("test2").addMapping("type", "name", "type=string"));
+assertAcked(prepareCreate("test1").addMapping("type", "name", "type=text"));
+assertAcked(prepareCreate("test2").addMapping("type", "name", "type=text"));
 ensureGreen();

 logger.info("--> adding filtering aliases to index [test1]");
@@ -486,7 +486,7 @@ public class IndexAliasesIT extends ESIntegTestCase {

 public void testSameAlias() throws Exception {
 logger.info("--> creating index [test]");
-assertAcked(prepareCreate("test").addMapping("type", "name", "type=string"));
+assertAcked(prepareCreate("test").addMapping("type", "name", "type=text"));
 ensureGreen();

 logger.info("--> creating alias1 ");
@@ -547,7 +547,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
 createIndex("bazbar");

 assertAcked(client().admin().indices().preparePutMapping("foobar", "test", "test123", "foobarbaz", "bazbar")
-.setType("type").setSource("field", "type=string"));
+.setType("type").setSource("field", "type=text"));
 ensureGreen();

 logger.info("--> creating aliases [alias1, alias2]");
@@ -956,7 +956,7 @@ public class IndexAliasesIT extends ESIntegTestCase {

 public void testCreateIndexWithAliases() throws Exception {
 assertAcked(prepareCreate("test")
-.addMapping("type", "field", "type=string")
+.addMapping("type", "field", "type=text")
 .addAlias(new Alias("alias1"))
 .addAlias(new Alias("alias2").filter(QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery("field"))))
 .addAlias(new Alias("alias3").indexRouting("index").searchRouting("search")));
@@ -978,7 +978,7 @@ public class IndexAliasesIT extends ESIntegTestCase {

 public void testCreateIndexWithAliasesSource() throws Exception {
 assertAcked(prepareCreate("test")
-.addMapping("type", "field", "type=string")
+.addMapping("type", "field", "type=text")
 .setAliases("{\n" +
 " \"alias1\" : {},\n" +
 " \"alias2\" : {\"filter\" : {\"term\": {\"field\":\"value\"}}},\n" +

@@ -54,7 +54,7 @@ public class BasicAnalysisBackwardCompatibilityIT extends ESBackcompatTestCase {
 for (int i = 0; i < fields.length; i++) {
 fields[i++] = "field_" + fieldId++;
 String analyzer = randomAnalyzer();
-fields[i] = "type=string,analyzer=" + analyzer;
+fields[i] = "type=text,analyzer=" + analyzer;
 }
 assertAcked(prepareCreate("test")
 .addMapping("type", (Object[])fields)

@@ -561,7 +561,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase {

 public void testAnalyze() {
 createIndexWithAlias();
-assertAcked(client().admin().indices().preparePutMapping("test").setType("test").setSource("field", "type=string,analyzer=keyword"));
+assertAcked(client().admin().indices().preparePutMapping("test").setType("test").setSource("field", "type=text,analyzer=keyword"));
 ensureYellow("test");
 AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("this is a test").setIndex(indexOrAlias()).setField("field").get();
 assertThat(analyzeResponse.getTokens().size(), equalTo(1));
@@ -586,7 +586,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase {

 public void testGetTermVector() throws IOException {
 createIndexWithAlias();
-assertAcked(client().admin().indices().preparePutMapping("test").setType("type1").setSource("field", "type=string,term_vector=with_positions_offsets_payloads").get());
+assertAcked(client().admin().indices().preparePutMapping("test").setType("type1").setSource("field", "type=text,term_vector=with_positions_offsets_payloads").get());
 ensureYellow("test");

 client().prepareIndex(indexOrAlias(), "type1", "1")

@@ -90,7 +90,7 @@ public class SimpleClusterStateIT extends ESIntegTestCase {
 .setTemplate("te*")
 .setOrder(0)
 .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
-.startObject("field1").field("type", "string").field("store", true).endObject()
+.startObject("field1").field("type", "text").field("store", true).endObject()
 .startObject("field2").field("type", "keyword").field("store", true).endObject()
 .endObject().endObject().endObject())
 .get();
@@ -99,7 +99,7 @@ public class SimpleClusterStateIT extends ESIntegTestCase {
 .setTemplate("test*")
 .setOrder(1)
 .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
-.startObject("field2").field("type", "string").field("store", "no").endObject()
+.startObject("field2").field("type", "text").field("store", "no").endObject()
 .endObject().endObject().endObject())
 .get();

@@ -132,7 +132,7 @@ public class SimpleClusterStateIT extends ESIntegTestCase {
 int counter = 0;
 int numberOfFields = 0;
 while (true) {
-mapping.startObject(Strings.randomBase64UUID()).field("type", "string").endObject();
+mapping.startObject(Strings.randomBase64UUID()).field("type", "text").endObject();
 counter += 10; // each field is about 10 bytes, assuming compression in place
 numberOfFields++;
 if (counter > estimatedBytesSize) {

@@ -209,7 +209,7 @@ public class ExplainActionIT extends ESIntegTestCase {

 public void testExplainWithFilteredAlias() throws Exception {
 assertAcked(prepareCreate("test")
-.addMapping("test", "field2", "type=string")
+.addMapping("test", "field2", "type=text")
 .addAlias(new Alias("alias1").filter(QueryBuilders.termQuery("field2", "value2"))));
 ensureGreen("test");

@@ -225,7 +225,7 @@ public class ExplainActionIT extends ESIntegTestCase {

 public void testExplainWithFilteredAliasFetchSource() throws Exception {
 assertAcked(client().admin().indices().prepareCreate("test")
-.addMapping("test", "field2", "type=string")
+.addMapping("test", "field2", "type=text")
 .addAlias(new Alias("alias1").filter(QueryBuilders.termQuery("field2", "value2"))));
 ensureGreen("test");

@@ -47,6 +47,7 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {

 public void testRandom() throws Exception {
 assertAcked(prepareCreate("test").addMapping(
-"test", "string", "type=string", "date", "type=date", "double", "type=double", "double", "type=double",
+"test", "string", "type=text", "date", "type=date", "double", "type=double", "double", "type=double",
 "float", "type=float", "long", "type=long", "integer", "type=integer", "short", "type=short", "byte", "type=byte"
 ));
 ensureGreen("test");
@@ -185,7 +185,7 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
 "test", "value", "type=long"
 ));
 assertAcked(prepareCreate("test2").addMapping(
-"test", "value", "type=string"
+"test", "value", "type=text"
 ));
 ensureGreen("test1", "test2");

@@ -72,7 +72,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
 }

 public void testString() {
-createIndex("test", Settings.EMPTY, "test", "field", "type=string");
+createIndex("test", Settings.EMPTY, "test", "field", "type=text");
 for (int value = 0; value <= 10; value++) {
 client().prepareIndex("test", "test").setSource("field", String.format(Locale.ENGLISH, "%03d", value)).get();
 }
@@ -185,11 +185,11 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
 }

 public void testInvalidField() {
-createIndex("test1", Settings.EMPTY, "test", "field1", "type=string");
+createIndex("test1", Settings.EMPTY, "test", "field1", "type=text");
 client().prepareIndex("test1", "test").setSource("field1", "a").get();
 client().prepareIndex("test1", "test").setSource("field1", "b").get();

-createIndex("test2", Settings.EMPTY, "test", "field2", "type=string");
+createIndex("test2", Settings.EMPTY, "test", "field2", "type=text");
 client().prepareIndex("test2", "test").setSource("field2", "a").get();
 client().prepareIndex("test2", "test").setSource("field2", "b").get();
 client().admin().indices().prepareRefresh().get();

@@ -69,7 +69,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase {
 internalCluster().startNode();

 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
-.startObject("properties").startObject("appAccountIds").field("type", "string").endObject().endObject()
+.startObject("properties").startObject("appAccountIds").field("type", "text").endObject().endObject()
 .endObject().endObject().string();
 assertAcked(prepareCreate("test").addMapping("type1", mapping));

@@ -109,7 +109,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase {
 internalCluster().startNode();

 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
-.startObject("properties").startObject("field").field("type", "string").endObject().startObject("num").field("type", "integer").endObject().endObject()
+.startObject("properties").startObject("field").field("type", "text").endObject().startObject("num").field("type", "integer").endObject().endObject()
 .endObject().endObject().string();
 // note: default replica settings are tied to #data nodes-1 which is 0 here. We can do with 1 in this test.
 int numberOfShards = numberOfShards();
@@ -301,7 +301,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase {
 .setTemplate("te*")
 .setOrder(0)
 .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
-.startObject("field1").field("type", "string").field("store", true).endObject()
+.startObject("field1").field("type", "text").field("store", true).endObject()
 .startObject("field2").field("type", "keyword").field("store", true).endObject()
 .endObject().endObject().endObject())
 .execute().actionGet();

@@ -253,12 +253,12 @@ public class GetActionIT extends ESIntegTestCase {
 public void testGetDocWithMultivaluedFields() throws Exception {
 String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1")
 .startObject("properties")
-.startObject("field").field("type", "string").field("store", true).endObject()
+.startObject("field").field("type", "text").field("store", true).endObject()
 .endObject()
 .endObject().endObject().string();
 String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type2")
 .startObject("properties")
-.startObject("field").field("type", "string").field("store", true).endObject()
+.startObject("field").field("type", "text").field("store", true).endObject()
 .endObject()
 .endObject().endObject().string();
 assertAcked(prepareCreate("test")
@@ -718,7 +718,7 @@ public class GetActionIT extends ESIntegTestCase {
 assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
 .addMapping("my-type1", jsonBuilder().startObject().startObject("my-type1").startObject("properties")
 .startObject("field1").startObject("properties")
-.startObject("field2").field("type", "string").endObject()
+.startObject("field2").field("type", "text").endObject()
 .endObject().endObject()
 .endObject().endObject().endObject())
 .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1)));
@@ -846,7 +846,7 @@ public class GetActionIT extends ESIntegTestCase {
 .endObject()
 .startObject("properties")
 .startObject("some_field")
-.field("type", "string")
+.field("type", "text")
 .endObject()
 .endObject()
 .endObject()

@@ -188,7 +188,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
 .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
 .build();

-prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=string").get();
+prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
 ensureGreen(IDX);

 // So basically, the primary should fail and the replica will need to
@@ -265,7 +265,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
 .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
 .build();

-prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=string").get();
+prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
 ensureYellow(IDX);
 client().prepareIndex(IDX, "doc", "1").setSource("foo", "bar").get();
 client().prepareIndex(IDX, "doc", "2").setSource("foo", "bar").get();
@@ -323,7 +323,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
 .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
 .build();

-prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=string").get();
+prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
 ensureYellow(IDX);
 client().prepareIndex(IDX, "doc", "1").setSource("foo", "bar").get();
 client().prepareIndex(IDX, "doc", "2").setSource("foo", "bar").get();
@@ -383,7 +383,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
 .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
 .build();

-prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=string").get();
+prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
 ensureYellow(IDX);
 // Node1 has the primary, now node2 has the replica
 String node2 = internalCluster().startNode(nodeSettings);
@@ -458,7 +458,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
 .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
 .build();

-prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=string").get();
+prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
 ensureYellow(IDX);
 // Node1 has the primary, now node2 has the replica
 String node2 = internalCluster().startNode(nodeSettings);
@@ -551,7 +551,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
 .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
 .build();

-prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=string").get();
+prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
 ensureGreen(IDX);
 client().prepareIndex(IDX, "doc", "1").setSource("foo", "bar").get();
 client().prepareIndex(IDX, "doc", "2").setSource("foo", "bar").get();
@@ -590,7 +590,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
 .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
 .build();

-prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=string").get();
+prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
 ensureGreen(IDX);

 int docCount = randomIntBetween(10, 100);
@@ -797,8 +797,8 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
 .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
 .build();

-prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=string").get();
-prepareCreate(IDX2).setSettings(idx2Settings).addMapping("doc", "foo", "type=string").get();
+prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get();
+prepareCreate(IDX2).setSettings(idx2Settings).addMapping("doc", "foo", "type=text").get();
 ensureGreen(IDX, IDX2);

 int docCount = randomIntBetween(10, 100);

@@ -86,7 +86,7 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase {
 NamedAnalyzer namedAnalyzer = new PreBuiltAnalyzerProvider(analyzerName, AnalyzerScope.INDEX, randomPreBuiltAnalyzer.getAnalyzer(randomVersion)).get();

 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-.startObject("properties").startObject("field").field("type", "string").field("analyzer", analyzerName).endObject().endObject()
+.startObject("properties").startObject("field").field("type", "text").field("analyzer", analyzerName).endObject().endObject()
 .endObject().endObject().string();
 DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));

@ -46,7 +46,7 @@ public class FieldDataFilterIntegrationIT extends ESIntegTestCase {
|
|||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("name")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.startObject("fielddata")
|
||||
.startObject("filter")
|
||||
.startObject("regex")
|
||||
|
@ -56,7 +56,7 @@ public class FieldDataFilterIntegrationIT extends ESIntegTestCase {
|
|||
.endObject()
|
||||
.endObject()
|
||||
.startObject("not_filtered")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject().endObject();
|
||||
|
|
|
@ -33,7 +33,7 @@ public class FieldDataLoadingIT extends ESIntegTestCase {
|
|||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties")
|
||||
.startObject("name")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.startObject("fielddata").field("loading", "eager").endObject()
|
||||
.endObject()
|
||||
.endObject().endObject().endObject()));
|
||||
|
@ -50,7 +50,7 @@ public class FieldDataLoadingIT extends ESIntegTestCase {
|
|||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties")
|
||||
.startObject("name")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.startObject("fielddata").field("loading", "eager_global_ordinals").endObject()
|
||||
.endObject()
|
||||
.endObject().endObject().endObject()));
|
||||
|
|
|
@ -38,7 +38,7 @@ import org.elasticsearch.index.mapper.core.FloatFieldMapper;
|
|||
import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.LongFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.LongFieldMapper.LongFieldType;
|
||||
import org.elasticsearch.index.mapper.core.StringFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.TextFieldMapper;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -55,7 +55,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
|
|||
String mapping = jsonBuilder().startObject().startObject("type")
|
||||
.field("dynamic", "true")
|
||||
.startObject("properties")
|
||||
.startObject("field1").field("type", "string").endObject()
|
||||
.startObject("field1").field("type", "text").endObject()
|
||||
.endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
|
@ -75,7 +75,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
|
|||
String mapping = jsonBuilder().startObject().startObject("type")
|
||||
.field("dynamic", "false")
|
||||
.startObject("properties")
|
||||
.startObject("field1").field("type", "string").endObject()
|
||||
.startObject("field1").field("type", "text").endObject()
|
||||
.endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
|
@ -96,7 +96,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
|
|||
String mapping = jsonBuilder().startObject().startObject("type")
|
||||
.field("dynamic", "strict")
|
||||
.startObject("properties")
|
||||
.startObject("field1").field("type", "string").endObject()
|
||||
.startObject("field1").field("type", "text").endObject()
|
||||
.endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
|
@ -130,7 +130,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
|
|||
.field("dynamic", "false")
|
||||
.startObject("properties")
|
||||
.startObject("obj1").startObject("properties")
|
||||
.startObject("field1").field("type", "string").endObject()
|
||||
.startObject("field1").field("type", "text").endObject()
|
||||
.endObject().endObject()
|
||||
.endObject()
|
||||
.endObject().endObject().string();
|
||||
|
@ -153,7 +153,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
|
|||
.field("dynamic", "strict")
|
||||
.startObject("properties")
|
||||
.startObject("obj1").startObject("properties")
|
||||
.startObject("field1").field("type", "string").endObject()
|
||||
.startObject("field1").field("type", "text").endObject()
|
||||
.endObject().endObject()
|
||||
.endObject()
|
||||
.endObject().endObject().string();
|
||||
|
@ -218,7 +218,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
|
|||
IndexService indexService = createIndex("test");
|
||||
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("foo").field("type", "string").endObject().endObject()
|
||||
.startObject("properties").startObject("foo").field("type", "text").endObject().endObject()
|
||||
.endObject().string();
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
@ -251,7 +251,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
|
|||
// Make sure that mapping updates are incremental, this is important for performance otherwise
|
||||
// every new field introduction runs in linear time with the total number of fields
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("foo").field("type", "string").endObject().endObject()
|
||||
.startObject("properties").startObject("foo").field("type", "text").endObject().endObject()
|
||||
.endObject().string();
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
@ -374,7 +374,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
|
|||
|
||||
public void testReuseExistingMappings() throws IOException, Exception {
|
||||
IndexService indexService = createIndex("test", Settings.EMPTY, "type",
|
||||
"my_field1", "type=string,store=true",
|
||||
"my_field1", "type=text,store=true",
|
||||
"my_field2", "type=integer,precision_step=10",
|
||||
"my_field3", "type=long,doc_values=false",
|
||||
"my_field4", "type=float,index_options=freqs",
|
||||
|
@ -423,9 +423,9 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
|
|||
}
|
||||
assertNotNull(myField1Mapper);
|
||||
// same type
|
||||
assertTrue(myField1Mapper instanceof StringFieldMapper);
|
||||
assertTrue(myField1Mapper instanceof TextFieldMapper);
|
||||
// and same option
|
||||
assertTrue(((StringFieldMapper) myField1Mapper).fieldType().stored());
|
||||
assertTrue(((TextFieldMapper) myField1Mapper).fieldType().stored());
|
||||
|
||||
// Even if dynamic mappings would map a numeric field as a long, here it should map it as a integer
|
||||
// since we already have a mapping of type integer
|
||||
|
@ -470,7 +470,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
|
|||
.startObject("template1")
|
||||
.field("match_mapping_type", "string")
|
||||
.startObject("mapping")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.startObject("fields")
|
||||
.startObject("raw")
|
||||
.field("type", "keyword")
|
||||
|
@ -486,7 +486,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
|
|||
.startObject("type2")
|
||||
.startObject("properties")
|
||||
.startObject("field")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject().endObject();
|
||||
|
|
|
@ -60,7 +60,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
.admin()
.indices()
.prepareCreate(index)
.addMapping(type, field, "type=string")
.addMapping(type, field, "type=text")
.execute()
.actionGet();
}
@ -76,7 +76,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
.admin()
.indices()
.prepareCreate(index)
.addMapping(type, field, "type=string")
.addMapping(type, field, "type=text")
.execute()
.actionGet();
}

@ -362,7 +362,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
|
|||
fail("Expected MapperParsingException");
|
||||
} catch (MapperParsingException e) {
|
||||
assertThat(e.getMessage(), containsString("Root mapping definition has unsupported parameters"));
|
||||
assertThat(e.getMessage(), containsString("[type : string]"));
|
||||
assertThat(e.getMessage(), containsString("[type : text]"));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -374,7 +374,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
|
|||
fail("Expected MapperParsingException");
|
||||
} catch (MapperParsingException e) {
|
||||
assertThat(e.getMessage(), containsString("Root mapping definition has unsupported parameters"));
|
||||
assertThat(e.getMessage(), containsString("type=string"));
|
||||
assertThat(e.getMessage(), containsString("type=text"));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -386,7 +386,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
|
|||
fail("Expected MapperParsingException");
|
||||
} catch (MapperParsingException e) {
|
||||
assertThat(e.getMessage(), containsString("Root mapping definition has unsupported parameters"));
|
||||
assertThat(e.getMessage(), containsString("type=string"));
|
||||
assertThat(e.getMessage(), containsString("type=text"));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -451,7 +451,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
|
|||
public void testAutoBoost() throws Exception {
|
||||
for (boolean boost : new boolean[] {false, true}) {
|
||||
String index = "test_" + boost;
|
||||
IndexService indexService = createIndex(index, client().admin().indices().prepareCreate(index).addMapping("type", "foo", "type=string" + (boost ? ",boost=2" : "")));
|
||||
IndexService indexService = createIndex(index, client().admin().indices().prepareCreate(index).addMapping("type", "foo", "type=text" + (boost ? ",boost=2" : "")));
|
||||
client().prepareIndex(index, "type").setSource("foo", "bar").get();
|
||||
client().admin().indices().prepareRefresh(index).get();
|
||||
Query query = indexService.mapperService().documentMapper("type").allFieldMapper().fieldType().termQuery("bar", null);
|
||||
|
|
|
@ -72,7 +72,7 @@ public class CopyToMapperIntegrationIT extends ESIntegTestCase {
|
|||
public void testDynamicObjectCopyTo() throws Exception {
|
||||
String mapping = jsonBuilder().startObject().startObject("doc").startObject("properties")
|
||||
.startObject("foo")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("copy_to", "root.top.child")
|
||||
.endObject()
|
||||
.endObject().endObject().endObject().string();
|
||||
|
@ -102,7 +102,7 @@ public class CopyToMapperIntegrationIT extends ESIntegTestCase {
|
|||
.startObject().startObject("template_all")
|
||||
.field("match", "*")
|
||||
.field("match_mapping_type", "string")
|
||||
.startObject("mapping").field("type", "string").field("copy_to", "{name}_raw").endObject()
|
||||
.startObject("mapping").field("type", "text").field("copy_to", "{name}_raw").endObject()
|
||||
.endObject().endObject()
|
||||
|
||||
.endArray();
|
||||
|
|
|
@ -37,7 +37,7 @@ import org.elasticsearch.index.mapper.ParseContext;
|
|||
import org.elasticsearch.index.mapper.ParseContext.Document;
|
||||
import org.elasticsearch.index.mapper.ParsedDocument;
|
||||
import org.elasticsearch.index.mapper.core.LongFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.StringFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.TextFieldMapper;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
@ -58,16 +58,16 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
|
|||
public void testCopyToFieldsParsing() throws Exception {
|
||||
String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
.startObject("copy_test")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.array("copy_to", "another_field", "cyclic_test")
|
||||
.endObject()
|
||||
|
||||
.startObject("another_field")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.endObject()
|
||||
|
||||
.startObject("cyclic_test")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.array("copy_to", "copy_test")
|
||||
.endObject()
|
||||
|
||||
|
@ -84,7 +84,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
|
|||
FieldMapper fieldMapper = docMapper.mappers().getMapper("copy_test");
|
||||
|
||||
// Check json serialization
|
||||
StringFieldMapper stringFieldMapper = (StringFieldMapper) fieldMapper;
|
||||
TextFieldMapper stringFieldMapper = (TextFieldMapper) fieldMapper;
|
||||
XContentBuilder builder = jsonBuilder().startObject();
|
||||
stringFieldMapper.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject();
|
||||
builder.close();
|
||||
|
@ -93,7 +93,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
|
|||
serializedMap = parser.map();
|
||||
}
|
||||
Map<String, Object> copyTestMap = (Map<String, Object>) serializedMap.get("copy_test");
|
||||
assertThat(copyTestMap.get("type").toString(), is("string"));
|
||||
assertThat(copyTestMap.get("type").toString(), is("text"));
|
||||
List<String> copyToList = (List<String>) copyTestMap.get("copy_to");
|
||||
assertThat(copyToList.size(), equalTo(2));
|
||||
assertThat(copyToList.get(0).toString(), equalTo("another_field"));
|
||||
|
@ -138,7 +138,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
|
|||
String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
|
||||
.startObject("copy_test")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("copy_to", "very.inner.field")
|
||||
.endObject()
|
||||
|
||||
|
@ -173,7 +173,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
|
|||
String mapping = jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties")
|
||||
.startObject("copy_test")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("copy_to", "very.inner.field")
|
||||
.endObject()
|
||||
.endObject()
|
||||
|
@ -201,7 +201,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
|
|||
String mapping = jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties")
|
||||
.startObject("copy_test")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("copy_to", "very.far.inner.field")
|
||||
.endObject()
|
||||
.startObject("very")
|
||||
|
@ -238,7 +238,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
|
|||
.field("dynamic", "strict")
|
||||
.startObject("properties")
|
||||
.startObject("copy_test")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("copy_to", "very.inner.field")
|
||||
.endObject()
|
||||
.endObject()
|
||||
|
@ -262,7 +262,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
|
|||
String mapping = jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties")
|
||||
.startObject("copy_test")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("copy_to", "very.far.field")
|
||||
.endObject()
|
||||
.startObject("very")
|
||||
|
@ -296,7 +296,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
|
|||
String mappingBefore = jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
|
||||
.startObject("copy_test")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.array("copy_to", "foo", "bar")
|
||||
.endObject()
|
||||
|
||||
|
@ -305,7 +305,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
|
|||
String mappingAfter = jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
|
||||
.startObject("copy_test")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.array("copy_to", "baz", "bar")
|
||||
.endObject()
|
||||
|
||||
|
@ -438,7 +438,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
|
|||
.endArray()
|
||||
.startObject("properties")
|
||||
.startObject("copy_test")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("copy_to", "very.inner.field")
|
||||
.endObject()
|
||||
.endObject()
|
||||
|
|
|
@ -134,8 +134,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
.field("type", "boolean")
.startObject("fields")
.startObject("as_string")
.field("type", "string")
.field("index", "not_analyzed")
.field("type", "keyword")
.endObject()
.endObject()
.endObject().endObject()

@ -69,13 +69,13 @@ public class MultiFieldCopyToMapperTests extends ESTestCase {
.startObject("type")
.startObject("properties")
.startObject("a")
.field("type", "string")
.field("type", "text")
.endObject()
.startObject("b")
.field("type", "string")
.field("type", "text")
.startObject("fields")
.startObject("c")
.field("type", "string")
.field("type", "text")
.field("copy_to", "a")
.endObject()
.endObject()
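
The mapping exercised in MultiFieldCopyToMapperTests above pairs the new text type with multi-fields and copy_to. A minimal sketch of that mapping shape, not part of the commit and with illustrative class and field names:

import org.elasticsearch.common.xcontent.XContentBuilder;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

public class CopyToMappingSketch {
    // Builds a "text" field "b" whose multi-field "b.c" copies its values into the sibling "text" field "a".
    public static XContentBuilder mapping() throws Exception {
        return jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                    .startObject("a")
                        .field("type", "text")
                    .endObject()
                    .startObject("b")
                        .field("type", "text")
                        .startObject("fields")
                            .startObject("c")
                                .field("type", "text")
                                .field("copy_to", "a")
                            .endObject()
                        .endObject()
                    .endObject()
                .endObject()
                .endObject().endObject();
    }
}
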
@ -0,0 +1,389 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper.core;
|
||||
|
||||
import org.apache.lucene.index.DocValuesType;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.IndexableField;
|
||||
import org.apache.lucene.index.IndexableFieldType;
|
||||
import org.apache.lucene.index.LeafReader;
|
||||
import org.apache.lucene.index.PostingsEnum;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.index.TermsEnum;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.engine.Engine;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.DocumentMapperParser;
|
||||
import org.elasticsearch.index.mapper.MapperService.MergeReason;
|
||||
import org.elasticsearch.index.mapper.ParsedDocument;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
||||
|
||||
IndexService indexService;
|
||||
DocumentMapperParser parser;
|
||||
|
||||
@Before
|
||||
public void before() {
|
||||
indexService = createIndex("test");
|
||||
parser = indexService.mapperService().documentMapperParser();
|
||||
}
|
||||
|
||||
public void testDefaults() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "text").endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", "1234")
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
IndexableField[] fields = doc.rootDoc().getFields("field");
|
||||
assertEquals(1, fields.length);
|
||||
|
||||
assertEquals("1234", fields[0].stringValue());
|
||||
IndexableFieldType fieldType = fields[0].fieldType();
|
||||
assertThat(fieldType.omitNorms(), equalTo(false));
|
||||
assertTrue(fieldType.tokenized());
|
||||
assertFalse(fieldType.stored());
|
||||
assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS));
|
||||
assertThat(fieldType.storeTermVectors(), equalTo(false));
|
||||
assertThat(fieldType.storeTermVectorOffsets(), equalTo(false));
|
||||
assertThat(fieldType.storeTermVectorPositions(), equalTo(false));
|
||||
assertThat(fieldType.storeTermVectorPayloads(), equalTo(false));
|
||||
assertEquals(DocValuesType.NONE, fieldType.docValuesType());
|
||||
}
|
||||
|
||||
public void testEnableStore() throws IOException {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "text").field("store", true).endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", "1234")
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
IndexableField[] fields = doc.rootDoc().getFields("field");
|
||||
assertEquals(1, fields.length);
|
||||
assertTrue(fields[0].fieldType().stored());
|
||||
}
|
||||
|
||||
public void testDisableIndex() throws IOException {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "text").field("index", false).endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", "1234")
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
IndexableField[] fields = doc.rootDoc().getFields("field");
|
||||
assertEquals(0, fields.length);
|
||||
}
|
||||
|
||||
public void testDisableNorms() throws IOException {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field")
|
||||
.field("type", "text")
|
||||
.startObject("norms")
|
||||
.field("enabled", false)
|
||||
.endObject()
|
||||
.endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", "1234")
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
IndexableField[] fields = doc.rootDoc().getFields("field");
|
||||
assertEquals(1, fields.length);
|
||||
assertTrue(fields[0].fieldType().omitNorms());
|
||||
}
|
||||
|
||||
public void testIndexOptions() throws IOException {
|
||||
Map<String, IndexOptions> supportedOptions = new HashMap<>();
|
||||
supportedOptions.put("docs", IndexOptions.DOCS);
|
||||
supportedOptions.put("freqs", IndexOptions.DOCS_AND_FREQS);
|
||||
supportedOptions.put("positions", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
|
||||
supportedOptions.put("offsets", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
|
||||
|
||||
XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties");
|
||||
for (String option : supportedOptions.keySet()) {
|
||||
mappingBuilder.startObject(option).field("type", "text").field("index_options", option).endObject();
|
||||
}
|
||||
String mapping = mappingBuilder.endObject().endObject().endObject().string();
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
XContentBuilder jsonDoc = XContentFactory.jsonBuilder().startObject();
|
||||
for (String option : supportedOptions.keySet()) {
|
||||
jsonDoc.field(option, "1234");
|
||||
}
|
||||
ParsedDocument doc = mapper.parse("test", "type", "1", jsonDoc.endObject().bytes());
|
||||
|
||||
for (Map.Entry<String, IndexOptions> entry : supportedOptions.entrySet()) {
|
||||
String field = entry.getKey();
|
||||
IndexOptions options = entry.getValue();
|
||||
IndexableField[] fields = doc.rootDoc().getFields(field);
|
||||
assertEquals(1, fields.length);
|
||||
assertEquals(options, fields[0].fieldType().indexOptions());
|
||||
}
|
||||
}
|
||||
|
||||
public void testDefaultPositionIncrementGap() throws IOException {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "text").endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapper mapper = indexService.mapperService().merge("type",
|
||||
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false);
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", new String[] {"a", "b"})
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
IndexableField[] fields = doc.rootDoc().getFields("field");
|
||||
assertEquals(2, fields.length);
|
||||
|
||||
assertEquals("a", fields[0].stringValue());
|
||||
assertEquals("b", fields[1].stringValue());
|
||||
|
||||
IndexShard shard = indexService.getShard(0);
|
||||
shard.index(new Engine.Index(new Term("_uid", "1"), doc));
|
||||
shard.refresh("test");
|
||||
try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
|
||||
LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader();
|
||||
TermsEnum terms = leaf.terms("field").iterator();
|
||||
assertTrue(terms.seekExact(new BytesRef("b")));
|
||||
PostingsEnum postings = terms.postings(null, PostingsEnum.POSITIONS);
|
||||
assertEquals(0, postings.nextDoc());
|
||||
assertEquals(TextFieldMapper.Defaults.POSITION_INCREMENT_GAP + 1, postings.nextPosition());
|
||||
}
|
||||
}
|
||||
|
||||
public void testPositionIncrementGap() throws IOException {
|
||||
final int positionIncrementGap = randomIntBetween(1, 1000);
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field")
|
||||
.field("type", "text")
|
||||
.field("position_increment_gap", positionIncrementGap)
|
||||
.endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapper mapper = indexService.mapperService().merge("type",
|
||||
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false);
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", new String[] {"a", "b"})
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
IndexableField[] fields = doc.rootDoc().getFields("field");
|
||||
assertEquals(2, fields.length);
|
||||
|
||||
assertEquals("a", fields[0].stringValue());
|
||||
assertEquals("b", fields[1].stringValue());
|
||||
|
||||
IndexShard shard = indexService.getShard(0);
|
||||
shard.index(new Engine.Index(new Term("_uid", "1"), doc));
|
||||
shard.refresh("test");
|
||||
try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
|
||||
LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader();
|
||||
TermsEnum terms = leaf.terms("field").iterator();
|
||||
assertTrue(terms.seekExact(new BytesRef("b")));
|
||||
PostingsEnum postings = terms.postings(null, PostingsEnum.POSITIONS);
|
||||
assertEquals(0, postings.nextDoc());
|
||||
assertEquals(positionIncrementGap + 1, postings.nextPosition());
|
||||
}
|
||||
}
|
||||
|
||||
public void testSearchAnalyzerSerialization() throws IOException {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("field")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "standard")
|
||||
.field("search_analyzer", "keyword")
|
||||
.endObject()
|
||||
.endObject().endObject().endObject().string();
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
// special case: default index analyzer
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("field")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "default")
|
||||
.field("search_analyzer", "keyword")
|
||||
.endObject()
|
||||
.endObject().endObject().endObject().string();
|
||||
|
||||
mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
}
|
||||
|
||||
public void testSearchQuoteAnalyzerSerialization() throws IOException {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("field")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "standard")
|
||||
.field("search_analyzer", "standard")
|
||||
.field("search_quote_analyzer", "keyword")
|
||||
.endObject()
|
||||
.endObject().endObject().endObject().string();
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
// special case: default index/search analyzer
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("field")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "default")
|
||||
.field("search_analyzer", "default")
|
||||
.field("search_quote_analyzer", "keyword")
|
||||
.endObject()
|
||||
.endObject().endObject().endObject().string();
|
||||
|
||||
mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
}
|
||||
|
||||
public void testTermVectors() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("field1")
|
||||
.field("type", "text")
|
||||
.field("term_vector", "no")
|
||||
.endObject()
|
||||
.startObject("field2")
|
||||
.field("type", "text")
|
||||
.field("term_vector", "yes")
|
||||
.endObject()
|
||||
.startObject("field3")
|
||||
.field("type", "text")
|
||||
.field("term_vector", "with_offsets")
|
||||
.endObject()
|
||||
.startObject("field4")
|
||||
.field("type", "text")
|
||||
.field("term_vector", "with_positions")
|
||||
.endObject()
|
||||
.startObject("field5")
|
||||
.field("type", "text")
|
||||
.field("term_vector", "with_positions_offsets")
|
||||
.endObject()
|
||||
.startObject("field6")
|
||||
.field("type", "text")
|
||||
.field("term_vector", "with_positions_offsets_payloads")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field1", "1234")
|
||||
.field("field2", "1234")
|
||||
.field("field3", "1234")
|
||||
.field("field4", "1234")
|
||||
.field("field5", "1234")
|
||||
.field("field6", "1234")
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectors(), equalTo(false));
|
||||
assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorOffsets(), equalTo(false));
|
||||
assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPositions(), equalTo(false));
|
||||
assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPayloads(), equalTo(false));
|
||||
|
||||
assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectors(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorOffsets(), equalTo(false));
|
||||
assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPositions(), equalTo(false));
|
||||
assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPayloads(), equalTo(false));
|
||||
|
||||
assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectors(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorOffsets(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPositions(), equalTo(false));
|
||||
assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPayloads(), equalTo(false));
|
||||
|
||||
assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectors(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorOffsets(), equalTo(false));
|
||||
assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPositions(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPayloads(), equalTo(false));
|
||||
|
||||
assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectors(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorOffsets(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPositions(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPayloads(), equalTo(false));
|
||||
|
||||
assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectors(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorOffsets(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPositions(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPayloads(), equalTo(true));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,29 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;

public class TextFieldTypeTests extends FieldTypeTestCase {
    @Override
    protected MappedFieldType createDefaultFieldType() {
        return new TextFieldMapper.TextFieldType();
    }
}
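
TextFieldTypeTests and the larger TextFieldMapperTests above exercise the options the new field type accepts (store, analyzers, index_options, position_increment_gap, term vectors). A minimal sketch collecting them on one illustrative field, not part of the commit:

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class TextFieldOptionsSketch {
    // One "text" field using the options covered by the tests above; the values are illustrative.
    public static XContentBuilder mapping() throws Exception {
        return XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("body")
                    .field("type", "text")
                    .field("store", true)                        // also keep the original value retrievable
                    .field("analyzer", "standard")               // index-time analysis
                    .field("search_analyzer", "keyword")         // query-time analysis
                    .field("index_options", "offsets")           // docs, freqs, positions and offsets
                    .field("position_increment_gap", 100)        // gap inserted between array values
                    .field("term_vector", "with_positions_offsets")
                .endObject().endObject()
                .endObject().endObject();
    }
}
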
@ -36,6 +36,7 @@ import org.elasticsearch.index.mapper.ParseContext;
|
|||
import org.elasticsearch.index.mapper.core.BinaryFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.StringFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.TextFieldMapper;
|
||||
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
|
||||
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
|
||||
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
|
||||
|
@ -81,7 +82,7 @@ public class ExternalMapper extends FieldMapper {
|
|||
public Builder(String name, String generatedValue, String mapperName) {
|
||||
super(name, new ExternalFieldType(), new ExternalFieldType());
|
||||
this.builder = this;
|
||||
this.stringBuilder = new StringFieldMapper.Builder(name).store(false);
|
||||
this.stringBuilder = new TextFieldMapper.Builder(name).store(false);
|
||||
this.generatedValue = generatedValue;
|
||||
this.mapperName = mapperName;
|
||||
}
|
||||
|
@ -224,7 +225,7 @@ public class ExternalMapper extends FieldMapper {
|
|||
BooleanFieldMapper boolMapperUpdate = (BooleanFieldMapper) boolMapper.updateFieldType(fullNameToFieldType);
|
||||
GeoPointFieldMapper pointMapperUpdate = (GeoPointFieldMapper) pointMapper.updateFieldType(fullNameToFieldType);
|
||||
GeoShapeFieldMapper shapeMapperUpdate = (GeoShapeFieldMapper) shapeMapper.updateFieldType(fullNameToFieldType);
|
||||
StringFieldMapper stringMapperUpdate = (StringFieldMapper) stringMapper.updateFieldType(fullNameToFieldType);
|
||||
TextFieldMapper stringMapperUpdate = (TextFieldMapper) stringMapper.updateFieldType(fullNameToFieldType);
|
||||
if (update == this
|
||||
&& multiFieldsUpdate == multiFields
|
||||
&& binMapperUpdate == binMapper
|
||||
|
|
|
@ -88,7 +88,7 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase {
|
|||
.field("type", ExternalMapperPlugin.EXTERNAL_UPPER)
|
||||
.startObject("fields")
|
||||
.startObject("g")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("store", true)
|
||||
.startObject("fields")
|
||||
.startObject("raw")
|
||||
|
|
|
@ -31,7 +31,7 @@ import org.elasticsearch.index.mapper.DocumentMapperParser;
|
|||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.ParsedDocument;
|
||||
import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.StringFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.TextFieldMapper;
|
||||
import org.elasticsearch.indices.mapper.MapperRegistry;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
|
@ -106,7 +106,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
|
|||
IndexService indexService = createIndex("test", settings);
|
||||
Map<String, Mapper.TypeParser> mapperParsers = new HashMap<>();
|
||||
mapperParsers.put(ExternalMapperPlugin.EXTERNAL, new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "foo"));
|
||||
mapperParsers.put(StringFieldMapper.CONTENT_TYPE, new StringFieldMapper.TypeParser());
|
||||
mapperParsers.put(TextFieldMapper.CONTENT_TYPE, new TextFieldMapper.TypeParser());
|
||||
mapperParsers.put(KeywordFieldMapper.CONTENT_TYPE, new KeywordFieldMapper.TypeParser());
|
||||
MapperRegistry mapperRegistry = new MapperRegistry(mapperParsers, Collections.emptyMap());
|
||||
|
||||
|
@ -119,7 +119,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
|
|||
.field("type", ExternalMapperPlugin.EXTERNAL)
|
||||
.startObject("fields")
|
||||
.startObject("field")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("store", true)
|
||||
.startObject("fields")
|
||||
.startObject("raw")
|
||||
|
@ -165,7 +165,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
|
|||
Map<String, Mapper.TypeParser> mapperParsers = new HashMap<>();
|
||||
mapperParsers.put(ExternalMapperPlugin.EXTERNAL, new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "foo"));
|
||||
mapperParsers.put(ExternalMapperPlugin.EXTERNAL_BIS, new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "bar"));
|
||||
mapperParsers.put(StringFieldMapper.CONTENT_TYPE, new StringFieldMapper.TypeParser());
|
||||
mapperParsers.put(TextFieldMapper.CONTENT_TYPE, new TextFieldMapper.TypeParser());
|
||||
MapperRegistry mapperRegistry = new MapperRegistry(mapperParsers, Collections.emptyMap());
|
||||
|
||||
DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
|
||||
|
@ -177,18 +177,18 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
|
|||
.field("type", ExternalMapperPlugin.EXTERNAL)
|
||||
.startObject("fields")
|
||||
.startObject("field")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.startObject("fields")
|
||||
.startObject("generated")
|
||||
.field("type", ExternalMapperPlugin.EXTERNAL_BIS)
|
||||
.endObject()
|
||||
.startObject("raw")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.startObject("raw")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
|
|
|
@ -30,7 +30,6 @@ import org.elasticsearch.index.mapper.DocumentMapperParser;
|
|||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.Mapping;
|
||||
import org.elasticsearch.index.mapper.ParsedDocument;
|
||||
import org.elasticsearch.index.mapper.core.StringFieldMapper;
|
||||
import org.elasticsearch.index.mapper.object.ObjectMapper;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
|
||||
|
@ -49,12 +48,12 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
|
|||
public void test1Merge() throws Exception {
|
||||
|
||||
String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
|
||||
.startObject("name").field("type", "string").endObject()
|
||||
.startObject("name").field("type", "text").endObject()
|
||||
.endObject().endObject().endObject().string();
|
||||
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
|
||||
DocumentMapper stage1 = parser.parse("person", new CompressedXContent(stage1Mapping));
|
||||
String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
|
||||
.startObject("name").field("type", "string").endObject()
|
||||
.startObject("name").field("type", "text").endObject()
|
||||
.startObject("age").field("type", "integer").endObject()
|
||||
.startObject("obj1").startObject("properties").startObject("prop1").field("type", "integer").endObject().endObject().endObject()
|
||||
.endObject().endObject().endObject().string();
|
||||
|
@ -112,10 +111,10 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
|
|||
public void testMergeSearchAnalyzer() throws Exception {
|
||||
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
|
||||
String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject()
|
||||
.startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("search_analyzer", "keyword").endObject().endObject()
|
||||
.startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").field("search_analyzer", "keyword").endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapper existing = parser.parse("type", new CompressedXContent(mapping1));
|
||||
|
@ -130,10 +129,10 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
|
|||
public void testChangeSearchAnalyzerToDefault() throws Exception {
|
||||
MapperService mapperService = createIndex("test").mapperService();
|
||||
String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject()
|
||||
.startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("ignore_above", 14).endObject().endObject()
|
||||
.startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapper existing = mapperService.merge("type", new CompressedXContent(mapping1), MapperService.MergeReason.MAPPING_UPDATE, false);
|
||||
|
@ -142,7 +141,6 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
|
|||
assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace"));
|
||||
|
||||
assertThat(((NamedAnalyzer) merged.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("standard"));
|
||||
assertThat(((StringFieldMapper) (merged.mappers().getMapper("field"))).getIgnoreAbove(), equalTo(14));
|
||||
}
|
||||
|
||||
public void testConcurrentMergeTest() throws Throwable {
|
||||
|
@ -219,7 +217,7 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase {
|
|||
.startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("foo")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject().endObject().bytes());
|
||||
|
|
|
@ -35,7 +35,9 @@ import org.elasticsearch.index.mapper.MapperParsingException;
|
|||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.ParseContext.Document;
|
||||
import org.elasticsearch.index.mapper.core.DateFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.StringFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.TextFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.TokenCountFieldMapper;
|
||||
import org.elasticsearch.index.mapper.object.RootObjectMapper;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
|
@ -94,25 +96,25 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
|
|||
assertThat(f.stringValue(), equalTo("2010-01-01"));
|
||||
|
||||
assertThat(docMapper.mappers().getMapper("name"), notNullValue());
|
||||
assertThat(docMapper.mappers().getMapper("name"), instanceOf(StringFieldMapper.class));
|
||||
assertThat(docMapper.mappers().getMapper("name"), instanceOf(TextFieldMapper.class));
|
||||
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
|
||||
assertThat(docMapper.mappers().getMapper("name").fieldType().stored(), equalTo(true));
|
||||
assertThat(docMapper.mappers().getMapper("name").fieldType().tokenized(), equalTo(true));
|
||||
|
||||
assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue());
|
||||
assertThat(docMapper.mappers().getMapper("name.indexed"), instanceOf(StringFieldMapper.class));
|
||||
assertThat(docMapper.mappers().getMapper("name.indexed"), instanceOf(TextFieldMapper.class));
|
||||
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name.indexed").fieldType().indexOptions());
|
||||
assertThat(docMapper.mappers().getMapper("name.indexed").fieldType().stored(), equalTo(false));
|
||||
assertThat(docMapper.mappers().getMapper("name.indexed").fieldType().tokenized(), equalTo(true));
|
||||
|
||||
assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue());
|
||||
assertThat(docMapper.mappers().getMapper("name.not_indexed"), instanceOf(StringFieldMapper.class));
|
||||
assertThat(docMapper.mappers().getMapper("name.not_indexed"), instanceOf(TextFieldMapper.class));
|
||||
assertEquals(IndexOptions.NONE, docMapper.mappers().getMapper("name.not_indexed").fieldType().indexOptions());
|
||||
assertThat(docMapper.mappers().getMapper("name.not_indexed").fieldType().stored(), equalTo(true));
|
||||
assertThat(docMapper.mappers().getMapper("name.not_indexed").fieldType().tokenized(), equalTo(true));
|
||||
|
||||
assertThat(docMapper.mappers().getMapper("name.test1"), notNullValue());
|
||||
assertThat(docMapper.mappers().getMapper("name.test1"), instanceOf(StringFieldMapper.class));
|
||||
assertThat(docMapper.mappers().getMapper("name.test1"), instanceOf(TextFieldMapper.class));
|
||||
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name.test1").fieldType().indexOptions());
|
||||
assertThat(docMapper.mappers().getMapper("name.test1").fieldType().stored(), equalTo(true));
|
||||
assertThat(docMapper.mappers().getMapper("name.test1").fieldType().tokenized(), equalTo(true));
|
||||
|
@ -129,7 +131,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
|
|||
assertThat(docMapper.mappers().getMapper("object1.multi1"), notNullValue());
|
||||
assertThat(docMapper.mappers().getMapper("object1.multi1"), instanceOf(DateFieldMapper.class));
|
||||
assertThat(docMapper.mappers().getMapper("object1.multi1.string"), notNullValue());
|
||||
assertThat(docMapper.mappers().getMapper("object1.multi1.string"), instanceOf(StringFieldMapper.class));
|
||||
assertThat(docMapper.mappers().getMapper("object1.multi1.string"), instanceOf(KeywordFieldMapper.class));
|
||||
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("object1.multi1.string").fieldType().indexOptions());
|
||||
assertThat(docMapper.mappers().getMapper("object1.multi1.string").fieldType().tokenized(), equalTo(false));
|
||||
}
|
||||
|
@ -139,8 +141,8 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
|
|||
|
||||
DocumentMapper builderDocMapper = new DocumentMapper.Builder(new RootObjectMapper.Builder("person").add(
|
||||
new StringFieldMapper.Builder("name").store(true)
|
||||
.addMultiField(new StringFieldMapper.Builder("indexed").index(true).tokenized(true))
|
||||
.addMultiField(new StringFieldMapper.Builder("not_indexed").index(false).store(true))
|
||||
.addMultiField(new TextFieldMapper.Builder("indexed").index(true).tokenized(true))
|
||||
.addMultiField(new TextFieldMapper.Builder("not_indexed").index(false).store(true))
|
||||
), indexService.mapperService()).build(indexService.mapperService());
|
||||
|
||||
String builtMapping = builderDocMapper.mappingSource().string();
|
||||
|
@ -181,9 +183,9 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
|
|||
}
|
||||
|
||||
XContentBuilder builder = jsonBuilder().startObject().startObject("type").startObject("properties")
|
||||
.startObject("my_field").field("type", "string").startObject("fields");
|
||||
.startObject("my_field").field("type", "text").startObject("fields");
|
||||
for (String multiFieldName : multiFieldNames) {
|
||||
builder = builder.startObject(multiFieldName).field("type", "string").endObject();
|
||||
builder = builder.startObject(multiFieldName).field("type", "text").endObject();
|
||||
}
|
||||
builder = builder.endObject().endObject().endObject().endObject().endObject();
|
||||
String mapping = builder.string();
|
||||
|
@ -218,8 +220,8 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
|
|||
|
||||
// Generate a mapping with the a random subset of possible fielddata settings
|
||||
XContentBuilder builder = jsonBuilder().startObject().startObject("type").startObject("properties")
|
||||
.startObject("my_field").field("type", "string").startObject("fields").startObject(MY_MULTI_FIELD)
|
||||
.field("type", "string").startObject("fielddata");
|
||||
.startObject("my_field").field("type", "text").startObject("fields").startObject(MY_MULTI_FIELD)
|
||||
.field("type", "text").startObject("fielddata");
|
||||
String[] keys = possibleSettings.keySet().toArray(new String[]{});
|
||||
Collections.shuffle(Arrays.asList(keys), random());
|
||||
for(int i = randomIntBetween(0, possibleSettings.size()-1); i >= 0; --i)
|
||||
|
@ -235,7 +237,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
|
|||
|
||||
public void testObjectFieldNotAllowed() throws Exception {
|
||||
String mapping = jsonBuilder().startObject().startObject("type").startObject("properties").startObject("my_field")
|
||||
.field("type", "string").startObject("fields").startObject("multi").field("type", "object").endObject().endObject()
|
||||
.field("type", "text").startObject("fields").startObject("multi").field("type", "object").endObject().endObject()
|
||||
.endObject().endObject().endObject().endObject().string();
|
||||
final DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
|
||||
try {
|
||||
|
@ -248,7 +250,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
|
|||
|
||||
public void testNestedFieldNotAllowed() throws Exception {
|
||||
String mapping = jsonBuilder().startObject().startObject("type").startObject("properties").startObject("my_field")
|
||||
.field("type", "string").startObject("fields").startObject("multi").field("type", "nested").endObject().endObject()
|
||||
.field("type", "text").startObject("fields").startObject("multi").field("type", "nested").endObject().endObject()
|
||||
.endObject().endObject().endObject().endObject().string();
|
||||
final DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
|
||||
try {
|
||||
|
@ -265,10 +267,10 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
|
|||
.startObject("my_type")
|
||||
.startObject("properties")
|
||||
.startObject("city")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.startObject("fields")
|
||||
.startObject("raw.foo")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("index", "not_analyzed")
|
||||
.endObject()
|
||||
.endObject()
|
||||
|
|
|
@ -234,7 +234,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
|
|||
return XContentFactory.jsonBuilder().startObject().startObject("my-type")
|
||||
.startObject("properties")
|
||||
.startObject("title")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.startObject("fields")
|
||||
.startObject("not_analyzed")
|
||||
.field("type", "keyword")
|
||||
|
@ -249,10 +249,10 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
|
|||
return XContentFactory.jsonBuilder().startObject().startObject("my-type")
|
||||
.startObject("properties")
|
||||
.startObject("title")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.startObject("fields")
|
||||
.startObject("uncased")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "whitespace")
|
||||
.endObject()
|
||||
.endObject()
|
||||
|
|
|
@@ -320,7 +320,7 @@ public class NestedMappingTests extends ESSingleNodeTestCase {
 public void testNestedArrayStrict() throws Exception {
 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
 .startObject("nested1").field("type", "nested").field("dynamic", "strict").startObject("properties")
-.startObject("field1").field("type", "string")
+.startObject("field1").field("type", "text")
 .endObject().endObject()
 .endObject().endObject().endObject().string();

@@ -69,8 +69,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
 .startObject("tweet")
 .startObject("properties")
 .startObject("name")
-.field("type", "string")
-.field("index", "analyzed")
+.field("type", "text")
 .startArray("fields")
 .endArray()
 .endObject()
@@ -87,8 +86,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
 .startObject("tweet")
 .startObject("properties")
 .startObject("name")
-.field("type", "string")
-.field("index", "analyzed")
+.field("type", "text")
 .startArray("fields")
 .startObject().field("test", "string").endObject()
 .startObject().field("test2", "string").endObject()
@@ -148,8 +146,7 @@ public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
 .startObject("tweet")
 .startObject("properties")
 .startObject("name")
-.field("type", "string")
-.field("index", "analyzed")
+.field("type", "text")
 .startObject("fields")
 .startObject("raw")
 .field("type", "keyword")
@@ -28,7 +28,7 @@ import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParseContext.Document;
-import org.elasticsearch.index.mapper.core.StringFieldMapper;
+import org.elasticsearch.index.mapper.core.TextFieldMapper;
 import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.index.mapper.object.RootObjectMapper;
 import org.elasticsearch.index.mapper.Uid;
@@ -48,7 +48,7 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
 IndexService indexService = createIndex("test");
 DocumentMapper docMapper = new DocumentMapper.Builder(
 new RootObjectMapper.Builder("person")
-.add(new ObjectMapper.Builder("name").add(new StringFieldMapper.Builder("first").store(true).index(false))),
+.add(new ObjectMapper.Builder("name").add(new TextFieldMapper.Builder("first").store(true).index(false))),
 indexService.mapperService()).build(indexService.mapperService());

 BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
@@ -108,7 +108,7 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
 IndexService indexService = createIndex("test");
 DocumentMapper docMapper = new DocumentMapper.Builder(
 new RootObjectMapper.Builder("person")
-.add(new ObjectMapper.Builder("name").add(new StringFieldMapper.Builder("first").store(true).index(false))),
+.add(new ObjectMapper.Builder("name").add(new TextFieldMapper.Builder("first").store(true).index(false))),
 indexService.mapperService()).build(indexService.mapperService());

 BytesReference json = new BytesArray("".getBytes(StandardCharsets.UTF_8));
@@ -124,7 +124,7 @@ public class SimpleMapperTests extends ESSingleNodeTestCase {
 IndexService indexService = createIndex("test");
 DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
-.startObject("foo.bar").field("type", "string").endObject()
+.startObject("foo.bar").field("type", "text").endObject()
 .endObject().endObject().string();
 try {
 mapperParser.parse("type", new CompressedXContent(mapping));
@@ -259,12 +259,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
 // Cases where search_quote_analyzer should be present.
 mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties")
-.startObject("field1")
-.field("type", "string")
-.field("position_increment_gap", 1000)
-.field("search_quote_analyzer", "simple")
-.endObject()
-.startObject("field2")
+.startObject("field")
 .field("type", "string")
 .field("position_increment_gap", 1000)
 .field("analyzer", "standard")
@@ -275,11 +270,9 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
 .endObject().endObject().string();

 mapper = parser.parse("type", new CompressedXContent(mapping));
-for (String fieldName : Arrays.asList("field1", "field2")) {
-Map<String, Object> serializedMap = getSerializedMap(fieldName, mapper);
+Map<String, Object> serializedMap = getSerializedMap("field", mapper);
 assertEquals(serializedMap.get("search_quote_analyzer"), "simple");
-}
 }

 public void testSearchAnalyzerSerialization() throws IOException {
 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
@@ -84,14 +84,14 @@ public class TTLMappingTests extends ESSingleNodeTestCase {

 public void testThatEnablingTTLFieldOnMergeWorks() throws Exception {
 String mappingWithoutTtl = XContentFactory.jsonBuilder().startObject().startObject("type")
-.startObject("properties").field("field").startObject().field("type", "string").endObject().endObject()
+.startObject("properties").field("field").startObject().field("type", "text").endObject().endObject()
 .endObject().endObject().string();

 String mappingWithTtl = XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("_ttl")
 .field("enabled", "yes")
 .endObject()
-.startObject("properties").field("field").startObject().field("type", "string").endObject().endObject()
+.startObject("properties").field("field").startObject().field("type", "text").endObject().endObject()
 .endObject().endObject().string();

 MapperService mapperService = createIndex("test").mapperService();
@@ -107,14 +107,14 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
 .startObject("_ttl")
 .field("enabled", "yes")
 .endObject()
-.startObject("properties").field("field").startObject().field("type", "string").endObject().endObject()
+.startObject("properties").field("field").startObject().field("type", "text").endObject().endObject()
 .endObject().endObject().string();

 String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("_ttl")
 .field("default", "1w")
 .endObject()
-.startObject("properties").field("field").startObject().field("type", "string").endObject().endObject()
+.startObject("properties").field("field").startObject().field("type", "text").endObject().endObject()
 .endObject().endObject().string();

 MapperService mapperService = createIndex("test").mapperService();
@@ -185,18 +185,18 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
 XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
 indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
 CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
-assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
+assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"text\"}}}}")));
 }

 public void testMergeWithOnlyDefaultSetTtlDisabled() throws Exception {
 XContentBuilder mappingWithTtlEnabled = getMappingWithTtlDisabled("7d");
 IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtlEnabled);
 CompressedXContent mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource();
-assertThat(mappingAfterCreation, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
+assertThat(mappingAfterCreation, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"text\"}}}}")));
 XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
 indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
 CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
-assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
+assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"text\"}}}}")));
 }

 public void testIncludeInObjectNotAllowed() throws Exception {
@@ -230,7 +230,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
 mapping.field("default", defaultValue);
 }
 return mapping.endObject()
-.startObject("properties").field("field").startObject().field("type", "string").endObject().endObject()
+.startObject("properties").field("field").startObject().field("type", "text").endObject().endObject()
 .endObject().endObject();
 }

@@ -242,14 +242,14 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
 mapping.field("default", defaultValue);
 }
 return mapping.endObject()
-.startObject("properties").field("field").startObject().field("type", "string").endObject().endObject()
+.startObject("properties").field("field").startObject().field("type", "text").endObject().endObject()
 .endObject().endObject();
 }

 private org.elasticsearch.common.xcontent.XContentBuilder getMappingWithOnlyTtlDefaultSet(String defaultValue) throws IOException {
 return XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("_ttl").field("default", defaultValue).endObject()
-.startObject("properties").field("field").startObject().field("type", "string").endObject().endObject()
+.startObject("properties").field("field").startObject().field("type", "text").endObject().endObject()
 .endObject().endObject();
 }
 }
@@ -40,7 +40,7 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {

 public void testAllEnabled() throws Exception {
 XContentBuilder mapping = jsonBuilder().startObject().startObject("mappings").startObject(TYPE).startObject("_all").field("enabled", "false").endObject().endObject().endObject().endObject();
-XContentBuilder mappingUpdate = jsonBuilder().startObject().startObject("_all").field("enabled", "true").endObject().startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject();
+XContentBuilder mappingUpdate = jsonBuilder().startObject().startObject("_all").field("enabled", "true").endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject();
 String errorMessage = "[_all] enabled is false now encountering true";
 testConflict(mapping.string(), mappingUpdate.string(), errorMessage);
 }
@@ -63,7 +63,7 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {

 public void testAllDisabled() throws Exception {
 XContentBuilder mapping = jsonBuilder().startObject().startObject("mappings").startObject(TYPE).startObject("_all").field("enabled", true).endObject().endObject().endObject().endObject();
-XContentBuilder mappingUpdate = jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject();
+XContentBuilder mappingUpdate = jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject();
 String errorMessage = "[_all] enabled is true now encountering false";
 testConflict(mapping.string(), mappingUpdate.string(), errorMessage);
 }
@@ -84,7 +84,7 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
 String docMappingUpdate = jsonBuilder().startObject().startObject("doc")
 .startObject("properties")
 .startObject("text")
-.field("type", "string")
+.field("type", "text")
 .endObject()
 .endObject()
 .endObject()
@@ -125,7 +125,7 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
 }

 public void testDocValuesInvalidMappingOnUpdate() throws Exception {
-String mapping = jsonBuilder().startObject().startObject(TYPE).startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject().string();
+String mapping = jsonBuilder().startObject().startObject(TYPE).startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject().string();
 prepareCreate(INDEX).addMapping(TYPE, mapping).get();
 String mappingUpdate = jsonBuilder().startObject().startObject(TYPE).startObject("_all").startObject("fielddata").field("format", "doc_values").endObject().endObject().endObject().endObject().string();
 GetMappingsResponse mappingsBeforeUpdateResponse = client().admin().indices().prepareGetMappings(INDEX).addTypes(TYPE).get();
@@ -51,33 +51,33 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {

 public void testAllEnabledAfterDisabled() throws Exception {
 XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().endObject();
-XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", true).endObject().startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject();
+XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", true).endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject();
 testConflictWhileMergingAndMappingUnchanged(mapping, mappingUpdate);
 }

 public void testAllDisabledAfterEnabled() throws Exception {
 XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", true).endObject().endObject();
-XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject();
+XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject();
 testConflictWhileMergingAndMappingUnchanged(mapping, mappingUpdate);
 }

 public void testAllDisabledAfterDefaultEnabled() throws Exception {
-XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").startObject("some_text").field("type", "string").endObject().endObject().endObject();
-XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject();
+XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").startObject("some_text").field("type", "text").endObject().endObject().endObject();
+XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject();
 testConflictWhileMergingAndMappingUnchanged(mapping, mappingUpdate);
 }

 public void testAllEnabledAfterEnabled() throws Exception {
 XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", true).endObject().endObject();
-XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", true).endObject().startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject();
-XContentBuilder expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_all").field("enabled", true).endObject().startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject().endObject();
+XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", true).endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject();
+XContentBuilder expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_all").field("enabled", true).endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject().endObject();
 testNoConflictWhileMergingAndMappingChanged(mapping, mappingUpdate, expectedMapping);
 }

 public void testAllDisabledAfterDisabled() throws Exception {
 XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().endObject();
-XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject();
-XContentBuilder expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject().endObject();
+XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject();
+XContentBuilder expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_all").field("enabled", false).endObject().startObject("properties").startObject("text").field("type", "text").endObject().endObject().endObject().endObject();
 testNoConflictWhileMergingAndMappingChanged(mapping, mappingUpdate, expectedMapping);
 }

@@ -97,7 +97,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
 .endObject().endObject();
 XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject(fieldName).field("enabled", true).field("store", "yes").endObject()
-.startObject("properties").startObject("text").field("type", "string").endObject().endObject()
+.startObject("properties").startObject("text").field("type", "text").endObject().endObject()
 .endObject().endObject();
 testConflictWhileMergingAndMappingUnchanged(mapping, mappingUpdate);
 }
@@ -212,7 +212,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {

 public void testReuseMetaField() throws IOException {
 XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-.startObject("properties").startObject("_id").field("type", "string").endObject()
+.startObject("properties").startObject("_id").field("type", "text").endObject()
 .endObject().endObject().endObject();
 MapperService mapperService = createIndex("test", Settings.settingsBuilder().build()).mapperService();

@@ -233,7 +233,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {

 public void testReuseMetaFieldBackCompat() throws IOException {
 XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-.startObject("properties").startObject("_id").field("type", "string").endObject()
+.startObject("properties").startObject("_id").field("type", "text").endObject()
 .endObject().endObject().endObject();
 // the logic is different for 2.x indices since they record some meta mappers (including _id)
 // in the root object
@@ -44,7 +44,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
 mapperService = indexService.mapperService();

 String mapper = XContentFactory.jsonBuilder().startObject().startObject("type")
-.startObject("properties").startObject("field").field("type", "string").endObject().endObject()
+.startObject("properties").startObject("field").field("type", "text").endObject().endObject()
 .endObject().endObject().string();
 mapperService.merge("type", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE, true);

@@ -293,8 +293,8 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
 for (int i = 0; i < currentTypes.length; i++) {
 String type = randomAsciiOfLengthBetween(1, 10);
 mapperService.merge(type, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(type,
-STRING_FIELD_NAME, "type=string",
-STRING_FIELD_NAME_2, "type=string",
+STRING_FIELD_NAME, "type=text",
+STRING_FIELD_NAME_2, "type=text",
 INT_FIELD_NAME, "type=integer",
 DOUBLE_FIELD_NAME, "type=double",
 BOOLEAN_FIELD_NAME, "type=boolean",
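The hunk above and the query-builder test fixtures that follow all switch their simplified mapping definitions from type=string to type=text. As a reference for that shorthand, a small hypothetical sketch using the same PutMappingRequest helper these tests call (the type and field names are placeholders, and the exact return type is assumed from the usage shown in the hunks):

    import java.io.IOException;
    import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
    import org.elasticsearch.common.xcontent.XContentBuilder;

    public class SimplifiedDefSketch {
        // Each field is given as a name followed by a comma-separated property string,
        // mirroring the STRING_FIELD_NAME, "type=text" pairs in the tests above.
        public static String exampleMapping() throws IOException {
            XContentBuilder builder = PutMappingRequest.buildFromSimplifiedDef("some_type",
                    "message", "type=text",
                    "count", "type=integer");
            return builder.string();
        }
    }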
@@ -32,7 +32,7 @@ public class CommonTermsQueryParserTests extends ESSingleNodeTestCase {
 .admin()
 .indices()
 .prepareCreate(index)
-.addMapping(type, "name", "type=string,analyzer=stop")
+.addMapping(type, "name", "type=text,analyzer=stop")
 .execute()
 .actionGet();
 ensureGreen();
@@ -62,7 +62,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
 public static void before() throws Exception {
 MapperService mapperService = queryShardContext().getMapperService();
 mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
-STRING_FIELD_NAME, "type=string",
+STRING_FIELD_NAME, "type=text",
 INT_FIELD_NAME, "type=integer",
 DOUBLE_FIELD_NAME, "type=double",
 BOOLEAN_FIELD_NAME, "type=boolean",
@@ -71,7 +71,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
 ).string()), MapperService.MergeReason.MAPPING_UPDATE, false);
 mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
 "_parent", "type=" + PARENT_TYPE,
-STRING_FIELD_NAME, "type=string",
+STRING_FIELD_NAME, "type=text",
 INT_FIELD_NAME, "type=integer",
 DOUBLE_FIELD_NAME, "type=double",
 BOOLEAN_FIELD_NAME, "type=boolean",
@@ -57,7 +57,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
 public static void beforeClass() throws Exception {
 MapperService mapperService = queryShardContext().getMapperService();
 mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
-STRING_FIELD_NAME, "type=string",
+STRING_FIELD_NAME, "type=text",
 INT_FIELD_NAME, "type=integer",
 DOUBLE_FIELD_NAME, "type=double",
 BOOLEAN_FIELD_NAME, "type=boolean",
@@ -66,7 +66,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
 ).string()), MapperService.MergeReason.MAPPING_UPDATE, false);
 mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
 "_parent", "type=" + PARENT_TYPE,
-STRING_FIELD_NAME, "type=string",
+STRING_FIELD_NAME, "type=text",
 INT_FIELD_NAME, "type=integer",
 DOUBLE_FIELD_NAME, "type=double",
 BOOLEAN_FIELD_NAME, "type=boolean",
@@ -46,7 +46,7 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu
 super.setUp();
 MapperService mapperService = queryShardContext().getMapperService();
 mapperService.merge("nested_doc", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("nested_doc",
-STRING_FIELD_NAME, "type=string",
+STRING_FIELD_NAME, "type=text",
 INT_FIELD_NAME, "type=integer",
 DOUBLE_FIELD_NAME, "type=double",
 BOOLEAN_FIELD_NAME, "type=boolean",
@@ -42,7 +42,7 @@ public class ParentIdQueryBuilderTests extends AbstractQueryTestCase<ParentIdQue
 super.setUp();
 MapperService mapperService = queryShardContext().getMapperService();
 mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
-STRING_FIELD_NAME, "type=string",
+STRING_FIELD_NAME, "type=text",
 INT_FIELD_NAME, "type=integer",
 DOUBLE_FIELD_NAME, "type=double",
 BOOLEAN_FIELD_NAME, "type=boolean",
@@ -51,7 +51,7 @@ public class ParentIdQueryBuilderTests extends AbstractQueryTestCase<ParentIdQue
 ).string()), MapperService.MergeReason.MAPPING_UPDATE, false);
 mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
 "_parent", "type=" + PARENT_TYPE,
-STRING_FIELD_NAME, "type=string",
+STRING_FIELD_NAME, "type=text",
 INT_FIELD_NAME, "type=integer",
 DOUBLE_FIELD_NAME, "type=double",
 BOOLEAN_FIELD_NAME, "type=boolean",
@@ -24,7 +24,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.mapper.core.StringFieldMapper;
+import org.elasticsearch.index.mapper.core.TextFieldMapper;
 import org.elasticsearch.test.ESTestCase;

 import static org.hamcrest.Matchers.equalTo;
@@ -51,7 +51,7 @@ public class QueryShardContextTests extends ESTestCase {
 );

 context.setAllowUnmappedFields(false);
-MappedFieldType fieldType = new StringFieldMapper.StringFieldType();
+MappedFieldType fieldType = new TextFieldMapper.TextFieldType();
 MappedFieldType result = context.failIfFieldMappingNotFound("name", fieldType);
 assertThat(result, sameInstance(fieldType));
 try {
@@ -73,7 +73,7 @@ public class QueryShardContextTests extends ESTestCase {
 assertThat(result, sameInstance(fieldType));
 result = context.failIfFieldMappingNotFound("name", null);
 assertThat(result, notNullValue());
-assertThat(result, instanceOf(StringFieldMapper.StringFieldType.class));
+assertThat(result, instanceOf(TextFieldMapper.TextFieldType.class));
 assertThat(result.name(), equalTo("name"));
 }

@@ -1206,7 +1206,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
 .startObject("testtype")
 .startObject("properties")
 .startObject("foo")
-.field("type", "string")
+.field("type", "text")
 .endObject()
 .endObject().endObject().endObject()).get();
 ensureGreen();
@@ -1233,7 +1233,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
 .startObject("testtype")
 .startObject("properties")
 .startObject("foo")
-.field("type", "string")
+.field("type", "text")
 .endObject()
 .endObject().endObject().endObject()).get();
 ensureGreen();
@@ -1262,7 +1262,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
 .startObject("testtype")
 .startObject("properties")
 .startObject("foo")
-.field("type", "string")
+.field("type", "text")
 .endObject()
 .endObject().endObject().endObject()).get();
 ensureGreen();
@@ -70,7 +70,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
 public void testResolveSimilaritiesFromMapping_classic() throws IOException {
 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties")
-.startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject()
+.startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject()
 .endObject()
 .endObject().endObject().string();

@@ -89,7 +89,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
 public void testResolveSimilaritiesFromMapping_bm25() throws IOException {
 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties")
-.startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject()
+.startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject()
 .endObject()
 .endObject().endObject().string();

@@ -112,7 +112,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
 public void testResolveSimilaritiesFromMapping_DFR() throws IOException {
 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties")
-.startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject()
+.startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject()
 .endObject()
 .endObject().endObject().string();

@@ -137,7 +137,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
 public void testResolveSimilaritiesFromMapping_IB() throws IOException {
 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties")
-.startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject()
+.startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject()
 .endObject()
 .endObject().endObject().string();

@@ -162,7 +162,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
 public void testResolveSimilaritiesFromMapping_DFI() throws IOException {
 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties")
-.startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject()
+.startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject()
 .endObject()
 .endObject().endObject().string();

@@ -181,7 +181,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
 public void testResolveSimilaritiesFromMapping_LMDirichlet() throws IOException {
 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties")
-.startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject()
+.startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject()
 .endObject()
 .endObject().endObject().string();

@@ -200,7 +200,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
 public void testResolveSimilaritiesFromMapping_LMJelinekMercer() throws IOException {
 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties")
-.startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject()
+.startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject()
 .endObject()
 .endObject().endObject().string();

@@ -219,7 +219,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
 public void testResolveSimilaritiesFromMapping_Unknown() throws IOException {
 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties")
-.startObject("field1").field("type", "string").field("similarity", "unknown_similarity").endObject()
+.startObject("field1").field("type", "text").field("similarity", "unknown_similarity").endObject()
 .endObject()
 .endObject().endObject().string();

@@ -237,7 +237,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
 .startObject("properties")
 .startObject("field1")
 .field("similarity", "default")
-.field("type", "string")
+.field("type", "text")
 .endObject()
 .endObject()
 .endObject().string();
@@ -66,11 +66,11 @@ public class SuggestStatsIT extends ESIntegTestCase {
 assertAcked(prepareCreate("test1").setSettings(Settings.builder()
 .put(SETTING_NUMBER_OF_SHARDS, shardsIdx1)
 .put(SETTING_NUMBER_OF_REPLICAS, 0))
-.addMapping("type", "f", "type=string"));
+.addMapping("type", "f", "type=text"));
 assertAcked(prepareCreate("test2").setSettings(Settings.builder()
 .put(SETTING_NUMBER_OF_SHARDS, shardsIdx2)
 .put(SETTING_NUMBER_OF_REPLICAS, 0))
-.addMapping("type", "f", "type=string"));
+.addMapping("type", "f", "type=text"));
 assertThat(shardsIdx1 + shardsIdx2, equalTo(numAssignedShards("test1", "test2")));
 assertThat(numAssignedShards("test1", "test2"), greaterThanOrEqualTo(2));
 ensureGreen();
@@ -601,33 +601,33 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase {
 }

 public void testPutMapping() throws Exception {
-verify(client().admin().indices().preparePutMapping("foo").setType("type1").setSource("field", "type=string"), true);
-verify(client().admin().indices().preparePutMapping("_all").setType("type1").setSource("field", "type=string"), true);
+verify(client().admin().indices().preparePutMapping("foo").setType("type1").setSource("field", "type=text"), true);
+verify(client().admin().indices().preparePutMapping("_all").setType("type1").setSource("field", "type=text"), true);

 createIndex("foo", "foobar", "bar", "barbaz");
 ensureYellow();

-verify(client().admin().indices().preparePutMapping("foo").setType("type1").setSource("field", "type=string"), false);
+verify(client().admin().indices().preparePutMapping("foo").setType("type1").setSource("field", "type=text"), false);
 assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo").get("type1"), notNullValue());
-verify(client().admin().indices().preparePutMapping("b*").setType("type1").setSource("field", "type=string"), false);
+verify(client().admin().indices().preparePutMapping("b*").setType("type1").setSource("field", "type=text"), false);
 assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar").get("type1"), notNullValue());
 assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type1"), notNullValue());
-verify(client().admin().indices().preparePutMapping("_all").setType("type2").setSource("field", "type=string"), false);
+verify(client().admin().indices().preparePutMapping("_all").setType("type2").setSource("field", "type=text"), false);
 assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo").get("type2"), notNullValue());
 assertThat(client().admin().indices().prepareGetMappings("foobar").get().mappings().get("foobar").get("type2"), notNullValue());
 assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar").get("type2"), notNullValue());
 assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type2"), notNullValue());
-verify(client().admin().indices().preparePutMapping().setType("type3").setSource("field", "type=string"), false);
+verify(client().admin().indices().preparePutMapping().setType("type3").setSource("field", "type=text"), false);
 assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo").get("type3"), notNullValue());
 assertThat(client().admin().indices().prepareGetMappings("foobar").get().mappings().get("foobar").get("type3"), notNullValue());
 assertThat(client().admin().indices().prepareGetMappings("bar").get().mappings().get("bar").get("type3"), notNullValue());
 assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type3"), notNullValue());


-verify(client().admin().indices().preparePutMapping("c*").setType("type1").setSource("field", "type=string"), true);
+verify(client().admin().indices().preparePutMapping("c*").setType("type1").setSource("field", "type=text"), true);

 assertAcked(client().admin().indices().prepareClose("barbaz").get());
-verify(client().admin().indices().preparePutMapping("barbaz").setType("type4").setSource("field", "type=string"), false);
+verify(client().admin().indices().preparePutMapping("barbaz").setType("type4").setSource("field", "type=text"), false);
 assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type4"), notNullValue());
 }

@@ -72,7 +72,7 @@ public class PreBuiltAnalyzerIntegrationIT extends ESIntegTestCase {
 .startObject("type")
 .startObject("properties")
 .startObject("foo")
-.field("type", "string")
+.field("type", "text")
 .field("analyzer", name)
 .endObject()
 .endObject()
@@ -124,11 +124,11 @@ public class PreBuiltAnalyzerIntegrationIT extends ESIntegTestCase {
 .startObject("type")
 .startObject("properties")
 .startObject("foo")
-.field("type", "string")
+.field("type", "text")
 .field("analyzer", "dummy")
 .endObject()
 .startObject("bar")
-.field("type", "string")
+.field("type", "text")
 .field("analyzer", "my_dummy")
 .endObject()
 .endObject()
@@ -158,7 +158,7 @@ public class AnalyzeActionIT extends ESIntegTestCase {
 ensureGreen();

 client().admin().indices().preparePutMapping("test")
-.setType("document").setSource("simple", "type=string,analyzer=simple").get();
+.setType("document").setSource("simple", "type=text,analyzer=simple").get();

 for (int i = 0; i < 10; i++) {
 final AnalyzeRequestBuilder requestBuilder = client().admin().indices().prepareAnalyze("THIS IS A TEST");
@@ -248,7 +248,7 @@ public class AnalyzeActionIT extends ESIntegTestCase {
 ensureGreen();

 client().admin().indices().preparePutMapping("test")
-.setType("document").setSource("simple", "type=string,analyzer=simple,position_increment_gap=100").get();
+.setType("document").setSource("simple", "type=text,analyzer=simple,position_increment_gap=100").get();

 String[] texts = new String[]{"THIS IS A TEST", "THE SECOND TEXT"};

@@ -401,7 +401,7 @@ public class AnalyzeActionIT extends ESIntegTestCase {
 assertAcked(prepareCreate("test").addAlias(new Alias("alias")));
 ensureGreen();
 client().admin().indices().preparePutMapping("test")
-.setType("document").setSource("simple", "type=string,analyzer=simple,position_increment_gap=100").get();
+.setType("document").setSource("simple", "type=text,analyzer=simple,position_increment_gap=100").get();

 String[] texts = new String[]{"THIS IS A TEST", "THE SECOND TEXT"};
 AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze().setIndex(indexOrAlias()).setText(texts)
@@ -437,7 +437,7 @@ public class AnalyzeActionIT extends ESIntegTestCase {
 ensureGreen();

 client().admin().indices().preparePutMapping("test")
-.setType("document").setSource("simple", "type=string,analyzer=simple,position_increment_gap=100").get();
+.setType("document").setSource("simple", "type=text,analyzer=simple,position_increment_gap=100").get();

 //only analyzer =
 String[] texts = new String[]{"this is a PHISH", "the troubled text"};
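For the analyze tests above, only the type token in the shorthand mapping changes; the analyzer and position_increment_gap settings carry over unchanged. A sketch of the resulting call, assuming it runs inside an ESIntegTestCase test method (so the client() helper is available) against an existing index named "test":

    // Shorthand mapping string: an analyzed text field with an explicit analyzer
    // and a position_increment_gap of 100 between array values.
    client().admin().indices().preparePutMapping("test")
            .setType("document")
            .setSource("simple", "type=text,analyzer=simple,position_increment_gap=100")
            .get();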
@@ -59,7 +59,7 @@ public class SimpleGetFieldMappingsIT extends ESIntegTestCase {

 private XContentBuilder getMappingForType(String type) throws IOException {
 return jsonBuilder().startObject().startObject(type).startObject("properties")
-.startObject("field1").field("type", "string").endObject()
+.startObject("field1").field("type", "text").endObject()
 .startObject("obj").startObject("properties").startObject("subfield").field("type", "keyword").endObject().endObject().endObject()
 .endObject().endObject().endObject();
 }
@@ -145,8 +145,8 @@ public class SimpleGetFieldMappingsIT extends ESIntegTestCase {

 assertThat((Map<String, Object>) response.fieldMappings("test", "type", "num").sourceAsMap().get("num"), hasEntry("index", Boolean.TRUE));
 assertThat((Map<String, Object>) response.fieldMappings("test", "type", "num").sourceAsMap().get("num"), hasEntry("type", (Object) "long"));
-assertThat((Map<String, Object>) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), hasEntry("index", (Object) "analyzed"));
-assertThat((Map<String, Object>) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), hasEntry("type", (Object) "string"));
+assertThat((Map<String, Object>) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), hasEntry("index", Boolean.TRUE));
+assertThat((Map<String, Object>) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), hasEntry("type", (Object) "text"));
 assertThat((Map<String, Object>) response.fieldMappings("test", "type", "obj.subfield").sourceAsMap().get("subfield"), hasEntry("type", (Object) "keyword"));

@@ -53,7 +53,7 @@ public class SimpleGetMappingsIT extends ESIntegTestCase {

 private XContentBuilder getMappingForType(String type) throws IOException {
 return jsonBuilder().startObject().startObject(type).startObject("properties")
-.startObject("field1").field("type", "string").endObject()
+.startObject("field1").field("type", "text").endObject()
 .endObject().endObject().endObject();
 }

@@ -220,7 +220,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase {
 logger.info("Creating _default_ mappings with an analyzed field");
 putResponse = client().admin().indices().preparePutMapping("test").setType(MapperService.DEFAULT_MAPPING).setSource(
 JsonXContent.contentBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING)
-.startObject("properties").startObject("f").field("type", "string").field("index", "analyzed").endObject().endObject()
+.startObject("properties").startObject("f").field("type", "text").field("index", true).endObject().endObject()
 .endObject().endObject()
 ).get();
 assertThat(putResponse.isAcknowledged(), equalTo(true));
@@ -287,7 +287,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase {

 PutMappingResponse response = client1.admin().indices().preparePutMapping(indexName).setType(typeName).setSource(
 JsonXContent.contentBuilder().startObject().startObject(typeName)
-.startObject("properties").startObject(fieldName).field("type", "string").endObject().endObject()
+.startObject("properties").startObject(fieldName).field("type", "text").endObject().endObject()
 .endObject().endObject()
 ).get();

@@ -340,12 +340,12 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase {
 }

 public void testUpdateMappingOnAllTypes() throws IOException {
-assertAcked(prepareCreate("index").addMapping("type1", "f", "type=string").addMapping("type2", "f", "type=string"));
+assertAcked(prepareCreate("index").addMapping("type1", "f", "type=keyword").addMapping("type2", "f", "type=keyword"));

 assertAcked(client().admin().indices().preparePutMapping("index")
 .setType("type1")
 .setUpdateAllTypes(true)
-.setSource("f", "type=string,analyzer=default,null_value=n/a")
+.setSource("f", "type=keyword,null_value=n/a")
 .get());

 GetMappingsResponse mappings = client().admin().indices().prepareGetMappings("index").setTypes("type2").get();
@@ -44,7 +44,7 @@ public class IndexPrimaryRelocationIT extends ESIntegTestCase {
 internalCluster().ensureAtLeastNumDataNodes(randomIntBetween(2, 3));
 client().admin().indices().prepareCreate("test")
 .setSettings(Settings.settingsBuilder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0))
-.addMapping("type", "field", "type=string")
+.addMapping("type", "field", "type=text")
 .get();
 ensureGreen("test");

@@ -38,7 +38,7 @@ public class IndexTemplateBlocksIT extends ESIntegTestCase {
 .setTemplate("te*")
 .setOrder(0)
 .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
-.startObject("field1").field("type", "string").field("store", true).endObject()
+.startObject("field1").field("type", "text").field("store", true).endObject()
 .startObject("field2").field("type", "keyword").field("store", true).endObject()
 .endObject().endObject().endObject())
 .execute().actionGet();
@@ -47,15 +47,15 @@ public class IndexTemplateFilteringIT extends ESIntegTestCase {
 public void testTemplateFiltering() throws Exception {
 client().admin().indices().preparePutTemplate("template1")
 .setTemplate("test*")
-.addMapping("type1", "field1", "type=string").get();
+.addMapping("type1", "field1", "type=text").get();

 client().admin().indices().preparePutTemplate("template2")
 .setTemplate("test*")
-.addMapping("type2", "field2", "type=string").get();
+.addMapping("type2", "field2", "type=text").get();

 client().admin().indices().preparePutTemplate("template3")
 .setTemplate("no_match")
-.addMapping("type3", "field3", "type=string").get();
+.addMapping("type3", "field3", "type=text").get();

 assertAcked(prepareCreate("test"));

@@ -78,7 +78,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
 .setSettings(indexSettings())
 .setOrder(0)
 .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
-.startObject("field1").field("type", "string").field("store", true).endObject()
+.startObject("field1").field("type", "text").field("store", true).endObject()
 .startObject("field2").field("type", "keyword").field("store", true).endObject()
 .endObject().endObject().endObject())
 .get();
@@ -88,7 +88,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
 .setSettings(indexSettings())
 .setOrder(1)
 .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
-.startObject("field2").field("type", "string").field("store", false).endObject()
+.startObject("field2").field("type", "text").field("store", false).endObject()
 .endObject().endObject().endObject())
 .get();

@@ -99,7 +99,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
 .setCreate(true)
 .setOrder(1)
 .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
-.startObject("field2").field("type", "string").field("store", false).endObject()
+.startObject("field2").field("type", "text").field("store", false).endObject()
 .endObject().endObject().endObject())
 , IndexTemplateAlreadyExistsException.class
 );
@@ -145,8 +145,8 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
 .setTemplate("te*")
 .setOrder(0)
 .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
-.startObject("field1").field("type", "string").field("store", true).endObject()
-.startObject("field2").field("type", "string").field("store", true).endObject()
+.startObject("field1").field("type", "text").field("store", true).endObject()
+.startObject("field2").field("type", "text").field("store", true).endObject()
 .endObject().endObject().endObject())
 .execute().actionGet();

@@ -154,7 +154,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
 .setTemplate("test*")
 .setOrder(1)
 .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
-.startObject("field2").field("type", "string").field("store", "no").endObject()
+.startObject("field2").field("type", "text").field("store", "no").endObject()
 .endObject().endObject().endObject())
 .execute().actionGet();

@@ -170,7 +170,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
 .setTemplate("te*")
 .setOrder(0)
 .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
-.startObject("field1").field("type", "string").field("store", true).endObject()
+.startObject("field1").field("type", "text").field("store", true).endObject()
 .startObject("field2").field("type", "keyword").field("store", true).endObject()
 .endObject().endObject().endObject())
 .execute().actionGet();
@@ -190,7 +190,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
 .setTemplate("te*")
 .setOrder(0)
 .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
-.startObject("field1").field("type", "string").field("store", true).endObject()
+.startObject("field1").field("type", "text").field("store", true).endObject()
 .startObject("field2").field("type", "keyword").field("store", true).endObject()
 .endObject().endObject().endObject())
 .execute().actionGet();
@@ -213,7 +213,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
 .setTemplate("te*")
 .setOrder(0)
 .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
-.startObject("field1").field("type", "string").field("store", true).endObject()
+.startObject("field1").field("type", "text").field("store", true).endObject()
 .startObject("field2").field("type", "keyword").field("store", true).endObject()
 .endObject().endObject().endObject())
 .execute().actionGet();
@@ -223,7 +223,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
 .setTemplate("te*")
 .setOrder(0)
 .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
-.startObject("field1").field("type", "string").field("store", true).endObject()
+.startObject("field1").field("type", "text").field("store", true).endObject()
 .startObject("field2").field("type", "keyword").field("store", true).endObject()
 .endObject().endObject().endObject())
 .execute().actionGet();
@@ -233,7 +233,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
 .setTemplate("te*")
 .setOrder(0)
 .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
-.startObject("field1").field("type", "string").field("store", true).endObject()
+.startObject("field1").field("type", "text").field("store", true).endObject()
 .startObject("field2").field("type", "keyword").field("store", true).endObject()
 .endObject().endObject().endObject())
 .execute().actionGet();
@@ -599,20 +599,20 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
 client().admin().indices().preparePutTemplate("template1")
 .setTemplate("a*")
 .setOrder(0)
-.addMapping("test", "field", "type=string")
+.addMapping("test", "field", "type=text")
 .addAlias(new Alias("alias1").filter(termQuery("field", "value"))).get();
 // Indexing into b should succeed, because the field mapping for field 'field' is defined in the _default_ mapping and the test type exists.
 client().admin().indices().preparePutTemplate("template2")
 .setTemplate("b*")
 .setOrder(0)
-.addMapping("_default_", "field", "type=string")
+.addMapping("_default_", "field", "type=text")
 .addMapping("test")
 .addAlias(new Alias("alias2").filter(termQuery("field", "value"))).get();
 // Indexing into c should succeed, because the field mapping for field 'field' is defined in the _default_ mapping.
 client().admin().indices().preparePutTemplate("template3")
 .setTemplate("c*")
 .setOrder(0)
-.addMapping("_default_", "field", "type=string")
+.addMapping("_default_", "field", "type=text")
 .addAlias(new Alias("alias3").filter(termQuery("field", "value"))).get();
 // Indexing into d index should fail, since there is field with name 'field' in the mapping
 client().admin().indices().preparePutTemplate("template4")
@ -58,7 +58,7 @@ public class ConcurrentPercolatorIT extends ESIntegTestCase {
|
|||
// We need to index a document / define mapping, otherwise field1 doesn't get recognized as number field.
|
||||
// If we don't do this, then 'test2' percolate query gets parsed as a TermQuery and not a RangeQuery.
|
||||
// The percolate api doesn't parse the doc if no queries have registered, so it can't lazily create a mapping
|
||||
assertAcked(prepareCreate("index").addMapping("type", "field1", "type=long", "field2", "type=string")); // random # shards better has a mapping!
|
||||
assertAcked(prepareCreate("index").addMapping("type", "field1", "type=long", "field2", "type=text")); // random # shards better has a mapping!
|
||||
ensureGreen();
|
||||
|
||||
final BytesReference onlyField1 = XContentFactory.jsonBuilder().startObject().startObject("doc")
|
||||
|
@ -149,7 +149,7 @@ public class ConcurrentPercolatorIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testConcurrentAddingAndPercolating() throws Exception {
|
||||
assertAcked(prepareCreate("index").addMapping("type", "field1", "type=string", "field2", "type=string"));
|
||||
assertAcked(prepareCreate("index").addMapping("type", "field1", "type=text", "field2", "type=text"));
|
||||
ensureGreen();
|
||||
final int numIndexThreads = scaledRandomIntBetween(1, 3);
|
||||
final int numPercolateThreads = scaledRandomIntBetween(2, 6);
|
||||
|
@ -298,7 +298,7 @@ public class ConcurrentPercolatorIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testConcurrentAddingAndRemovingWhilePercolating() throws Exception {
|
||||
assertAcked(prepareCreate("index").addMapping("type", "field1", "type=string"));
|
||||
assertAcked(prepareCreate("index").addMapping("type", "field1", "type=text"));
|
||||
ensureGreen();
|
||||
final int numIndexThreads = scaledRandomIntBetween(1, 3);
|
||||
final int numberPercolateOperation = scaledRandomIntBetween(10, 100);
|
||||
@@ -56,7 +56,7 @@ import static org.hamcrest.Matchers.nullValue;
*/
|
||||
public class MultiPercolatorIT extends ESIntegTestCase {
|
||||
public void testBasics() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping("type", "field1", "type=string"));
|
||||
assertAcked(prepareCreate("test").addMapping("type", "field1", "type=text"));
|
||||
ensureGreen();
|
||||
|
||||
logger.info("--> register a queries");
@@ -126,7 +126,7 @@ public class MultiPercolatorIT extends ESIntegTestCase {
}
|
||||
|
||||
public void testWithRouting() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping("type", "field1", "type=string"));
|
||||
assertAcked(prepareCreate("test").addMapping("type", "field1", "type=text"));
|
||||
ensureGreen();
|
||||
|
||||
logger.info("--> register a queries");
@@ -394,9 +394,9 @@ public class MultiPercolatorIT extends ESIntegTestCase {
|
||||
void initNestedIndexAndPercolation() throws IOException {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder();
|
||||
mapping.startObject().startObject("properties").startObject("companyname").field("type", "string").endObject()
|
||||
mapping.startObject().startObject("properties").startObject("companyname").field("type", "text").endObject()
|
||||
.startObject("employee").field("type", "nested").startObject("properties")
|
||||
.startObject("name").field("type", "string").endObject().endObject().endObject().endObject()
|
||||
.startObject("name").field("type", "text").endObject().endObject().endObject().endObject()
|
||||
.endObject();
|
||||
|
||||
assertAcked(client().admin().indices().prepareCreate("nestedindex").addMapping("company", mapping));
|
||||
@@ -53,7 +53,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase {
|
||||
// Just test the integration with facets and aggregations, not the facet and aggregation functionality!
|
||||
public void testAggregations() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping("type", "field1", "type=string", "field2", "type=string"));
|
||||
assertAcked(prepareCreate("test").addMapping("type", "field1", "type=text", "field2", "type=text"));
|
||||
ensureGreen();
|
||||
|
||||
int numQueries = scaledRandomIntBetween(250, 500);
@@ -118,7 +118,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase {
|
||||
// Just test the integration with facets and aggregations, not the facet and aggregation functionality!
|
||||
public void testAggregationsAndPipelineAggregations() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping("type", "field1", "type=string", "field2", "type=string"));
|
||||
assertAcked(prepareCreate("test").addMapping("type", "field1", "type=text", "field2", "type=text"));
|
||||
ensureGreen();
|
||||
|
||||
int numQueries = scaledRandomIntBetween(250, 500);
@@ -203,7 +203,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase {
|
||||
public void testSingleShardAggregations() throws Exception {
|
||||
assertAcked(prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1))
|
||||
.addMapping("type", "field1", "type=string", "field2", "type=string"));
|
||||
.addMapping("type", "field1", "type=text", "field2", "type=text"));
|
||||
ensureGreen();
|
||||
|
||||
int numQueries = scaledRandomIntBetween(250, 500);
|
||||
@@ -178,7 +178,7 @@ public class PercolatorIT extends ESIntegTestCase {
}
|
||||
|
||||
public void testSimple2() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=long,doc_values=true", "field2", "type=string"));
|
||||
assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=long,doc_values=true", "field2", "type=text"));
|
||||
ensureGreen();
|
||||
|
||||
// introduce the doc
@@ -315,7 +315,7 @@ public class PercolatorIT extends ESIntegTestCase {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("doc")
|
||||
.startObject("properties")
|
||||
.startObject("filingcategory").field("type", "string").field("analyzer", "lwhitespacecomma").endObject()
|
||||
.startObject("filingcategory").field("type", "text").field("analyzer", "lwhitespacecomma").endObject()
|
||||
.endObject()
|
||||
.endObject().endObject();
|
||||
@@ -346,7 +346,7 @@ public class PercolatorIT extends ESIntegTestCase {
|
||||
public void testCreateIndexAndThenRegisterPercolator() throws Exception {
|
||||
prepareCreate("test")
|
||||
.addMapping("type1", "field1", "type=string")
|
||||
.addMapping("type1", "field1", "type=text")
|
||||
.get();
|
||||
ensureGreen();
|
||||
@@ -394,7 +394,7 @@ public class PercolatorIT extends ESIntegTestCase {
}
|
||||
|
||||
public void testMultiplePercolators() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=string"));
|
||||
assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=text"));
|
||||
ensureGreen();
|
||||
|
||||
logger.info("--> register a query 1");
@@ -436,7 +436,7 @@ public class PercolatorIT extends ESIntegTestCase {
public void testDynamicAddingRemovingQueries() throws Exception {
|
||||
assertAcked(
|
||||
prepareCreate("test")
|
||||
.addMapping("type1", "field1", "type=string")
|
||||
.addMapping("type1", "field1", "type=text")
|
||||
);
|
||||
ensureGreen();
|
||||
@@ -1273,7 +1273,7 @@ public class PercolatorIT extends ESIntegTestCase {
|
||||
public void testPercolateSortingUnsupportedField() throws Exception {
|
||||
client().admin().indices().prepareCreate("my-index")
|
||||
.addMapping("my-type", "field", "type=string")
|
||||
.addMapping("my-type", "field", "type=text")
|
||||
.addMapping(PercolatorService.TYPE_NAME, "level", "type=integer", "query", "type=object,enabled=false")
|
||||
.get();
|
||||
ensureGreen();
@@ -1312,7 +1312,7 @@ public class PercolatorIT extends ESIntegTestCase {
}
|
||||
|
||||
public void testPercolatorWithHighlighting() throws Exception {
|
||||
StringBuilder fieldMapping = new StringBuilder("type=string")
|
||||
StringBuilder fieldMapping = new StringBuilder("type=text")
|
||||
.append(",store=").append(randomBoolean());
|
||||
if (randomBoolean()) {
|
||||
fieldMapping.append(",term_vector=with_positions_offsets");
@@ -1521,7 +1521,7 @@ public class PercolatorIT extends ESIntegTestCase {
}
|
||||
|
||||
public void testPercolateNonMatchingConstantScoreQuery() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping("doc", "message", "type=string"));
|
||||
assertAcked(prepareCreate("test").addMapping("doc", "message", "type=text"));
|
||||
ensureGreen();
|
||||
|
||||
logger.info("--> register a query");
@@ -1646,9 +1646,9 @@ public class PercolatorIT extends ESIntegTestCase {
|
||||
void initNestedIndexAndPercolation() throws IOException {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder();
|
||||
mapping.startObject().startObject("properties").startObject("companyname").field("type", "string").endObject()
|
||||
mapping.startObject().startObject("properties").startObject("companyname").field("type", "text").endObject()
|
||||
.startObject("employee").field("type", "nested").startObject("properties")
|
||||
.startObject("name").field("type", "string").endObject().endObject().endObject().endObject()
|
||||
.startObject("name").field("type", "text").endObject().endObject().endObject().endObject()
|
||||
.endObject();
|
||||
|
||||
assertAcked(client().admin().indices().prepareCreate("nestedindex").addMapping("company", mapping));
@@ -1869,7 +1869,7 @@ public class PercolatorIT extends ESIntegTestCase {
.field("type", "nested")
|
||||
.startObject("properties")
|
||||
.startObject("name")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
@@ -69,7 +69,7 @@ public class RecoveryPercolatorIT extends ESIntegTestCase {
|
||||
public void testRestartNodePercolator1() throws Exception {
|
||||
internalCluster().startNode();
|
||||
assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=string").addMapping(PercolatorService.TYPE_NAME, "color", "type=string"));
|
||||
assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=text").addMapping(PercolatorService.TYPE_NAME, "color", "type=text"));
|
||||
|
||||
logger.info("--> register a query");
|
||||
client().prepareIndex("test", PercolatorService.TYPE_NAME, "kuku")
@@ -105,7 +105,7 @@ public class RecoveryPercolatorIT extends ESIntegTestCase {
|
||||
public void testRestartNodePercolator2() throws Exception {
|
||||
internalCluster().startNode();
|
||||
assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=string").addMapping(PercolatorService.TYPE_NAME, "color", "type=string"));
|
||||
assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=text").addMapping(PercolatorService.TYPE_NAME, "color", "type=text"));
|
||||
|
||||
logger.info("--> register a query");
|
||||
client().prepareIndex("test", PercolatorService.TYPE_NAME, "kuku")
@@ -138,7 +138,7 @@ public class RecoveryPercolatorIT extends ESIntegTestCase {
|
||||
DeleteIndexResponse actionGet = client().admin().indices().prepareDelete("test").get();
|
||||
assertThat(actionGet.isAcknowledged(), equalTo(true));
|
||||
assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=string").addMapping(PercolatorService.TYPE_NAME, "color", "type=string"));
|
||||
assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=text").addMapping(PercolatorService.TYPE_NAME, "color", "type=text"));
|
||||
clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForYellowStatus().waitForActiveShards(1)).actionGet();
|
||||
logger.info("Done Cluster Health, status " + clusterHealth.getStatus());
|
||||
assertThat(clusterHealth.isTimedOut(), equalTo(false));
|
||||
@@ -73,7 +73,7 @@ public class TTLPercolatorIT extends ESIntegTestCase {
String typeMapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("_ttl").field("enabled", true).endObject()
|
||||
.startObject("_timestamp").field("enabled", true).endObject()
|
||||
.startObject("properties").startObject("field1").field("type", "string").endObject().endObject()
|
||||
.startObject("properties").startObject("field1").field("type", "text").endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
client.admin().indices().prepareCreate("test")
|
||||
@@ -92,7 +92,7 @@ public class TruncatedRecoveryIT extends ESIntegTestCase {
// create the index and prevent allocation on any other nodes than the lucky one
|
||||
// we have no replicas so far and make sure that we allocate the primary on the lucky node
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", "field1", "type=string", "the_id", "type=string")
|
||||
.addMapping("type1", "field1", "type=text", "the_id", "type=text")
|
||||
.setSettings(settingsBuilder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numberOfShards())
|
||||
.put("index.routing.allocation.include._name", primariesNode.getNode().name()))); // only allocate on the lucky node
|
||||
|
||||
@@ -40,7 +40,7 @@ public class AggregationsIntegrationIT extends ESIntegTestCase {
|
||||
@Override
|
||||
public void setupSuiteScopeCluster() throws Exception {
|
||||
assertAcked(prepareCreate("index").addMapping("type", "f", "type=string").get());
|
||||
assertAcked(prepareCreate("index").addMapping("type", "f", "type=text").get());
|
||||
ensureYellow("index");
|
||||
numDocs = randomIntBetween(1, 20);
|
||||
List<IndexRequestBuilder> docs = new ArrayList<>();
|
||||
@@ -117,7 +117,7 @@ public class CombiIT extends ESIntegTestCase {
prepareCreate("idx").addMapping("type", jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("type").startObject("properties")
|
||||
.startObject("name").field("type", "string").endObject()
|
||||
.startObject("name").field("type", "text").endObject()
|
||||
.startObject("value").field("type", "integer").endObject()
|
||||
.endObject().endObject()
|
||||
.endObject()).execute().actionGet();
|
||||
@@ -294,8 +294,8 @@ public class ChildrenIT extends ESIntegTestCase {
String childType = "variantsku";
|
||||
assertAcked(
|
||||
prepareCreate(indexName)
|
||||
.addMapping(masterType, "brand", "type=string", "name", "type=string", "material", "type=string")
|
||||
.addMapping(childType, "_parent", "type=masterprod", "color", "type=string", "size", "type=string")
|
||||
.addMapping(masterType, "brand", "type=text", "name", "type=text", "material", "type=text")
|
||||
.addMapping(childType, "_parent", "type=masterprod", "color", "type=text", "size", "type=text")
|
||||
);
|
||||
|
||||
List<IndexRequestBuilder> requests = new ArrayList<>();
|
||||
@@ -462,7 +462,7 @@ public class NestedIT extends ESIntegTestCase {
assertAcked(
|
||||
prepareCreate("idx4")
|
||||
.setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0))
|
||||
.addMapping("product", "categories", "type=string", "name", "type=string", "property", "type=nested")
|
||||
.addMapping("product", "categories", "type=text", "name", "type=text", "property", "type=nested")
|
||||
);
|
||||
ensureGreen("idx4");
|
||||
|
||||
@@ -66,9 +66,9 @@ public class ReverseNestedIT extends ESIntegTestCase {
.addMapping(
|
||||
"type1",
|
||||
jsonBuilder().startObject().startObject("properties")
|
||||
.startObject("field1").field("type", "string").endObject()
|
||||
.startObject("field1").field("type", "text").endObject()
|
||||
.startObject("nested1").field("type", "nested").startObject("properties")
|
||||
.startObject("field2").field("type", "string").endObject()
|
||||
.startObject("field2").field("type", "text").endObject()
|
||||
.endObject().endObject()
|
||||
.endObject().endObject()
|
||||
)
@@ -76,9 +76,9 @@ public class ReverseNestedIT extends ESIntegTestCase {
"type2",
|
||||
jsonBuilder().startObject().startObject("properties")
|
||||
.startObject("nested1").field("type", "nested").startObject("properties")
|
||||
.startObject("field1").field("type", "string").endObject()
|
||||
.startObject("field1").field("type", "text").endObject()
|
||||
.startObject("nested2").field("type", "nested").startObject("properties")
|
||||
.startObject("field2").field("type", "string").endObject()
|
||||
.startObject("field2").field("type", "text").endObject()
|
||||
.endObject().endObject()
|
||||
.endObject().endObject()
|
||||
.endObject().endObject()
@@ -487,17 +487,17 @@ public class ReverseNestedIT extends ESIntegTestCase {
.startObject("category")
|
||||
.field("type", "nested")
|
||||
.startObject("properties")
|
||||
.startObject("name").field("type", "string").endObject()
|
||||
.startObject("name").field("type", "text").endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.startObject("sku")
|
||||
.field("type", "nested")
|
||||
.startObject("properties")
|
||||
.startObject("sku_type").field("type", "string").endObject()
|
||||
.startObject("sku_type").field("type", "text").endObject()
|
||||
.startObject("colors")
|
||||
.field("type", "nested")
|
||||
.startObject("properties")
|
||||
.startObject("name").field("type", "string").endObject()
|
||||
.startObject("name").field("type", "text").endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
@@ -61,12 +61,12 @@ public class SamplerIT extends ESIntegTestCase {
@Override
|
||||
public void setupSuiteScopeCluster() throws Exception {
|
||||
assertAcked(prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS, SETTING_NUMBER_OF_REPLICAS, 0).addMapping(
|
||||
"book", "author", "type=keyword", "name", "type=string,index=analyzed", "genre",
|
||||
"book", "author", "type=keyword", "name", "type=text", "genre",
|
||||
"type=keyword", "price", "type=float"));
|
||||
createIndex("idx_unmapped");
|
||||
// idx_unmapped_author is same as main index but missing author field
|
||||
assertAcked(prepareCreate("idx_unmapped_author").setSettings(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS, SETTING_NUMBER_OF_REPLICAS, 0)
|
||||
.addMapping("book", "name", "type=string,index=analyzed", "genre", "type=keyword", "price", "type=float"));
|
||||
.addMapping("book", "name", "type=text", "genre", "type=keyword", "price", "type=float"));
|
||||
|
||||
ensureGreen();
|
||||
String data[] = {
|
||||
@@ -76,7 +76,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
public void setupSuiteScopeCluster() throws Exception {
|
||||
assertAcked(prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, 5, SETTING_NUMBER_OF_REPLICAS, 0).addMapping("fact",
|
||||
"_routing", "required=true", "routing_id", "type=keyword", "fact_category",
|
||||
"type=integer,index=true", "description", "type=string,index=analyzed"));
|
||||
"type=integer,index=true", "description", "type=text,index=true"));
|
||||
createIndex("idx_unmapped");
|
||||
|
||||
ensureGreen();
|
||||
@@ -419,7 +419,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
|
||||
private void indexEqualTestData() throws ExecutionException, InterruptedException {
|
||||
assertAcked(prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0).addMapping("doc",
|
||||
"text", "type=string", "class", "type=string"));
|
||||
"text", "type=text", "class", "type=text"));
|
||||
createIndex("idx_unmapped");
|
||||
|
||||
ensureGreen();
|
||||
@@ -111,7 +111,7 @@ public class TopHitsIT extends ESIntegTestCase {
.field("type", "long")
|
||||
.endObject()
|
||||
.startObject("message")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("store", true)
|
||||
.field("term_vector", "with_positions_offsets")
|
||||
.field("index_options", "offsets")
|
||||
@@ -61,7 +61,7 @@ public class SearchWhileRelocatingIT extends ESIntegTestCase {
final int numShards = between(1, 20);
|
||||
client().admin().indices().prepareCreate("test")
|
||||
.setSettings(settingsBuilder().put("index.number_of_shards", numShards).put("index.number_of_replicas", numberOfReplicas))
|
||||
.addMapping("type", "loc", "type=geo_point", "test", "type=string").execute().actionGet();
|
||||
.addMapping("type", "loc", "type=geo_point", "test", "type=text").execute().actionGet();
|
||||
ensureGreen();
|
||||
List<IndexRequestBuilder> indexBuilders = new ArrayList<>();
|
||||
final int numDocs = between(10, 20);
|
||||
@@ -824,8 +824,8 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
||||
public void testSimpleQueryRewrite() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("parent", "p_field", "type=string")
|
||||
.addMapping("child", "_parent", "type=parent", "c_field", "type=string"));
|
||||
.addMapping("parent", "p_field", "type=text")
|
||||
.addMapping("child", "_parent", "type=parent", "c_field", "type=text"));
|
||||
ensureGreen();
|
||||
|
||||
// index simple data
|
||||
@@ -74,7 +74,7 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase {
.startObject().startObject("type1")
|
||||
.startObject("properties")
|
||||
.startObject("test")
|
||||
.field("type", "string").field("term_vector", "yes")
|
||||
.field("type", "text").field("term_vector", "yes")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject().endObject()).execute().actionGet();
|
||||
@@ -77,7 +77,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
public void testDistanceScoreGeoLinGaussExp() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping(
|
||||
"type1",
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "string")
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text")
|
||||
.endObject().startObject("loc").field("type", "geo_point").endObject().endObject().endObject().endObject()));
|
||||
ensureYellow();
|
||||
@@ -175,7 +175,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping(
|
||||
"type1",
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "string")
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text")
|
||||
.endObject().startObject("num").field("type", "double").endObject().endObject().endObject().endObject()));
|
||||
ensureYellow();
|
||||
@@ -249,7 +249,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
public void testBoostModeSettingWorks() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping(
|
||||
"type1",
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "string")
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text")
|
||||
.endObject().startObject("loc").field("type", "geo_point").endObject().endObject().endObject().endObject()));
|
||||
ensureYellow();
|
||||
@@ -303,7 +303,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
public void testParseGeoPoint() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping(
|
||||
"type1",
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "string")
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text")
|
||||
.endObject().startObject("loc").field("type", "geo_point").endObject().endObject().endObject().endObject()));
|
||||
ensureYellow();
|
||||
@@ -343,7 +343,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
public void testCombineModes() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping(
|
||||
"type1",
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "string")
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text")
|
||||
.endObject().startObject("num").field("type", "double").endObject().endObject().endObject().endObject()));
|
||||
ensureYellow();
|
||||
@@ -424,7 +424,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
public void testExceptionThrownIfScaleLE0() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping(
|
||||
"type1",
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "string")
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text")
|
||||
.endObject().startObject("num1").field("type", "date").endObject().endObject().endObject().endObject()));
|
||||
ensureYellow();
|
||||
client().index(
@@ -450,7 +450,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
public void testParseDateMath() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping(
|
||||
"type1",
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "string")
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text")
|
||||
.endObject().startObject("num1").field("type", "date").field("format", "epoch_millis").endObject().endObject().endObject().endObject()));
|
||||
ensureYellow();
|
||||
client().index(
@@ -482,7 +482,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
public void testValueMissingLin() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping(
|
||||
"type1",
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "string")
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text")
|
||||
.endObject().startObject("num1").field("type", "date").endObject().startObject("num2").field("type", "double")
|
||||
.endObject().endObject().endObject().endObject())
|
||||
);
@@ -533,7 +533,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
|
||||
assertAcked(prepareCreate("test").addMapping(
|
||||
"type1",
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "string")
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text")
|
||||
.endObject().startObject("num1").field("type", "date").endObject().endObject().endObject().endObject()));
|
||||
ensureYellow();
|
||||
@@ -581,7 +581,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
|
||||
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
|
||||
XContentBuilder xContentBuilder = jsonBuilder().startObject().startObject("type").startObject("properties")
|
||||
.startObject("test").field("type", "string").endObject().startObject("date").field("type", "date")
|
||||
.startObject("test").field("type", "text").endObject().startObject("date").field("type", "date")
|
||||
.field("doc_values", true).endObject().startObject("num").field("type", "double")
|
||||
.field("doc_values", true).endObject().startObject("geo").field("type", "geo_point")
|
||||
.field("ignore_malformed", true);
@@ -667,8 +667,8 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
public void testParsingExceptionIfFieldTypeDoesNotMatch() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping(
|
||||
"type",
|
||||
jsonBuilder().startObject().startObject("type").startObject("properties").startObject("test").field("type", "string")
|
||||
.endObject().startObject("num").field("type", "string").endObject().endObject().endObject().endObject()));
|
||||
jsonBuilder().startObject().startObject("type").startObject("properties").startObject("test").field("type", "text")
|
||||
.endObject().startObject("num").field("type", "text").endObject().endObject().endObject().endObject()));
|
||||
ensureYellow();
|
||||
client().index(
|
||||
indexRequest("test").type("type").source(
@@ -690,7 +690,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
public void testNoQueryGiven() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping(
|
||||
"type",
|
||||
jsonBuilder().startObject().startObject("type").startObject("properties").startObject("test").field("type", "string")
|
||||
jsonBuilder().startObject().startObject("type").startObject("properties").startObject("test").field("type", "text")
|
||||
.endObject().startObject("num").field("type", "double").endObject().endObject().endObject().endObject()));
|
||||
ensureYellow();
|
||||
client().index(
@@ -709,7 +709,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
public void testMultiFieldOptions() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping(
|
||||
"type1",
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "string")
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text")
|
||||
.endObject().startObject("loc").field("type", "geo_point").endObject().startObject("num").field("type", "float").endObject().endObject().endObject().endObject()));
|
||||
ensureYellow();
|
||||
|
||||
@@ -56,7 +56,7 @@ public class FunctionScoreBackwardCompatibilityIT extends ESBackcompatTestCase {
.startObject("type1")
|
||||
.startObject("properties")
|
||||
.startObject("text")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.endObject()
|
||||
.startObject("loc")
|
||||
.field("type", "geo_point")
|
||||
@@ -50,7 +50,7 @@ public class FunctionScoreFieldValueIT extends ESIntegTestCase {
.field("type", randomFrom(new String[]{"short", "float", "long", "integer", "double"}))
|
||||
.endObject()
|
||||
.startObject("body")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
@@ -63,7 +63,7 @@ public class FunctionScorePluginIT extends ESIntegTestCase {
.addMapping(
|
||||
"type1",
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test")
|
||||
.field("type", "string").endObject().startObject("num1").field("type", "date").endObject().endObject()
|
||||
.field("type", "text").endObject().startObject("num1").field("type", "date").endObject().endObject()
|
||||
.endObject().endObject()).execute().actionGet();
|
||||
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet();
|
||||
|
||||
@@ -104,7 +104,7 @@ public class QueryRescorerIT extends ESIntegTestCase {
.addMapping(
|
||||
"type1",
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("field1")
|
||||
.field("analyzer", "whitespace").field("type", "string").endObject().endObject().endObject().endObject())
|
||||
.field("analyzer", "whitespace").field("type", "text").endObject().endObject().endObject().endObject())
|
||||
.setSettings(Settings.settingsBuilder().put(indexSettings()).put("index.number_of_shards", 1)));
|
||||
|
||||
client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox").execute().actionGet();
@@ -153,7 +153,7 @@ public class QueryRescorerIT extends ESIntegTestCase {
builder.putArray("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
|
||||
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
.startObject("field1").field("type", "string").field("analyzer", "whitespace").field("search_analyzer", "synonym")
|
||||
.startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym")
|
||||
.endObject().endObject().endObject().endObject();
|
||||
|
||||
assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1", mapping).setSettings(builder.put("index.number_of_shards", 1)));
@@ -231,7 +231,7 @@ public class QueryRescorerIT extends ESIntegTestCase {
builder.putArray("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
|
||||
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
.startObject("field1").field("type", "string").field("analyzer", "whitespace").field("search_analyzer", "synonym")
|
||||
.startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym")
|
||||
.endObject().endObject().endObject().endObject();
|
||||
|
||||
assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1", mapping).setSettings(builder.put("index.number_of_shards", 1)));
@@ -301,7 +301,7 @@ public class QueryRescorerIT extends ESIntegTestCase {
builder.putArray("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
|
||||
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
.startObject("field1").field("type", "string").field("analyzer", "whitespace").field("search_analyzer", "synonym")
|
||||
.startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym")
|
||||
.endObject().endObject().endObject().endObject();
|
||||
|
||||
assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1", mapping).setSettings(builder.put("index.number_of_shards", 1)));
@@ -494,7 +494,7 @@ public class QueryRescorerIT extends ESIntegTestCase {
.addMapping(
|
||||
"type1",
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("field1")
|
||||
.field("analyzer", "whitespace").field("type", "string").endObject().endObject().endObject().endObject())
|
||||
.field("analyzer", "whitespace").field("type", "text").endObject().endObject().endObject().endObject())
|
||||
);
|
||||
ensureGreen();
|
||||
client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox").execute().actionGet();
@@ -731,7 +731,7 @@ public class QueryRescorerIT extends ESIntegTestCase {
.addMapping(
|
||||
"type1",
|
||||
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("field1")
|
||||
.field("analyzer", analyzer).field("type", "string").endObject().endObject().endObject().endObject())
|
||||
.field("analyzer", analyzer).field("type", "text").endObject().endObject().endObject().endObject())
|
||||
.setSettings(builder));
|
||||
int numDocs = randomIntBetween(100, 150);
|
||||
IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
|
||||
@@ -39,7 +39,7 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.ContentPath;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.core.StringFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.TextFieldMapper;
|
||||
import org.elasticsearch.index.query.IdsQueryBuilder;
|
||||
import org.elasticsearch.index.query.MatchAllQueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
@@ -280,7 +280,7 @@ public class HighlightBuilderTests extends ESTestCase {
QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, indicesQueriesRegistry) {
|
||||
@Override
|
||||
public MappedFieldType fieldMapper(String name) {
|
||||
StringFieldMapper.Builder builder = new StringFieldMapper.Builder(name);
|
||||
TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name);
|
||||
return builder.build(new Mapper.BuilderContext(idxSettings.getSettings(), new ContentPath(1))).fieldType();
|
||||
}
|
||||
};
|
||||
@@ -92,7 +92,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
mappings.startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("text")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "keyword")
|
||||
.field("index_options", "offsets")
|
||||
.field("term_vector", "with_positions_offsets")
@@ -126,7 +126,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.field("ignore_above", 1)
|
||||
.endObject()
|
||||
.startObject("text")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "keyword")
|
||||
.field("index_options", "offsets")
|
||||
.field("term_vector", "with_positions_offsets")
@@ -173,13 +173,13 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.startObject("unstored_field")
|
||||
.field("index_options", "offsets")
|
||||
.field("term_vector", "with_positions_offsets")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("store", false)
|
||||
.endObject()
|
||||
.startObject("text")
|
||||
.field("index_options", "offsets")
|
||||
.field("term_vector", "with_positions_offsets")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("store", true)
|
||||
.endObject()
|
||||
.endObject()
@@ -205,7 +205,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
// see #3486
|
||||
public void testHighTermFrequencyDoc() throws IOException {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("test", "name", "type=string,term_vector=with_positions_offsets,store=" + randomBoolean()));
|
||||
.addMapping("test", "name", "type=text,term_vector=with_positions_offsets,store=" + randomBoolean()));
|
||||
ensureYellow();
|
||||
StringBuilder builder = new StringBuilder();
|
||||
for (int i = 0; i < 6000; i++) {
@@ -229,13 +229,13 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.startObject("name")
|
||||
.startObject("fields")
|
||||
.startObject("autocomplete")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "autocomplete")
|
||||
.field("search_analyzer", "search_autocomplete")
|
||||
.field("term_vector", "with_positions_offsets")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject())
@@ -280,7 +280,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
* query. We cut off and extract terms if there are more than 16 terms in the query
|
||||
*/
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("test", "body", "type=string,analyzer=custom_analyzer,search_analyzer=custom_analyzer,term_vector=with_positions_offsets")
|
||||
.addMapping("test", "body", "type=text,analyzer=custom_analyzer,search_analyzer=custom_analyzer,term_vector=with_positions_offsets")
|
||||
.setSettings(
|
||||
settingsBuilder().put(indexSettings())
|
||||
.put("analysis.filter.wordDelimiter.type", "word_delimiter")
@@ -320,8 +320,8 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testNgramHighlightingPreLucene42() throws IOException {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("test",
|
||||
"name", "type=string,analyzer=name_index_analyzer,search_analyzer=name_search_analyzer," + randomStoreField() + "term_vector=with_positions_offsets",
|
||||
"name2", "type=string,analyzer=name2_index_analyzer,search_analyzer=name_search_analyzer," + randomStoreField() + "term_vector=with_positions_offsets")
|
||||
"name", "type=text,analyzer=name_index_analyzer,search_analyzer=name_search_analyzer," + randomStoreField() + "term_vector=with_positions_offsets",
|
||||
"name2", "type=text,analyzer=name2_index_analyzer,search_analyzer=name_search_analyzer," + randomStoreField() + "term_vector=with_positions_offsets")
|
||||
.setSettings(settingsBuilder()
|
||||
.put(indexSettings())
|
||||
.put("analysis.filter.my_ngram.max_gram", 20)
@@ -392,8 +392,8 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testNgramHighlighting() throws IOException {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("test",
|
||||
"name", "type=string,analyzer=name_index_analyzer,search_analyzer=name_search_analyzer,term_vector=with_positions_offsets",
|
||||
"name2", "type=string,analyzer=name2_index_analyzer,search_analyzer=name_search_analyzer,term_vector=with_positions_offsets")
|
||||
"name", "type=text,analyzer=name_index_analyzer,search_analyzer=name_search_analyzer,term_vector=with_positions_offsets",
|
||||
"name2", "type=text,analyzer=name2_index_analyzer,search_analyzer=name_search_analyzer,term_vector=with_positions_offsets")
|
||||
.setSettings(settingsBuilder()
|
||||
.put(indexSettings())
|
||||
.put("analysis.filter.my_ngram.max_gram", 20)
@@ -437,8 +437,8 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testEnsureNoNegativeOffsets() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1",
|
||||
"no_long_term", "type=string,term_vector=with_positions_offsets",
|
||||
"long_term", "type=string,term_vector=with_positions_offsets"));
|
||||
"no_long_term", "type=text,term_vector=with_positions_offsets",
|
||||
"long_term", "type=text,term_vector=with_positions_offsets"));
|
||||
ensureYellow();
|
||||
|
||||
client().prepareIndex("test", "type1", "1")
@@ -471,8 +471,8 @@ public class HighlighterSearchIT extends ESIntegTestCase {
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
// we don't store title and don't use term vector, now lets see if it works...
|
||||
.startObject("title").field("type", "string").field("store", false).field("term_vector", "no").endObject()
|
||||
.startObject("attachments").startObject("properties").startObject("body").field("type", "string").field("store", false).field("term_vector", "no").endObject().endObject().endObject()
|
||||
.startObject("title").field("type", "text").field("store", false).field("term_vector", "no").endObject()
|
||||
.startObject("attachments").startObject("properties").startObject("body").field("type", "text").field("store", false).field("term_vector", "no").endObject().endObject().endObject()
|
||||
.endObject().endObject().endObject()));
|
||||
ensureYellow();
|
||||
@@ -510,8 +510,8 @@ public class HighlighterSearchIT extends ESIntegTestCase {
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
// we don't store title, now lets see if it works...
|
||||
.startObject("title").field("type", "string").field("store", false).field("term_vector", "with_positions_offsets").endObject()
|
||||
.startObject("attachments").startObject("properties").startObject("body").field("type", "string").field("store", false).field("term_vector", "with_positions_offsets").endObject().endObject().endObject()
|
||||
.startObject("title").field("type", "text").field("store", false).field("term_vector", "with_positions_offsets").endObject()
|
||||
.startObject("attachments").startObject("properties").startObject("body").field("type", "text").field("store", false).field("term_vector", "with_positions_offsets").endObject().endObject().endObject()
|
||||
.endObject().endObject().endObject()));
|
||||
ensureYellow();
|
||||
@@ -549,8 +549,8 @@ public class HighlighterSearchIT extends ESIntegTestCase {
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
// we don't store title, now lets see if it works...
|
||||
.startObject("title").field("type", "string").field("store", false).field("index_options", "offsets").endObject()
|
||||
.startObject("attachments").startObject("properties").startObject("body").field("type", "string").field("store", false).field("index_options", "offsets").endObject().endObject().endObject()
|
||||
.startObject("title").field("type", "text").field("store", false).field("index_options", "offsets").endObject()
|
||||
.startObject("attachments").startObject("properties").startObject("body").field("type", "text").field("store", false).field("index_options", "offsets").endObject().endObject().endObject()
|
||||
.endObject().endObject().endObject()));
|
||||
ensureYellow();
|
||||
@@ -598,7 +598,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
||||
public void testHighlightIssue1994() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", "title", "type=string,store=false", "titleTV", "type=string,store=false,term_vector=with_positions_offsets"));
|
||||
.addMapping("type1", "title", "type=text,store=false", "titleTV", "type=text,store=false,term_vector=with_positions_offsets"));
|
||||
ensureYellow();
|
||||
|
||||
indexRandom(false, client().prepareIndex("test", "type1", "1")
@@ -654,9 +654,9 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testHighlightingOnWildcardFields() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1",
|
||||
"field-postings", "type=string,index_options=offsets",
|
||||
"field-fvh", "type=string,term_vector=with_positions_offsets",
|
||||
"field-plain", "type=string"));
|
||||
"field-postings", "type=text,index_options=offsets",
|
||||
"field-fvh", "type=text,term_vector=with_positions_offsets",
|
||||
"field-plain", "type=text"));
|
||||
ensureGreen();
|
||||
|
||||
client().prepareIndex("test", "type1")
@@ -683,7 +683,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.addMapping("type1", jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("_source").field("enabled", false).endObject()
|
||||
.startObject("properties")
|
||||
.startObject("field1").field("type", "string").field("store", true).field("index_options", "offsets")
|
||||
.startObject("field1").field("type", "text").field("store", true).field("index_options", "offsets")
|
||||
.field("term_vector", "with_positions_offsets").endObject()
|
||||
.endObject().endObject().endObject()));
|
||||
@@ -913,26 +913,26 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties")
|
||||
.startObject("foo")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("termVector", "with_positions_offsets")
|
||||
.field("store", true)
|
||||
.field("analyzer", "english")
|
||||
.startObject("fields")
|
||||
.startObject("plain")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("termVector", "with_positions_offsets")
|
||||
.field("analyzer", "standard")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.startObject("bar")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("termVector", "with_positions_offsets")
|
||||
.field("store", true)
|
||||
.field("analyzer", "english")
|
||||
.startObject("fields")
|
||||
.startObject("plain")
|
||||
.field("type", "string")
|
||||
.field("type", "text")
|
||||
.field("termVector", "with_positions_offsets")
|
||||
.field("analyzer", "standard")
|
||||
.endObject()
@@ -1103,15 +1103,15 @@ public class HighlighterSearchIT extends ESIntegTestCase {
return XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("_all").field("store", true).field("termVector", "with_positions_offsets").endObject()
|
||||
.startObject("properties")
|
||||
.startObject("field1").field("type", "string").field("termVector", "with_positions_offsets").endObject()
|
||||
.startObject("field2").field("type", "string").field("termVector", "with_positions_offsets").endObject()
|
||||
.startObject("field1").field("type", "text").field("termVector", "with_positions_offsets").endObject()
|
||||
.startObject("field2").field("type", "text").field("termVector", "with_positions_offsets").endObject()
|
||||
.endObject()
|
||||
.endObject().endObject();
|
||||
}
|
||||
|
||||
public void testSameContent() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", "title", "type=string,store=true,term_vector=with_positions_offsets"));
|
||||
.addMapping("type1", "title", "type=text,store=true,term_vector=with_positions_offsets"));
|
||||
ensureYellow();
|
||||
|
||||
IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5];
@@ -1133,7 +1133,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
||||
public void testFastVectorHighlighterOffsetParameter() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", "title", "type=string,store=true,term_vector=with_positions_offsets").get());
|
||||
.addMapping("type1", "title", "type=text,store=true,term_vector=with_positions_offsets").get());
|
||||
ensureYellow();
|
||||
|
||||
IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5];
@@ -1156,7 +1156,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
||||
public void testEscapeHtml() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", "title", "type=string,store=true"));
|
||||
.addMapping("type1", "title", "type=text,store=true"));
|
||||
ensureYellow();
|
||||
|
||||
IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5];
@@ -1178,7 +1178,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
||||
public void testEscapeHtmlVector() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", "title", "type=string,store=true,term_vector=with_positions_offsets"));
|
||||
.addMapping("type1", "title", "type=text,store=true,term_vector=with_positions_offsets"));
|
||||
ensureYellow();
|
||||
|
||||
IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5];
@@ -1201,9 +1201,9 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testMultiMapperVectorWithStore() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
.startObject("title").field("type", "string").field("store", true).field("term_vector", "with_positions_offsets").field("analyzer", "classic")
|
||||
.startObject("title").field("type", "text").field("store", true).field("term_vector", "with_positions_offsets").field("analyzer", "classic")
|
||||
.startObject("fields")
|
||||
.startObject("key").field("type", "string").field("store", true).field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject()
|
||||
.startObject("key").field("type", "text").field("store", true).field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject()
|
||||
.endObject().endObject()
|
||||
.endObject().endObject().endObject()));
|
||||
ensureGreen();
@@ -1229,9 +1229,9 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testMultiMapperVectorFromSource() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
.startObject("title").field("type", "string").field("store", false).field("term_vector", "with_positions_offsets").field("analyzer", "classic")
|
||||
.startObject("title").field("type", "text").field("store", false).field("term_vector", "with_positions_offsets").field("analyzer", "classic")
|
||||
.startObject("fields")
|
||||
.startObject("key").field("type", "string").field("store", false).field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject()
|
||||
.startObject("key").field("type", "text").field("store", false).field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject()
|
||||
.endObject().endObject()
|
||||
.endObject().endObject().endObject()));
|
||||
ensureGreen();
@@ -1259,9 +1259,9 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testMultiMapperNoVectorWithStore() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
.startObject("title").field("type", "string").field("store", true).field("term_vector", "no").field("analyzer", "classic")
|
||||
.startObject("title").field("type", "text").field("store", true).field("term_vector", "no").field("analyzer", "classic")
|
||||
.startObject("fields")
|
||||
.startObject("key").field("type", "string").field("store", true).field("term_vector", "no").field("analyzer", "whitespace").endObject()
|
||||
.startObject("key").field("type", "text").field("store", true).field("term_vector", "no").field("analyzer", "whitespace").endObject()
|
||||
.endObject().endObject()
|
||||
.endObject().endObject().endObject()));
|
||||
@@ -1289,9 +1289,9 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testMultiMapperNoVectorFromSource() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
.startObject("title").field("type", "string").field("store", false).field("term_vector", "no").field("analyzer", "classic")
|
||||
.startObject("title").field("type", "text").field("store", false).field("term_vector", "no").field("analyzer", "classic")
|
||||
.startObject("fields")
|
||||
.startObject("key").field("type", "string").field("store", false).field("term_vector", "no").field("analyzer", "whitespace").endObject()
|
||||
.startObject("key").field("type", "text").field("store", false).field("term_vector", "no").field("analyzer", "whitespace").endObject()
|
||||
.endObject().endObject()
|
||||
.endObject().endObject().endObject()));
|
||||
ensureGreen();
@@ -1317,7 +1317,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
||||
public void testFastVectorHighlighterShouldFailIfNoTermVectors() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", "title", "type=string,store=true,term_vector=no"));
|
||||
.addMapping("type1", "title", "type=text,store=true,term_vector=no"));
|
||||
ensureGreen();
|
||||
|
||||
IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5];
@@ -1347,7 +1347,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
||||
public void testDisableFastVectorHighlighter() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", "title", "type=string,store=true,term_vector=with_positions_offsets,analyzer=classic"));
|
||||
.addMapping("type1", "title", "type=text,store=true,term_vector=with_positions_offsets,analyzer=classic"));
|
||||
ensureGreen();
|
||||
|
||||
IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5];
@@ -1392,7 +1392,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
||||
public void testFSHHighlightAllMvFragments() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", "tags", "type=string,term_vector=with_positions_offsets"));
|
||||
.addMapping("type1", "tags", "type=text,term_vector=with_positions_offsets"));
|
||||
ensureGreen();
|
||||
client().prepareIndex("test", "type1", "1")
|
||||
.setSource("tags", new String[]{
@@ -1486,8 +1486,8 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
||||
assertAcked(prepareCreate("test").setSettings(builder.build()).addMapping("type1", type1TermVectorMapping())
|
||||
.addMapping("type2", "_all", "store=true,termVector=with_positions_offsets",
|
||||
"field4", "type=string,term_vector=with_positions_offsets,analyzer=synonym",
|
||||
"field3", "type=string,analyzer=synonym"));
|
||||
"field4", "type=text,term_vector=with_positions_offsets,analyzer=synonym",
|
||||
"field3", "type=text,analyzer=synonym"));
|
||||
ensureGreen();
|
||||
|
||||
client().prepareIndex("test", "type1", "0")
@@ -1548,7 +1548,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
||||
public void testPlainHighlightDifferentFragmenter() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", "tags", "type=string"));
|
||||
.addMapping("type1", "tags", "type=text"));
|
||||
ensureGreen();
|
||||
client().prepareIndex("test", "type1", "1")
|
||||
.setSource(jsonBuilder().startObject().field("tags",
@@ -1603,7 +1603,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
||||
public void testFastVectorHighlighterMultipleFields() {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", "field1", "type=string,term_vector=with_positions_offsets", "field2", "type=string,term_vector=with_positions_offsets"));
|
||||
.addMapping("type1", "field1", "type=text,term_vector=with_positions_offsets", "field2", "type=text,term_vector=with_positions_offsets"));
|
||||
ensureGreen();
|
||||
|
||||
index("test", "type1", "1", "field1", "The <b>quick<b> brown fox", "field2", "The <b>slow<b> brown fox");
|
||||
|
@ -1622,7 +1622,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
|||
|
||||
public void testMissingStoredField() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", "highlight_field", "type=string,store=true"));
|
||||
.addMapping("type1", "highlight_field", "type=text,store=true"));
|
||||
ensureGreen();
|
||||
client().prepareIndex("test", "type1", "1")
|
||||
.setSource(jsonBuilder().startObject()
|
||||
|
@ -1642,7 +1642,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
|||
// Issue #3211
|
||||
public void testNumericHighlighting() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("test", "text", "type=string,index=analyzed",
|
||||
.addMapping("test", "text", "type=text",
|
||||
"byte", "type=byte", "short", "type=short", "int", "type=integer", "long", "type=long",
|
||||
"float", "type=float", "double", "type=double"));
|
||||
ensureGreen();
|
||||
|
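Editorial note, not part of the commit: the conversion in these hunks is mechanical. type=string becomes type=text, the legacy index=analyzed option is dropped (the new text type is analyzed by default, as the hunk above shows), and store, analyzer, term_vector and index_options settings carry over unchanged. The sketch below shows the JSON-builder form of such a mapping with the same XContentFactory/XContentBuilder API that appears elsewhere in this diff; it is an illustration only, assuming an Elasticsearch core jar of this era on the classpath, and the class and method names (TextMappingSketch, textFieldMapping) are invented for the example.

import java.io.IOException;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class TextMappingSketch {

    // JSON-builder equivalent of the shorthand addMapping("test", "text", "type=text"):
    // no "index": "analyzed" entry is needed, because text fields are analyzed by default.
    static XContentBuilder textFieldMapping() throws IOException {
        return XContentFactory.jsonBuilder().startObject().startObject("test")
                .startObject("properties")
                    .startObject("text")
                        .field("type", "text")
                    .endObject()
                .endObject()
                .endObject().endObject();
    }

    public static void main(String[] args) throws IOException {
        // string() (assumed available on this era's XContentBuilder) returns the JSON
        // that would be supplied to addMapping(...).
        System.out.println(textFieldMapping().string());
    }
}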
@@ -1670,7 +1670,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
                         .put("analysis.analyzer.my_analyzer.type", "pattern")
                         .put("analysis.analyzer.my_analyzer.pattern", "\\s+")
                         .build())
-                .addMapping("type", "text", "type=string,analyzer=my_analyzer"));
+                .addMapping("type", "text", "type=text,analyzer=my_analyzer"));
         ensureGreen();
         client().prepareIndex("test", "type", "1")
                 .setSource("text", "elasticsearch test").get();
@@ -1685,7 +1685,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {

     public void testHighlightUsesHighlightQuery() throws IOException {
         assertAcked(prepareCreate("test")
-                .addMapping("type1", "text", "type=string," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets"));
+                .addMapping("type1", "text", "type=text," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets"));
         ensureGreen();

         index("test", "type1", "1", "text", "Testing the highlight query feature");
@@ -1751,7 +1751,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {

     public void testHighlightNoMatchSize() throws IOException {
         assertAcked(prepareCreate("test")
-                .addMapping("type1", "text", "type=string," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets"));
+                .addMapping("type1", "text", "type=text," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets"));
         ensureGreen();

         String text = "I am pretty long so some of me should get cut off. Second sentence";
@@ -1859,7 +1859,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {

     public void testHighlightNoMatchSizeWithMultivaluedFields() throws IOException {
         assertAcked(prepareCreate("test")
-                .addMapping("type1", "text", "type=string," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets"));
+                .addMapping("type1", "text", "type=text," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets"));
         ensureGreen();

         String text1 = "I am pretty long so some of me should get cut off. We'll see how that goes.";
@@ -1971,7 +1971,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {

     public void testHighlightNoMatchSizeNumberOfFragments() throws IOException {
         assertAcked(prepareCreate("test")
-                .addMapping("type1", "text", "type=string," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets"));
+                .addMapping("type1", "text", "type=text," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets"));
         ensureGreen();

         String text1 = "This is the first sentence. This is the second sentence." + HighlightUtils.PARAGRAPH_SEPARATOR;
@@ -2138,8 +2138,8 @@ public class HighlighterSearchIT extends ESIntegTestCase {
         XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
                 .startObject("_all").field("store", true).field("index_options", "offsets").endObject()
                 .startObject("properties")
-                .startObject("field1").field("type", "string").field("index_options", "offsets").field("term_vector", "with_positions_offsets").endObject()
-                .startObject("field2").field("type", "string").field("index_options", "offsets").field("term_vector", "with_positions_offsets").endObject()
+                .startObject("field1").field("type", "text").field("index_options", "offsets").field("term_vector", "with_positions_offsets").endObject()
+                .startObject("field2").field("type", "text").field("index_options", "offsets").field("term_vector", "with_positions_offsets").endObject()
                 .endObject()
                 .endObject().endObject();
         assertAcked(prepareCreate("test").addMapping("type1", mapping));
@@ -2203,7 +2203,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {

     public void testPostingsHighlighterEscapeHtml() throws Exception {
         assertAcked(prepareCreate("test")
-                .addMapping("type1", "title", "type=string," + randomStoreField() + "index_options=offsets"));
+                .addMapping("type1", "title", "type=text," + randomStoreField() + "index_options=offsets"));
         ensureYellow();

         IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5];
@@ -2226,9 +2226,9 @@ public class HighlighterSearchIT extends ESIntegTestCase {
         assertAcked(prepareCreate("test")
                 .addMapping("type1", jsonBuilder().startObject().startObject("type1")
                         .startObject("properties")
-                        .startObject("title").field("type", "string").field("store", true).field("index_options", "offsets").field("analyzer", "classic")
+                        .startObject("title").field("type", "text").field("store", true).field("index_options", "offsets").field("analyzer", "classic")
                         .startObject("fields")
-                        .startObject("key").field("type", "string").field("store", true).field("index_options", "offsets").field("analyzer", "whitespace").endObject()
+                        .startObject("key").field("type", "text").field("store", true).field("index_options", "offsets").field("analyzer", "whitespace").endObject()
                         .endObject().endObject()
                         .endObject().endObject().endObject()));
         ensureGreen();
@@ -2258,9 +2258,9 @@ public class HighlighterSearchIT extends ESIntegTestCase {
     public void testPostingsHighlighterMultiMapperFromSource() throws Exception {
         assertAcked(prepareCreate("test")
                 .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
-                        .startObject("title").field("type", "string").field("store", false).field("index_options", "offsets").field("analyzer", "classic")
+                        .startObject("title").field("type", "text").field("store", false).field("index_options", "offsets").field("analyzer", "classic")
                         .startObject("fields")
-                        .startObject("key").field("type", "string").field("store", false).field("index_options", "offsets").field("analyzer", "whitespace").endObject()
+                        .startObject("key").field("type", "text").field("store", false).field("index_options", "offsets").field("analyzer", "whitespace").endObject()
                         .endObject().endObject()
                         .endObject().endObject().endObject()));
         ensureGreen();
@@ -2287,7 +2287,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
     public void testPostingsHighlighterShouldFailIfNoOffsets() throws Exception {
         assertAcked(prepareCreate("test")
                 .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
-                        .startObject("title").field("type", "string").field("store", true).field("index_options", "docs").endObject()
+                        .startObject("title").field("type", "text").field("store", true).field("index_options", "docs").endObject()
                         .endObject().endObject().endObject()));
         ensureGreen();

@@ -2357,8 +2357,8 @@ public class HighlighterSearchIT extends ESIntegTestCase {
     private static XContentBuilder type1PostingsffsetsMapping() throws IOException {
         return XContentFactory.jsonBuilder().startObject().startObject("type1")
                 .startObject("properties")
-                .startObject("field1").field("type", "string").field("index_options", "offsets").endObject()
-                .startObject("field2").field("type", "string").field("index_options", "offsets").endObject()
+                .startObject("field1").field("type", "text").field("index_options", "offsets").endObject()
+                .startObject("field2").field("type", "text").field("index_options", "offsets").endObject()
                 .endObject()
                 .endObject().endObject();
     }
@@ -2552,7 +2552,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {

     public void testDoesNotHighlightTypeName() throws Exception {
         XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("typename").startObject("properties")
-                .startObject("foo").field("type", "string")
+                .startObject("foo").field("type", "text")
                     .field("index_options", "offsets")
                     .field("term_vector", "with_positions_offsets")
                 .endObject().endObject().endObject().endObject();
@@ -2570,7 +2570,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {

     public void testDoesNotHighlightAliasFilters() throws Exception {
         XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("typename").startObject("properties")
-                .startObject("foo").field("type", "string")
+                .startObject("foo").field("type", "text")
                     .field("index_options", "offsets")
                     .field("term_vector", "with_positions_offsets")
                 .endObject().endObject().endObject().endObject();
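Editorial note, not part of the commit: the highlighter tests above keep the highlighter-specific options on the converted fields, term_vector=with_positions_offsets for the fast vector highlighter (see testFastVectorHighlighterShouldFailIfNoTermVectors) and index_options=offsets for the postings highlighter (see testPostingsHighlighterShouldFailIfNoOffsets). The sketch below builds one mapping carrying each option, reusing the builder API from the diff; the class name and the reuse of field1/field2 are illustrative only, and an Elasticsearch jar of this era is assumed on the classpath.

import java.io.IOException;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class HighlighterMappingSketch {

    // field1 stores term vectors with positions and offsets (what the fast vector
    // highlighter needs); field2 indexes offsets in postings (what the postings
    // highlighter needs). Both are plain text fields otherwise.
    static XContentBuilder highlightableMapping() throws IOException {
        return XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties")
                    .startObject("field1")
                        .field("type", "text")
                        .field("term_vector", "with_positions_offsets")
                    .endObject()
                    .startObject("field2")
                        .field("type", "text")
                        .field("index_options", "offsets")
                    .endObject()
                .endObject()
                .endObject().endObject();
    }

    public static void main(String[] args) throws IOException {
        // Prints the mapping JSON; string() is assumed available on this era's builder.
        System.out.println(highlightableMapping().string());
    }
}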
@@ -84,12 +84,12 @@ public class InnerHitsIT extends ESIntegTestCase {
                         .field("type", "nested")
                         .startObject("properties")
                             .startObject("message")
-                                .field("type", "string")
+                                .field("type", "text")
                             .endObject()
                         .endObject()
                     .endObject()
                     .startObject("title")
-                        .field("type", "string")
+                        .field("type", "text")
                     .endObject()
                 .endObject().endObject().endObject()));

@@ -285,8 +285,8 @@ public class InnerHitsIT extends ESIntegTestCase {

     public void testSimpleParentChild() throws Exception {
         assertAcked(prepareCreate("articles")
-                .addMapping("article", "title", "type=string")
-                .addMapping("comment", "_parent", "type=article", "message", "type=string")
+                .addMapping("article", "title", "type=text")
+                .addMapping("comment", "_parent", "type=article", "message", "type=text")
         );

         List<IndexRequestBuilder> requests = new ArrayList<>();
@@ -506,8 +506,8 @@ public class InnerHitsIT extends ESIntegTestCase {

     public void testInnerHitsOnHasParent() throws Exception {
         assertAcked(prepareCreate("stack")
-                .addMapping("question", "body", "type=string")
-                .addMapping("answer", "_parent", "type=question", "body", "type=string")
+                .addMapping("question", "body", "type=text")
+                .addMapping("answer", "_parent", "type=question", "body", "type=text")
         );
         List<IndexRequestBuilder> requests = new ArrayList<>();
         requests.add(client().prepareIndex("stack", "question", "1").setSource("body", "I'm using HTTPS + Basic authentication to protect a resource. How can I throttle authentication attempts to protect against brute force attacks?"));
@@ -544,9 +544,9 @@ public class InnerHitsIT extends ESIntegTestCase {

     public void testParentChildMultipleLayers() throws Exception {
         assertAcked(prepareCreate("articles")
-                .addMapping("article", "title", "type=string")
-                .addMapping("comment", "_parent", "type=article", "message", "type=string")
-                .addMapping("remark", "_parent", "type=comment", "message", "type=string")
+                .addMapping("article", "title", "type=text")
+                .addMapping("comment", "_parent", "type=article", "message", "type=text")
+                .addMapping("remark", "_parent", "type=comment", "message", "type=text")
         );

         List<IndexRequestBuilder> requests = new ArrayList<>();
@@ -617,18 +617,18 @@ public class InnerHitsIT extends ESIntegTestCase {
                         .field("type", "nested")
                         .startObject("properties")
                             .startObject("message")
-                                .field("type", "string")
+                                .field("type", "text")
                             .endObject()
                             .startObject("remarks")
                                 .field("type", "nested")
                                 .startObject("properties")
-                                    .startObject("message").field("type", "string").endObject()
+                                    .startObject("message").field("type", "text").endObject()
                                 .endObject()
                             .endObject()
                         .endObject()
                     .endObject()
                     .startObject("title")
-                        .field("type", "string")
+                        .field("type", "text")
                     .endObject()
                 .endObject().endObject().endObject()));

@@ -731,7 +731,7 @@ public class InnerHitsIT extends ESIntegTestCase {

     // Issue #9723
     public void testNestedDefinedAsObject() throws Exception {
-        assertAcked(prepareCreate("articles").addMapping("article", "comments", "type=nested", "title", "type=string"));
+        assertAcked(prepareCreate("articles").addMapping("article", "comments", "type=nested", "title", "type=text"));

         List<IndexRequestBuilder> requests = new ArrayList<>();
         requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject()
@@ -762,7 +762,7 @@ public class InnerHitsIT extends ESIntegTestCase {
                 .startObject("comments")
                     .field("type", "nested")
                     .startObject("properties")
-                        .startObject("message").field("type", "string").field("store", true).endObject()
+                        .startObject("message").field("type", "text").field("store", true).endObject()
                     .endObject()
                 .endObject()
                 .endObject()
@@ -800,7 +800,7 @@ public class InnerHitsIT extends ESIntegTestCase {
                 .startObject("comments")
                     .field("type", "nested")
                     .startObject("properties")
-                        .startObject("message").field("type", "string").field("store", true).endObject()
+                        .startObject("message").field("type", "text").field("store", true).endObject()
                     .endObject()
                 .endObject()
                 .endObject()
@@ -838,7 +838,7 @@ public class InnerHitsIT extends ESIntegTestCase {
                 .startObject("comments")
                     .field("type", "nested")
                     .startObject("properties")
-                        .startObject("message").field("type", "string").field("store", true).endObject()
+                        .startObject("message").field("type", "text").field("store", true).endObject()
                     .endObject()
                 .endObject()
                 .endObject()
@@ -877,7 +877,7 @@ public class InnerHitsIT extends ESIntegTestCase {
                 .startObject("comments")
                     .field("type", "nested")
                     .startObject("properties")
-                        .startObject("message").field("type", "string").field("store", true).endObject()
+                        .startObject("message").field("type", "text").field("store", true).endObject()
                    .endObject()
                 .endObject()
                 .endObject()
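Editorial note, not part of the commit: in the InnerHitsIT hunks only the leaf field types change; the enclosing type=nested objects and _parent mappings are left untouched, so nested and parent/child inner hits are exercised exactly as before, just over text leaves. A minimal sketch of the nested shape used above (a comments object with a stored text message and a top-level text title), again with the builder API from the diff; the class and method names are invented for the example.

import java.io.IOException;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class NestedTextMappingSketch {

    // A nested "comments" object whose "message" leaf is a stored text field,
    // mirroring the hunks above; the nested definition itself is unchanged by the commit.
    static XContentBuilder articleMapping() throws IOException {
        return XContentFactory.jsonBuilder().startObject().startObject("article")
                .startObject("properties")
                    .startObject("comments")
                        .field("type", "nested")
                        .startObject("properties")
                            .startObject("message").field("type", "text").field("store", true).endObject()
                        .endObject()
                    .endObject()
                    .startObject("title").field("type", "text").endObject()
                .endObject()
                .endObject().endObject();
    }

    public static void main(String[] args) throws IOException {
        System.out.println(articleMapping().string());
    }
}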
@@ -65,7 +65,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
         logger.info("Creating index test");
         assertAcked(prepareCreate("test").addMapping("type1",
                 jsonBuilder().startObject().startObject("type1").startObject("properties")
-                        .startObject("text").field("type", "string").endObject()
+                        .startObject("text").field("type", "text").endObject()
                         .endObject().endObject().endObject()));

         logger.info("Running Cluster Health");
@@ -105,7 +105,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
         logger.info("Creating index test");
         assertAcked(prepareCreate("test").addMapping("type1",
                 jsonBuilder().startObject().startObject("type1").startObject("properties")
-                        .startObject("text").field("type", "string").endObject()
+                        .startObject("text").field("type", "text").endObject()
                         .endObject().endObject().endObject()));
         logger.info("Creating aliases alias release");
         client().admin().indices().aliases(indexAliasesRequest().addAlias("release", termQuery("text", "release"), "test")).actionGet();
@@ -218,7 +218,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
                 .startObject().startObject("type")
                 .startObject("properties")
                     .startObject("int_value").field("type", randomFrom(numericTypes)).endObject()
-                    .startObject("string_value").field("type", "string").endObject()
+                    .startObject("string_value").field("type", "text").endObject()
                 .endObject()
                 .endObject().endObject()).execute().actionGet();
         ensureGreen();
@@ -275,7 +275,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
         logger.info("Creating index test");
         assertAcked(prepareCreate("test").addMapping("type1",
                 jsonBuilder().startObject().startObject("type1").startObject("properties")
-                        .startObject("text").field("type", "string").endObject()
+                        .startObject("text").field("type", "text").endObject()
                         .endObject().endObject().endObject()));

         logger.info("Running Cluster Health");
@@ -311,7 +311,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
         logger.info("Creating index test");
         assertAcked(prepareCreate("test").addMapping("type1",
                 jsonBuilder().startObject().startObject("type1").startObject("properties")
-                        .startObject("text").field("type", "string").endObject()
+                        .startObject("text").field("type", "text").endObject()
                         .endObject().endObject().endObject()));

         logger.info("Running Cluster Health");
@@ -336,7 +336,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
         CreateIndexRequestBuilder createRequestBuilder = prepareCreate("test");
         for (int i = 0; i < numOfTypes; i++) {
             createRequestBuilder.addMapping("type" + i, jsonBuilder().startObject().startObject("type" + i).startObject("properties")
-                    .startObject("text").field("type", "string").endObject()
+                    .startObject("text").field("type", "text").endObject()
                     .endObject().endObject().endObject());
         }
         assertAcked(createRequestBuilder);
@@ -365,7 +365,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
     public void testMoreLikeThisMultiValueFields() throws Exception {
         logger.info("Creating the index ...");
         assertAcked(prepareCreate("test")
-                .addMapping("type1", "text", "type=string,analyzer=keyword")
+                .addMapping("type1", "text", "type=text,analyzer=keyword")
                 .setSettings(SETTING_NUMBER_OF_SHARDS, 1));
         ensureGreen();

@@ -397,7 +397,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
     public void testMinimumShouldMatch() throws ExecutionException, InterruptedException {
         logger.info("Creating the index ...");
         assertAcked(prepareCreate("test")
-                .addMapping("type1", "text", "type=string,analyzer=whitespace")
+                .addMapping("type1", "text", "type=text,analyzer=whitespace")
                 .setSettings(SETTING_NUMBER_OF_SHARDS, 1));
         ensureGreen();

@@ -460,7 +460,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
     public void testMoreLikeThisMalformedArtificialDocs() throws Exception {
         logger.info("Creating the index ...");
         assertAcked(prepareCreate("test")
-                .addMapping("type1", "text", "type=string,analyzer=whitespace", "date", "type=date"));
+                .addMapping("type1", "text", "type=text,analyzer=whitespace", "date", "type=date"));
         ensureGreen("test");

         logger.info("Creating an index with a single document ...");
@@ -572,7 +572,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {

     public void testSelectFields() throws IOException, ExecutionException, InterruptedException {
         assertAcked(prepareCreate("test")
-                .addMapping("type1", "text", "type=string,analyzer=whitespace", "text1", "type=string,analyzer=whitespace"));
+                .addMapping("type1", "text", "type=text,analyzer=whitespace", "text1", "type=text,analyzer=whitespace"));
         ensureGreen("test");

         indexRandom(true, client().prepareIndex("test", "type1", "1").setSource(jsonBuilder()
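Editorial note, not part of the commit: the MoreLikeThisIT tests depend on per-field analyzers (keyword, whitespace), and those options carry over verbatim in the shorthand mappings, for example type=text,analyzer=whitespace. The sketch below is the JSON-builder equivalent of that shorthand, with an invented class name and the same assumed Elasticsearch jar on the classpath; it is illustrative only.

import java.io.IOException;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class AnalyzedTextMappingSketch {

    // JSON equivalent of addMapping("type1", "text", "type=text,analyzer=whitespace"):
    // the analyzer option is preserved on the new text field type.
    static XContentBuilder whitespaceTextMapping() throws IOException {
        return XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties")
                    .startObject("text")
                        .field("type", "text")
                        .field("analyzer", "whitespace")
                    .endObject()
                .endObject()
                .endObject().endObject();
    }

    public static void main(String[] args) throws IOException {
        System.out.println(whitespaceTextMapping().string());
    }
}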
Some files were not shown because too many files have changed in this diff.