Rework norms parameters for 5.0. #16987
Changes: - no more option to configure eager/lazy loading of the norms (useless now that norms are disk-based) - only the `string`, `text` and `keyword` fields support the `norms` setting - the `norms` setting takes a boolean that decides whether norms should be stored in the index but old options are still supported to give users time to upgrade - setting a `boost` no longer implicitly enables norms (for new indices only, this is still needed for old indices)
This commit is contained in:
parent
5596e31068
commit
c50c5a52d5
|
@ -19,12 +19,7 @@
|
|||
|
||||
package org.elasticsearch.index;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectHashSet;
|
||||
import com.carrotsearch.hppc.ObjectSet;
|
||||
import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.index.NumericDocValues;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Setting.Property;
|
||||
|
@ -64,7 +59,6 @@ public final class IndexWarmer extends AbstractComponent {
|
|||
super(settings);
|
||||
ArrayList<Listener> list = new ArrayList<>();
|
||||
final Executor executor = threadPool.executor(ThreadPool.Names.WARMER);
|
||||
list.add(new NormsWarmer(executor));
|
||||
list.add(new FieldDataWarmer(executor));
|
||||
for (Listener listener : listeners) {
|
||||
list.add(listener);
|
||||
|
@ -138,64 +132,6 @@ public final class IndexWarmer extends AbstractComponent {
|
|||
TerminationHandle warmTopReader(IndexShard indexShard, Engine.Searcher searcher);
|
||||
}
|
||||
|
||||
private static class NormsWarmer implements IndexWarmer.Listener {
|
||||
private final Executor executor;
|
||||
public NormsWarmer(Executor executor) {
|
||||
this.executor = executor;
|
||||
}
|
||||
@Override
|
||||
public TerminationHandle warmNewReaders(final IndexShard indexShard, final Engine.Searcher searcher) {
|
||||
final MappedFieldType.Loading defaultLoading = indexShard.indexSettings().getValue(INDEX_NORMS_LOADING_SETTING);
|
||||
final MapperService mapperService = indexShard.mapperService();
|
||||
final ObjectSet<String> warmUp = new ObjectHashSet<>();
|
||||
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
|
||||
for (FieldMapper fieldMapper : docMapper.mappers()) {
|
||||
final String indexName = fieldMapper.fieldType().name();
|
||||
MappedFieldType.Loading normsLoading = fieldMapper.fieldType().normsLoading();
|
||||
if (normsLoading == null) {
|
||||
normsLoading = defaultLoading;
|
||||
}
|
||||
if (fieldMapper.fieldType().indexOptions() != IndexOptions.NONE && !fieldMapper.fieldType().omitNorms()
|
||||
&& normsLoading == MappedFieldType.Loading.EAGER) {
|
||||
warmUp.add(indexName);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
// Norms loading may be I/O intensive but is not CPU intensive, so we execute it in a single task
|
||||
executor.execute(() -> {
|
||||
try {
|
||||
for (ObjectCursor<String> stringObjectCursor : warmUp) {
|
||||
final String indexName = stringObjectCursor.value;
|
||||
final long start = System.nanoTime();
|
||||
for (final LeafReaderContext ctx : searcher.reader().leaves()) {
|
||||
final NumericDocValues values = ctx.reader().getNormValues(indexName);
|
||||
if (values != null) {
|
||||
values.get(0);
|
||||
}
|
||||
}
|
||||
if (indexShard.warmerService().logger().isTraceEnabled()) {
|
||||
indexShard.warmerService().logger().trace("warmed norms for [{}], took [{}]", indexName,
|
||||
TimeValue.timeValueNanos(System.nanoTime() - start));
|
||||
}
|
||||
}
|
||||
} catch (Throwable t) {
|
||||
indexShard.warmerService().logger().warn("failed to warm-up norms", t);
|
||||
} finally {
|
||||
latch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
return () -> latch.await();
|
||||
}
|
||||
|
||||
@Override
|
||||
public TerminationHandle warmTopReader(IndexShard indexShard, final Engine.Searcher searcher) {
|
||||
return TerminationHandle.NO_WAIT;
|
||||
}
|
||||
}
|
||||
|
||||
private static class FieldDataWarmer implements IndexWarmer.Listener {
|
||||
|
||||
private final Executor executor;
|
||||
|
|
|
@ -203,11 +203,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
|
|||
return builder;
|
||||
}
|
||||
|
||||
public T normsLoading(MappedFieldType.Loading normsLoading) {
|
||||
this.fieldType.setNormsLoading(normsLoading);
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T fieldDataSettings(Settings settings) {
|
||||
this.fieldDataSettings = settings;
|
||||
return builder;
|
||||
|
@ -243,6 +238,9 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
|
|||
|
||||
protected void setupFieldType(BuilderContext context) {
|
||||
fieldType.setName(buildFullName(context));
|
||||
if (context.indexCreatedVersion().before(Version.V_5_0_0)) {
|
||||
fieldType.setOmitNorms(fieldType.omitNorms() && fieldType.boost() == 1.0f);
|
||||
}
|
||||
if (fieldType.indexAnalyzer() == null && fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE) {
|
||||
fieldType.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
|
||||
fieldType.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
|
||||
|
@ -419,15 +417,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
|
|||
if (includeDefaults || fieldType().storeTermVectors() != defaultFieldType.storeTermVectors()) {
|
||||
builder.field("term_vector", termVectorOptionsToString(fieldType()));
|
||||
}
|
||||
if (includeDefaults || fieldType().omitNorms() != defaultFieldType.omitNorms() || fieldType().normsLoading() != null) {
|
||||
builder.startObject("norms");
|
||||
if (includeDefaults || fieldType().omitNorms() != defaultFieldType.omitNorms()) {
|
||||
builder.field("enabled", !fieldType().omitNorms());
|
||||
}
|
||||
if (fieldType().normsLoading() != null) {
|
||||
builder.field(MappedFieldType.Loading.KEY, fieldType().normsLoading());
|
||||
}
|
||||
builder.endObject();
|
||||
if (includeDefaults || fieldType().omitNorms() != defaultFieldType.omitNorms()) {
|
||||
builder.field("norms", fieldType().omitNorms() == false);
|
||||
}
|
||||
if (indexed && (includeDefaults || fieldType().indexOptions() != defaultFieldType.indexOptions())) {
|
||||
builder.field("index_options", indexOptionToString(fieldType().indexOptions()));
|
||||
|
|
|
@ -103,7 +103,6 @@ public abstract class MappedFieldType extends FieldType {
|
|||
private NamedAnalyzer searchAnalyzer;
|
||||
private NamedAnalyzer searchQuoteAnalyzer;
|
||||
private SimilarityProvider similarity;
|
||||
private Loading normsLoading;
|
||||
private FieldDataType fieldDataType;
|
||||
private Object nullValue;
|
||||
private String nullValueAsString; // for sending null value to _all field
|
||||
|
@ -117,7 +116,6 @@ public abstract class MappedFieldType extends FieldType {
|
|||
this.searchAnalyzer = ref.searchAnalyzer();
|
||||
this.searchQuoteAnalyzer = ref.searchQuoteAnalyzer();
|
||||
this.similarity = ref.similarity();
|
||||
this.normsLoading = ref.normsLoading();
|
||||
this.fieldDataType = ref.fieldDataType();
|
||||
this.nullValue = ref.nullValue();
|
||||
this.nullValueAsString = ref.nullValueAsString();
|
||||
|
@ -158,7 +156,6 @@ public abstract class MappedFieldType extends FieldType {
|
|||
Objects.equals(indexAnalyzer, fieldType.indexAnalyzer) &&
|
||||
Objects.equals(searchAnalyzer, fieldType.searchAnalyzer) &&
|
||||
Objects.equals(searchQuoteAnalyzer(), fieldType.searchQuoteAnalyzer()) &&
|
||||
Objects.equals(normsLoading, fieldType.normsLoading) &&
|
||||
Objects.equals(fieldDataType, fieldType.fieldDataType) &&
|
||||
Objects.equals(nullValue, fieldType.nullValue) &&
|
||||
Objects.equals(nullValueAsString, fieldType.nullValueAsString);
|
||||
|
@ -167,7 +164,7 @@ public abstract class MappedFieldType extends FieldType {
|
|||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(super.hashCode(), name, boost, docValues, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer,
|
||||
similarity == null ? null : similarity.name(), normsLoading, fieldDataType, nullValue, nullValueAsString);
|
||||
similarity == null ? null : similarity.name(), fieldDataType, nullValue, nullValueAsString);
|
||||
}
|
||||
|
||||
// norelease: we need to override freeze() and add safety checks that all settings are actually set
|
||||
|
@ -205,7 +202,7 @@ public abstract class MappedFieldType extends FieldType {
|
|||
conflicts.add("mapper [" + name() + "] has different [doc_values] values");
|
||||
}
|
||||
if (omitNorms() && !other.omitNorms()) {
|
||||
conflicts.add("mapper [" + name() + "] has different [omit_norms] values, cannot change from disable to enabled");
|
||||
conflicts.add("mapper [" + name() + "] has different [norms] values, cannot change from disable to enabled");
|
||||
}
|
||||
if (storeTermVectors() != other.storeTermVectors()) {
|
||||
conflicts.add("mapper [" + name() + "] has different [store_term_vector] values");
|
||||
|
@ -242,9 +239,6 @@ public abstract class MappedFieldType extends FieldType {
|
|||
if (boost() != other.boost()) {
|
||||
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [boost] across all types.");
|
||||
}
|
||||
if (normsLoading() != other.normsLoading()) {
|
||||
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [norms.loading] across all types.");
|
||||
}
|
||||
if (Objects.equals(searchAnalyzer(), other.searchAnalyzer()) == false) {
|
||||
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [search_analyzer] across all types.");
|
||||
}
|
||||
|
@ -304,15 +298,6 @@ public abstract class MappedFieldType extends FieldType {
|
|||
this.docValues = hasDocValues;
|
||||
}
|
||||
|
||||
public Loading normsLoading() {
|
||||
return normsLoading;
|
||||
}
|
||||
|
||||
public void setNormsLoading(Loading normsLoading) {
|
||||
checkIfFrozen();
|
||||
this.normsLoading = normsLoading;
|
||||
}
|
||||
|
||||
public NamedAnalyzer indexAnalyzer() {
|
||||
return indexAnalyzer;
|
||||
}
|
||||
|
|
|
@ -92,14 +92,6 @@ public final class KeywordFieldMapper extends FieldMapper implements AllFieldMap
|
|||
return super.indexOptions(indexOptions);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setupFieldType(BuilderContext context) {
|
||||
if (!omitNormsSet && fieldType.boost() != 1.0f) {
|
||||
fieldType.setOmitNorms(false);
|
||||
}
|
||||
super.setupFieldType(context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public KeywordFieldMapper build(BuilderContext context) {
|
||||
setupFieldType(context);
|
||||
|
@ -128,6 +120,9 @@ public final class KeywordFieldMapper extends FieldMapper implements AllFieldMap
|
|||
} else if (propName.equals("ignore_above")) {
|
||||
builder.ignoreAbove(XContentMapValues.nodeIntegerValue(propNode, -1));
|
||||
iterator.remove();
|
||||
} else if (propName.equals("norms")) {
|
||||
builder.omitNorms(XContentMapValues.nodeBooleanValue(propNode) == false);
|
||||
iterator.remove();
|
||||
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
|
||||
iterator.remove();
|
||||
}
|
||||
|
|
|
@ -31,6 +31,7 @@ import org.apache.lucene.index.IndexableField;
|
|||
import org.apache.lucene.index.IndexableFieldType;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Explicit;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Setting.Property;
|
||||
|
@ -116,7 +117,6 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM
|
|||
|
||||
protected void setupFieldType(BuilderContext context) {
|
||||
super.setupFieldType(context);
|
||||
fieldType.setOmitNorms(fieldType.omitNorms() && fieldType.boost() == 1.0f);
|
||||
int precisionStep = fieldType.numericPrecisionStep();
|
||||
if (precisionStep <= 0 || precisionStep >= maxPrecisionStep()) {
|
||||
fieldType.setNumericPrecisionStep(Integer.MAX_VALUE);
|
||||
|
|
|
@ -157,13 +157,30 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
fieldName);
|
||||
final Object index = node.remove("index");
|
||||
final boolean keyword = index != null && "analyzed".equals(index) == false;
|
||||
// upgrade the index setting
|
||||
node.put("index", "no".equals(index) == false);
|
||||
{
|
||||
// upgrade the index setting
|
||||
node.put("index", "no".equals(index) == false);
|
||||
}
|
||||
{
|
||||
// upgrade norms settings
|
||||
Object norms = node.remove("norms");
|
||||
if (norms instanceof Map) {
|
||||
norms = ((Map<?,?>) norms).get("enabled");
|
||||
}
|
||||
if (norms != null) {
|
||||
node.put("norms", TypeParsers.nodeBooleanValue("norms", norms, parserContext));
|
||||
}
|
||||
Object omitNorms = node.remove("omit_norms");
|
||||
if (omitNorms != null) {
|
||||
node.put("norms", TypeParsers.nodeBooleanValue("omit_norms", omitNorms, parserContext) == false);
|
||||
}
|
||||
}
|
||||
if (keyword) {
|
||||
return new KeywordFieldMapper.TypeParser().parse(fieldName, node, parserContext);
|
||||
} else {
|
||||
return new TextFieldMapper.TypeParser().parse(fieldName, node, parserContext);
|
||||
}
|
||||
|
||||
}
|
||||
throw new IllegalArgumentException("The [string] type is removed in 5.0. You should now use either a [text] "
|
||||
+ "or [keyword] field instead for field [" + fieldName + "]");
|
||||
|
|
|
@ -71,7 +71,7 @@ public class TypeParsers {
|
|||
private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(TypeParsers.class));
|
||||
private static final Set<String> BOOLEAN_STRINGS = new HashSet<>(Arrays.asList("true", "false"));
|
||||
|
||||
private static boolean nodeBooleanValue(String name, Object node, Mapper.TypeParser.ParserContext parserContext) {
|
||||
public static boolean nodeBooleanValue(String name, Object node, Mapper.TypeParser.ParserContext parserContext) {
|
||||
// Hook onto ParseFieldMatcher so that parsing becomes strict when setting index.query.parse.strict
|
||||
if (parserContext.parseFieldMatcher().isStrict()) {
|
||||
return XContentMapValues.nodeBooleanValue(node);
|
||||
|
@ -99,9 +99,6 @@ public class TypeParsers {
|
|||
} else if (propName.equals("coerce")) {
|
||||
builder.coerce(nodeBooleanValue("coerce", propNode, parserContext));
|
||||
iterator.remove();
|
||||
} else if (propName.equals("omit_norms")) {
|
||||
builder.omitNorms(nodeBooleanValue("omit_norms", propNode, parserContext));
|
||||
iterator.remove();
|
||||
} else if (propName.equals("similarity")) {
|
||||
SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString());
|
||||
builder.similarity(similarityProvider);
|
||||
|
@ -187,6 +184,37 @@ public class TypeParsers {
|
|||
}
|
||||
}
|
||||
|
||||
public static boolean parseNorms(FieldMapper.Builder builder, String propName, Object propNode, Mapper.TypeParser.ParserContext parserContext) {
|
||||
if (propName.equals("norms")) {
|
||||
if (propNode instanceof Map) {
|
||||
final Map<String, Object> properties = nodeMapValue(propNode, "norms");
|
||||
for (Iterator<Entry<String, Object>> propsIterator = properties.entrySet().iterator(); propsIterator.hasNext();) {
|
||||
Entry<String, Object> entry2 = propsIterator.next();
|
||||
final String propName2 = Strings.toUnderscoreCase(entry2.getKey());
|
||||
final Object propNode2 = entry2.getValue();
|
||||
if (propName2.equals("enabled")) {
|
||||
builder.omitNorms(!lenientNodeBooleanValue(propNode2));
|
||||
propsIterator.remove();
|
||||
} else if (propName2.equals(Loading.KEY)) {
|
||||
// ignore for bw compat
|
||||
propsIterator.remove();
|
||||
}
|
||||
}
|
||||
DocumentMapperParser.checkNoRemainingFields(propName, properties, parserContext.indexVersionCreated());
|
||||
DEPRECATION_LOGGER.deprecated("The [norms{enabled:true/false}] way of specifying norms is deprecated, please use [norms:true/false] instead");
|
||||
} else {
|
||||
builder.omitNorms(nodeBooleanValue("norms", propNode, parserContext) == false);
|
||||
}
|
||||
return true;
|
||||
} else if (propName.equals("omit_norms")) {
|
||||
builder.omitNorms(nodeBooleanValue("norms", propNode, parserContext));
|
||||
DEPRECATION_LOGGER.deprecated("[omit_norms] is deprecated, please use [norms] instead with the opposite boolean value");
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse text field attributes. In addition to {@link #parseField common attributes}
|
||||
* this will parse analysis and term-vectors related settings.
|
||||
|
@ -194,6 +222,14 @@ public class TypeParsers {
|
|||
public static void parseTextField(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
|
||||
parseField(builder, name, fieldNode, parserContext);
|
||||
parseAnalyzersAndTermVectors(builder, name, fieldNode, parserContext);
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
final String propName = Strings.toUnderscoreCase(entry.getKey());
|
||||
final Object propNode = entry.getValue();
|
||||
if (parseNorms(builder, propName, propNode, parserContext)) {
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -217,24 +253,8 @@ public class TypeParsers {
|
|||
} else if (propName.equals("boost")) {
|
||||
builder.boost(nodeFloatValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (propName.equals("omit_norms")) {
|
||||
builder.omitNorms(nodeBooleanValue("omit_norms", propNode, parserContext));
|
||||
iterator.remove();
|
||||
} else if (propName.equals("norms")) {
|
||||
final Map<String, Object> properties = nodeMapValue(propNode, "norms");
|
||||
for (Iterator<Entry<String, Object>> propsIterator = properties.entrySet().iterator(); propsIterator.hasNext();) {
|
||||
Entry<String, Object> entry2 = propsIterator.next();
|
||||
final String propName2 = Strings.toUnderscoreCase(entry2.getKey());
|
||||
final Object propNode2 = entry2.getValue();
|
||||
if (propName2.equals("enabled")) {
|
||||
builder.omitNorms(!lenientNodeBooleanValue(propNode2));
|
||||
propsIterator.remove();
|
||||
} else if (propName2.equals(Loading.KEY)) {
|
||||
builder.normsLoading(Loading.parse(nodeStringValue(propNode2, null), null));
|
||||
propsIterator.remove();
|
||||
}
|
||||
}
|
||||
DocumentMapperParser.checkNoRemainingFields(propName, properties, parserContext.indexVersionCreated());
|
||||
} else if (parserContext.indexVersionCreated().before(Version.V_5_0_0)
|
||||
&& parseNorms(builder, propName, propNode, parserContext)) {
|
||||
iterator.remove();
|
||||
} else if (propName.equals("index_options")) {
|
||||
builder.indexOptions(nodeIndexOptionValue(propNode));
|
||||
|
|
|
@ -305,7 +305,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
|
|||
builder.field("store_term_vector_payloads", fieldType().storeTermVectorPayloads());
|
||||
}
|
||||
if (includeDefaults || fieldType().omitNorms() != Defaults.FIELD_TYPE.omitNorms()) {
|
||||
builder.field("omit_norms", fieldType().omitNorms());
|
||||
builder.field("norms", !fieldType().omitNorms());
|
||||
}
|
||||
|
||||
doXContentAnalyzers(builder, includeDefaults);
|
||||
|
|
|
@ -130,12 +130,6 @@ public abstract class FieldTypeTestCase extends ESTestCase {
|
|||
other.setSimilarity(new BM25SimilarityProvider("bar", Settings.EMPTY));
|
||||
}
|
||||
},
|
||||
new Modifier("norms.loading", true) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
ft.setNormsLoading(MappedFieldType.Loading.LAZY);
|
||||
}
|
||||
},
|
||||
new Modifier("fielddata", true) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
|
@ -217,7 +211,6 @@ public abstract class FieldTypeTestCase extends ESTestCase {
|
|||
", searchAnalyzer=" + ft.searchAnalyzer() +
|
||||
", searchQuoteAnalyzer=" + ft.searchQuoteAnalyzer() +
|
||||
", similarity=" + ft.similarity() +
|
||||
", normsLoading=" + ft.normsLoading() +
|
||||
", fieldDataType=" + ft.fieldDataType() +
|
||||
", nullValue=" + ft.nullValue() +
|
||||
", nullValueAsString='" + ft.nullValueAsString() + "'" +
|
||||
|
|
|
@ -223,7 +223,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
|
|||
}
|
||||
|
||||
public void testRandom() throws Exception {
|
||||
boolean omitNorms = false;
|
||||
boolean norms = false;
|
||||
boolean stored = false;
|
||||
boolean enabled = true;
|
||||
boolean tv_stored = false;
|
||||
|
@ -239,7 +239,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
|
|||
allDefault = false;
|
||||
mappingBuilder.startObject("_all");
|
||||
if (randomBoolean()) {
|
||||
booleanOptionList.add(new Tuple<>("omit_norms", omitNorms = randomBoolean()));
|
||||
booleanOptionList.add(new Tuple<>("norms", norms = randomBoolean()));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
booleanOptionList.add(new Tuple<>("store", stored = randomBoolean()));
|
||||
|
@ -285,7 +285,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
|
|||
Document doc = builtDocMapper.parse("test", "test", "1", new BytesArray(json)).rootDoc();
|
||||
AllField field = (AllField) doc.getField("_all");
|
||||
if (enabled) {
|
||||
assertThat(field.fieldType().omitNorms(), equalTo(omitNorms));
|
||||
assertThat(field.fieldType().omitNorms(), equalTo(!norms));
|
||||
assertThat(field.fieldType().stored(), equalTo(stored));
|
||||
assertThat(field.fieldType().storeTermVectorOffsets(), equalTo(tv_offsets));
|
||||
assertThat(field.fieldType().storeTermVectorPayloads(), equalTo(tv_payloads));
|
||||
|
|
|
@ -85,18 +85,17 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase {
|
|||
}
|
||||
|
||||
public void testBackCompatFieldMappingBoostValues() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
|
||||
.startObject("s_field").field("type", "keyword").field("boost", 2.0f).endObject()
|
||||
.startObject("l_field").field("type", "long").field("boost", 3.0f).startObject("norms").field("enabled", true).endObject().endObject()
|
||||
.startObject("i_field").field("type", "integer").field("boost", 4.0f).startObject("norms").field("enabled", true).endObject().endObject()
|
||||
.startObject("sh_field").field("type", "short").field("boost", 5.0f).startObject("norms").field("enabled", true).endObject().endObject()
|
||||
.startObject("b_field").field("type", "byte").field("boost", 6.0f).startObject("norms").field("enabled", true).endObject().endObject()
|
||||
.startObject("d_field").field("type", "double").field("boost", 7.0f).startObject("norms").field("enabled", true).endObject().endObject()
|
||||
.startObject("f_field").field("type", "float").field("boost", 8.0f).startObject("norms").field("enabled", true).endObject().endObject()
|
||||
.startObject("date_field").field("type", "date").field("boost", 9.0f).startObject("norms").field("enabled", true).endObject().endObject()
|
||||
.endObject().endObject().endObject().string();
|
||||
|
||||
{
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
|
||||
.startObject("s_field").field("type", "keyword").field("boost", 2.0f).endObject()
|
||||
.startObject("l_field").field("type", "long").field("boost", 3.0f).endObject()
|
||||
.startObject("i_field").field("type", "integer").field("boost", 4.0f).endObject()
|
||||
.startObject("sh_field").field("type", "short").field("boost", 5.0f).endObject()
|
||||
.startObject("b_field").field("type", "byte").field("boost", 6.0f).endObject()
|
||||
.startObject("d_field").field("type", "double").field("boost", 7.0f).endObject()
|
||||
.startObject("f_field").field("type", "float").field("boost", 8.0f).endObject()
|
||||
.startObject("date_field").field("type", "date").field("boost", 9.0f).endObject()
|
||||
.endObject().endObject().endObject().string();
|
||||
IndexService indexService = createIndex("test", BW_SETTINGS);
|
||||
QueryShardContext context = indexService.newQueryShardContext();
|
||||
DocumentMapper mapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
|
@ -122,16 +121,34 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase {
|
|||
.endObject().bytes());
|
||||
|
||||
assertThat(doc.rootDoc().getField("s_field").boost(), equalTo(2.0f));
|
||||
assertThat(doc.rootDoc().getField("s_field").fieldType().omitNorms(), equalTo(false));
|
||||
assertThat(doc.rootDoc().getField("l_field").boost(), equalTo(3.0f));
|
||||
assertThat(doc.rootDoc().getField("l_field").fieldType().omitNorms(), equalTo(false));
|
||||
assertThat(doc.rootDoc().getField("i_field").boost(), equalTo(4.0f));
|
||||
assertThat(doc.rootDoc().getField("i_field").fieldType().omitNorms(), equalTo(false));
|
||||
assertThat(doc.rootDoc().getField("sh_field").boost(), equalTo(5.0f));
|
||||
assertThat(doc.rootDoc().getField("sh_field").fieldType().omitNorms(), equalTo(false));
|
||||
assertThat(doc.rootDoc().getField("b_field").boost(), equalTo(6.0f));
|
||||
assertThat(doc.rootDoc().getField("b_field").fieldType().omitNorms(), equalTo(false));
|
||||
assertThat(doc.rootDoc().getField("d_field").boost(), equalTo(7.0f));
|
||||
assertThat(doc.rootDoc().getField("d_field").fieldType().omitNorms(), equalTo(false));
|
||||
assertThat(doc.rootDoc().getField("f_field").boost(), equalTo(8.0f));
|
||||
assertThat(doc.rootDoc().getField("f_field").fieldType().omitNorms(), equalTo(false));
|
||||
assertThat(doc.rootDoc().getField("date_field").boost(), equalTo(9.0f));
|
||||
assertThat(doc.rootDoc().getField("date_field").fieldType().omitNorms(), equalTo(false));
|
||||
}
|
||||
|
||||
{
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
|
||||
.startObject("s_field").field("type", "keyword").field("boost", 2.0f).endObject()
|
||||
.startObject("l_field").field("type", "long").field("boost", 3.0f).endObject()
|
||||
.startObject("i_field").field("type", "integer").field("boost", 4.0f).endObject()
|
||||
.startObject("sh_field").field("type", "short").field("boost", 5.0f).endObject()
|
||||
.startObject("b_field").field("type", "byte").field("boost", 6.0f).endObject()
|
||||
.startObject("d_field").field("type", "double").field("boost", 7.0f).endObject()
|
||||
.startObject("f_field").field("type", "float").field("boost", 8.0f).endObject()
|
||||
.startObject("date_field").field("type", "date").field("boost", 9.0f).endObject()
|
||||
.endObject().endObject().endObject().string();
|
||||
IndexService indexService = createIndex("text");
|
||||
QueryShardContext context = indexService.newQueryShardContext();
|
||||
DocumentMapper mapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
|
@ -157,13 +174,21 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase {
|
|||
.endObject().bytes());
|
||||
|
||||
assertThat(doc.rootDoc().getField("s_field").boost(), equalTo(1f));
|
||||
assertThat(doc.rootDoc().getField("s_field").fieldType().omitNorms(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("l_field").boost(), equalTo(1f));
|
||||
assertThat(doc.rootDoc().getField("l_field").fieldType().omitNorms(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("i_field").boost(), equalTo(1f));
|
||||
assertThat(doc.rootDoc().getField("i_field").fieldType().omitNorms(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("sh_field").boost(), equalTo(1f));
|
||||
assertThat(doc.rootDoc().getField("sh_field").fieldType().omitNorms(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("b_field").boost(), equalTo(1f));
|
||||
assertThat(doc.rootDoc().getField("b_field").fieldType().omitNorms(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("d_field").boost(), equalTo(1f));
|
||||
assertThat(doc.rootDoc().getField("d_field").fieldType().omitNorms(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("f_field").boost(), equalTo(1f));
|
||||
assertThat(doc.rootDoc().getField("f_field").fieldType().omitNorms(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("date_field").boost(), equalTo(1f));
|
||||
assertThat(doc.rootDoc().getField("date_field").fieldType().omitNorms(), equalTo(true));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -102,13 +102,13 @@ public class FieldLevelBoostTests extends ESSingleNodeTestCase {
|
|||
public void testBackCompatFieldLevelMappingBoost() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
|
||||
.startObject("str_field").field("type", "keyword").field("boost", "2.0").endObject()
|
||||
.startObject("int_field").field("type", "integer").field("boost", "3.0").startObject("norms").field("enabled", true).endObject().endObject()
|
||||
.startObject("byte_field").field("type", "byte").field("boost", "4.0").startObject("norms").field("enabled", true).endObject().endObject()
|
||||
.startObject("date_field").field("type", "date").field("boost", "5.0").startObject("norms").field("enabled", true).endObject().endObject()
|
||||
.startObject("double_field").field("type", "double").field("boost", "6.0").startObject("norms").field("enabled", true).endObject().endObject()
|
||||
.startObject("float_field").field("type", "float").field("boost", "7.0").startObject("norms").field("enabled", true).endObject().endObject()
|
||||
.startObject("long_field").field("type", "long").field("boost", "8.0").startObject("norms").field("enabled", true).endObject().endObject()
|
||||
.startObject("short_field").field("type", "short").field("boost", "9.0").startObject("norms").field("enabled", true).endObject().endObject()
|
||||
.startObject("int_field").field("type", "integer").field("boost", "3.0").endObject()
|
||||
.startObject("byte_field").field("type", "byte").field("boost", "4.0").endObject()
|
||||
.startObject("date_field").field("type", "date").field("boost", "5.0").endObject()
|
||||
.startObject("double_field").field("type", "double").field("boost", "6.0").endObject()
|
||||
.startObject("float_field").field("type", "float").field("boost", "7.0").endObject()
|
||||
.startObject("long_field").field("type", "long").field("boost", "8.0").endObject()
|
||||
.startObject("short_field").field("type", "short").field("boost", "9.0").endObject()
|
||||
.string();
|
||||
|
||||
{
|
||||
|
|
|
@ -24,22 +24,33 @@ import org.apache.lucene.index.IndexOptions;
|
|||
import org.apache.lucene.index.IndexableField;
|
||||
import org.apache.lucene.index.IndexableFieldType;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.DocumentMapperParser;
|
||||
import org.elasticsearch.index.mapper.ParsedDocument;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> getPlugins() {
|
||||
return pluginList(InternalSettingsPlugin.class);
|
||||
}
|
||||
|
||||
IndexService indexService;
|
||||
DocumentMapperParser parser;
|
||||
|
||||
|
@ -232,4 +243,51 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
|
|||
assertEquals("The [keyword] field does not support positions, got [index_options]=" + indexOptions, e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
public void testBoost() throws IOException {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "keyword").field("boost", 2f).endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
}
|
||||
|
||||
public void testBoostImplicitlyEnablesNormsOnOldIndex() throws IOException {
|
||||
indexService = createIndex("test2",
|
||||
Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build());
|
||||
parser = indexService.mapperService().documentMapperParser();
|
||||
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "keyword").field("boost", 2f).endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
String expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "keyword")
|
||||
.field("boost", 2f).field("norms", true).endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
assertEquals(expectedMapping, mapper.mappingSource().toString());
|
||||
}
|
||||
|
||||
public void testEnableNorms() throws IOException {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "keyword").field("norms", true).endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", "1234")
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
IndexableField[] fields = doc.rootDoc().getFields("field");
|
||||
assertEquals(2, fields.length);
|
||||
assertFalse(fields[0].fieldType().omitNorms());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -39,6 +39,7 @@ import org.elasticsearch.test.InternalSettingsPlugin;
|
|||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
|
@ -130,6 +131,7 @@ public class StringMappingUpgradeTests extends ESSingleNodeTestCase {
|
|||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "string");
|
||||
boolean keyword = randomBoolean();
|
||||
boolean hasNorms = keyword == false;
|
||||
boolean shouldUpgrade = true;
|
||||
if (keyword) {
|
||||
mapping.field("index", randomBoolean() ? "not_analyzed" : "no");
|
||||
|
@ -143,7 +145,12 @@ public class StringMappingUpgradeTests extends ESSingleNodeTestCase {
|
|||
mapping.field("doc_values", randomBoolean());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
mapping.field("omit_norms", randomBoolean());
|
||||
hasNorms = randomBoolean();
|
||||
if (randomBoolean()) {
|
||||
mapping.field("omit_norms", hasNorms == false);
|
||||
} else {
|
||||
mapping.field("norms", Collections.singletonMap("enabled", hasNorms));
|
||||
}
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
mapping.startObject("fields").startObject("raw").field("type", "keyword").endObject().endObject();
|
||||
|
@ -172,6 +179,9 @@ public class StringMappingUpgradeTests extends ESSingleNodeTestCase {
|
|||
} else {
|
||||
assertThat(field, instanceOf(TextFieldMapper.class));
|
||||
}
|
||||
if (field.fieldType().indexOptions() != IndexOptions.NONE) {
|
||||
assertEquals(hasNorms, field.fieldType().omitNorms() == false);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -132,9 +132,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
|||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field")
|
||||
.field("type", "text")
|
||||
.startObject("norms")
|
||||
.field("enabled", false)
|
||||
.endObject()
|
||||
.field("norms", false)
|
||||
.endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
|
@ -386,4 +384,5 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
|||
assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPositions(), equalTo(true));
|
||||
assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPayloads(), equalTo(true));
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -684,4 +684,20 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
|
|||
parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser();
|
||||
parser.parse("type", new CompressedXContent(mappingWithTV)); // no exception
|
||||
}
|
||||
|
||||
public void testRejectNorms() throws IOException {
|
||||
// not supported as of 5.0
|
||||
for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) {
|
||||
DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser();
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("foo")
|
||||
.field("type", type)
|
||||
.field("norms", random().nextBoolean())
|
||||
.endObject()
|
||||
.endObject().endObject().endObject().string();
|
||||
MapperParsingException e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping)));
|
||||
assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [norms"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -564,7 +564,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
|
|||
mapperService.merge("type", new CompressedXContent(updatedMapping), MapperService.MergeReason.MAPPING_UPDATE, false);
|
||||
fail();
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), containsString("different [omit_norms]"));
|
||||
assertThat(e.getMessage(), containsString("different [norms]"));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -49,7 +49,7 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
|
|||
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/all_mapping_create_index.json");
|
||||
String mappingUpdate = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json");
|
||||
String[] errorMessage = {
|
||||
"[_all] has different [omit_norms] values",
|
||||
"[_all] has different [norms] values",
|
||||
"[_all] has different [store] values",
|
||||
"[_all] has different [store_term_vector] values",
|
||||
"[_all] has different [store_term_vector_offsets] values",
|
||||
|
|
|
@ -156,15 +156,15 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase {
|
|||
|
||||
public void testUpdateMappingWithNormsConflicts() throws Exception {
|
||||
client().admin().indices().prepareCreate("test")
|
||||
.addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": { \"enabled\": false }}}}}")
|
||||
.addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": false }}}}")
|
||||
.execute().actionGet();
|
||||
try {
|
||||
client().admin().indices().preparePutMapping("test").setType("type")
|
||||
.setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": { \"enabled\": true }}}}}").execute()
|
||||
.setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": true }}}}").execute()
|
||||
.actionGet();
|
||||
fail("Expected MergeMappingException");
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), containsString("mapper [body] has different [omit_norms]"));
|
||||
assertThat(e.getMessage(), containsString("mapper [body] has different [norms]"));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -51,7 +51,6 @@ import org.hamcrest.Matchers;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
|
|
|
@ -156,12 +156,12 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
|
|||
.endObject()
|
||||
.startObject("first_name")
|
||||
.field("type", "text")
|
||||
.field("omit_norms", "true")
|
||||
.field("norms", false)
|
||||
.field("copy_to", "first_name_phrase")
|
||||
.endObject()
|
||||
.startObject("last_name")
|
||||
.field("type", "text")
|
||||
.field("omit_norms", "true")
|
||||
.field("norms", false)
|
||||
.field("copy_to", "last_name_phrase")
|
||||
.endObject()
|
||||
.endObject()
|
||||
|
|
|
@ -117,7 +117,7 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
public void testOmitNormsOnAll() throws ExecutionException, InterruptedException, IOException {
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("_all").field("omit_norms", true).endObject()
|
||||
.startObject("_all").field("norms", false).endObject()
|
||||
.endObject().endObject())
|
||||
.setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)); // only one shard otherwise IDF might be different for comparing scores
|
||||
|
||||
|
|
Binary file not shown.
Binary file not shown.
|
@ -2,7 +2,7 @@
|
|||
"person":{
|
||||
"_all":{
|
||||
"enabled":true,
|
||||
"omit_norms":true
|
||||
"norms":false
|
||||
},
|
||||
"properties":{
|
||||
"name":{
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
"store_term_vector_offsets": true,
|
||||
"store_term_vector_positions": true,
|
||||
"store_term_vector_payloads": true,
|
||||
"omit_norms": true,
|
||||
"norms": false,
|
||||
"analyzer": "standard",
|
||||
"search_analyzer": "whitespace",
|
||||
"similarity": "my_similarity",
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
"store_term_vector_offsets": false,
|
||||
"store_term_vector_positions": false,
|
||||
"store_term_vector_payloads": false,
|
||||
"omit_norms": false,
|
||||
"norms": true,
|
||||
"analyzer": "whitespace",
|
||||
"search_analyzer": "standard",
|
||||
"similarity": "BM25",
|
||||
|
|
|
@ -247,6 +247,25 @@ def generate_index(client, version, index_name):
|
|||
}
|
||||
}
|
||||
|
||||
mappings['norms'] = {
|
||||
'properties': {
|
||||
'string_with_norms_disabled': {
|
||||
'type': 'string',
|
||||
'norms': {
|
||||
'enabled': False
|
||||
}
|
||||
},
|
||||
'string_with_norms_enabled': {
|
||||
'type': 'string',
|
||||
'index': 'not_analyzed',
|
||||
'norms': {
|
||||
'enabled': True,
|
||||
'loading': 'eager'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mappings['doc'] = {
|
||||
'properties': {
|
||||
'string': {
|
||||
|
|
Loading…
Reference in New Issue