Mirror of https://github.com/honeymoose/OpenSearch.git (synced 2025-02-17 02:14:54 +00:00)
Add per-field metadata. (#50333)
This PR adds per-field metadata that can be set in the mappings and is later returned by the field capabilities API. This metadata is completely opaque to Elasticsearch, but it may be used by tools that index data in Elasticsearch to communicate metadata about fields with tools that then search this data. A typical example that has been requested in the past is the ability to attach a unit to a numeric field.

In order not to bloat the cluster state, Elasticsearch requires that this metadata be small:

- keys can't be longer than 20 chars,
- values can only be strings of no more than 50 chars,
- no inner arrays or objects,
- the metadata can't have more than 5 keys in total.

Given that metadata is opaque to Elasticsearch, field capabilities don't try to do anything smart when merging metadata about multiple indices; the union of all field metadata is returned.

Here is how the meta might look in mappings:

```json
{
  "properties": {
    "latency": {
      "type": "long",
      "meta": {
        "unit": "ms"
      }
    }
  }
}
```

And then in the field capabilities response:

```json
{
  "latency": {
    "long": {
      "searchable": true,
      "aggregatable": true,
      "meta": {
        "unit": [ "ms" ]
      }
    }
  }
}
```

When there are no conflicts, values are arrays of size 1. When there are conflicts, Elasticsearch includes all unique values in this array, without giving any way to know which index has which metadata value:

```json
{
  "latency": {
    "long": {
      "searchable": true,
      "aggregatable": true,
      "meta": {
        "unit": [ "ms", "ns" ]
      }
    }
  }
}
```

Closes #33267
This commit is contained in:
parent
71054d269b
commit
31158ab3d5
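The merge behaviour described in the message above can be illustrated with a minimal standalone sketch (this is not the `FieldCapabilities.Builder` code from the commit, just the same idea in isolation, with hypothetical class and method names): metadata maps from individual indices are unioned key by key, so a single-value set means every index agrees and a multi-value set signals a conflict.

```java
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class MetaMergeSketch {

    // Union the per-index metadata maps: one value per key and index goes in,
    // a set of all distinct values seen across indices comes out.
    static Map<String, Set<String>> merge(List<Map<String, String>> perIndexMeta) {
        Map<String, Set<String>> merged = new HashMap<>();
        for (Map<String, String> meta : perIndexMeta) {
            for (Map.Entry<String, String> entry : meta.entrySet()) {
                merged.computeIfAbsent(entry.getKey(), key -> new HashSet<>()).add(entry.getValue());
            }
        }
        return merged;
    }

    public static void main(String[] args) {
        Map<String, String> index1 = new HashMap<>();
        index1.put("unit", "ms");
        Map<String, String> index2 = new HashMap<>();
        index2.put("unit", "ns");
        // "unit" ends up with both "ms" and "ns": a conflict that the response
        // reports without saying which index carried which value.
        System.out.println(merge(Arrays.asList(index1, index2)));
    }
}
```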
@@ -1242,11 +1242,11 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
        assertEquals(2, ratingResponse.size());

        FieldCapabilities expectedKeywordCapabilities = new FieldCapabilities(
            "rating", "keyword", true, true, new String[]{"index2"}, null, null);
            "rating", "keyword", true, true, new String[]{"index2"}, null, null, Collections.emptyMap());
        assertEquals(expectedKeywordCapabilities, ratingResponse.get("keyword"));

        FieldCapabilities expectedLongCapabilities = new FieldCapabilities(
            "rating", "long", true, true, new String[]{"index1"}, null, null);
            "rating", "long", true, true, new String[]{"index1"}, null, null, Collections.emptyMap());
        assertEquals(expectedLongCapabilities, ratingResponse.get("long"));

        // Check the capabilities for the 'field' field.
@@ -1255,7 +1255,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
        assertEquals(1, fieldResponse.size());

        FieldCapabilities expectedTextCapabilities = new FieldCapabilities(
            "field", "text", true, false);
            "field", "text", true, false, Collections.emptyMap());
        assertEquals(expectedTextCapabilities, fieldResponse.get("text"));
    }
@@ -8,15 +8,15 @@ parameters that are used by <<mapping-types,field mappings>>:
The following mapping parameters are common to some or all field datatypes:

* <<analyzer,`analyzer`>>
* <<normalizer, `normalizer`>>
* <<mapping-boost,`boost`>>
* <<coerce,`coerce`>>
* <<copy-to,`copy_to`>>
* <<doc-values,`doc_values`>>
* <<dynamic,`dynamic`>>
* <<eager-global-ordinals,`eager_global_ordinals`>>
* <<enabled,`enabled`>>
* <<fielddata,`fielddata`>>
* <<eager-global-ordinals,`eager_global_ordinals`>>
* <<multi-fields,`fields`>>
* <<mapping-date-format,`format`>>
* <<ignore-above,`ignore_above`>>
* <<ignore-malformed,`ignore_malformed`>>
@@ -24,7 +24,8 @@ The following mapping parameters are common to some or all field datatypes:
* <<index-phrases,`index_phrases`>>
* <<index-prefixes,`index_prefixes`>>
* <<mapping-index,`index`>>
* <<multi-fields,`fields`>>
* <<mapping-field-meta,`meta`>>
* <<normalizer, `normalizer`>>
* <<norms,`norms`>>
* <<null-value,`null_value`>>
* <<position-increment-gap,`position_increment_gap`>>
@@ -37,8 +38,6 @@ The following mapping parameters are common to some or all field datatypes:

include::params/analyzer.asciidoc[]

include::params/normalizer.asciidoc[]

include::params/boost.asciidoc[]

include::params/coerce.asciidoc[]
@@ -49,10 +48,10 @@ include::params/doc-values.asciidoc[]

include::params/dynamic.asciidoc[]

include::params/enabled.asciidoc[]

include::params/eager-global-ordinals.asciidoc[]

include::params/enabled.asciidoc[]

include::params/fielddata.asciidoc[]

include::params/format.asciidoc[]
@@ -69,8 +68,12 @@ include::params/index-phrases.asciidoc[]

include::params/index-prefixes.asciidoc[]

include::params/meta.asciidoc[]

include::params/multi-fields.asciidoc[]

include::params/normalizer.asciidoc[]

include::params/norms.asciidoc[]

include::params/null-value.asciidoc[]
docs/reference/mapping/params/meta.asciidoc (new file, 31 lines)
@@ -0,0 +1,31 @@
[[mapping-field-meta]]
=== `meta`

Metadata attached to the field. This metadata is opaque to Elasticsearch, it is
only useful for multiple applications that work on the same indices to share
meta information about fields such as units

[source,console]
------------
PUT my_index
{
  "mappings": {
    "properties": {
      "latency": {
        "type": "long",
        "meta": {
          "unit": "ms"
        }
      }
    }
  }
}
------------
// TEST

NOTE: Field metadata enforces at most 5 entries, that keys have a length that
is less than or equal to 20, and that values are strings whose length is less
than or equal to 50.

NOTE: Field metadata is updatable by submitting a mapping update. The metadata
of the update will override the metadata of the existing field.
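The limits stated in the notes above can be expressed as a small standalone sketch (class and method names here are hypothetical; the actual validation in this commit lives in `TypeParsers.parseMeta` and throws `MapperParsingException` instead):

```java
import java.util.Map;

public class MetaLimitsSketch {

    // Enforce the documented limits: at most 5 entries, keys of at most 20
    // characters, values that are strings of at most 50 characters.
    static void validate(String field, Map<String, ?> meta) {
        if (meta.size() > 5) {
            throw new IllegalArgumentException("[meta] can't have more than 5 entries on field [" + field + "]");
        }
        for (Map.Entry<String, ?> entry : meta.entrySet()) {
            String key = entry.getKey();
            if (key.codePointCount(0, key.length()) > 20) {
                throw new IllegalArgumentException("[meta] keys can't be longer than 20 chars, got [" + key + "]");
            }
            Object value = entry.getValue();
            if (value instanceof String == false) {
                // also rejects null values, matching the rule documented above
                throw new IllegalArgumentException("[meta] values can only be non-null strings, got [" + value + "]");
            }
            String s = (String) value;
            if (s.codePointCount(0, s.length()) > 50) {
                throw new IllegalArgumentException("[meta] values can't be longer than 50 chars, got [" + s + "]");
            }
        }
    }

    public static void main(String[] args) {
        validate("latency", java.util.Collections.singletonMap("unit", "ms")); // passes
    }
}
```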
@@ -120,3 +120,6 @@ The following parameters are accepted by `boolean` fields:
    the <<mapping-source-field,`_source`>> field. Accepts `true` or `false`
    (default).

<<mapping-field-meta,`meta`>>::

    Metadata about the field.
@@ -137,3 +137,7 @@ The following parameters are accepted by `date` fields:
    Whether the field value should be stored and retrievable separately from
    the <<mapping-source-field,`_source`>> field. Accepts `true` or `false`
    (default).

<<mapping-field-meta,`meta`>>::

    Metadata about the field.
@@ -115,6 +115,10 @@ The following parameters are accepted by `keyword` fields:
    when building a query for this field.
    Accepts `true` or `false` (default).

<<mapping-field-meta,`meta`>>::

    Metadata about the field.

NOTE: Indexes imported from 2.x do not support `keyword`. Instead they will
attempt to downgrade `keyword` into `string`. This allows you to merge modern
mappings with legacy mappings. Long lived indexes will have to be recreated
@@ -149,6 +149,10 @@ The following parameters are accepted by numeric types:
    the <<mapping-source-field,`_source`>> field. Accepts `true` or `false`
    (default).

<<mapping-field-meta,`meta`>>::

    Metadata about the field.

[[scaled-float-params]]
==== Parameters for `scaled_float`
@@ -143,3 +143,7 @@ The following parameters are accepted by `text` fields:

    Whether term vectors should be stored for an <<mapping-index,`analyzed`>>
    field. Defaults to `no`.

<<mapping-field-meta,`meta`>>::

    Metadata about the field.
@@ -78,6 +78,12 @@ include::{docdir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable]
    The list of indices where this field is not aggregatable, or null if all
    indices have the same definition for the field.

`meta`::
    Merged metadata across all indices as a map of string keys to arrays of values.
    A value length of 1 indicates that all indices had the same value for this key,
    while a length of 2 or more indicates that not all indices had the same value
    for this key.


[[search-field-caps-api-example]]
==== {api-examples-title}
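A minimal sketch of how a consumer might interpret the merged `meta` section described above, assuming only the `Map<String, Set<String>>` shape that `FieldCapabilities#meta()` exposes in this commit (the helper class and method names are hypothetical):

```java
import java.util.Map;
import java.util.Set;

public class FieldMetaReader {

    // A one-element set means every index agreed on the value for this key;
    // anything else is treated as missing or conflicting.
    static String agreedValue(Map<String, Set<String>> mergedMeta, String key) {
        Set<String> values = mergedMeta.get(key);
        if (values == null || values.size() != 1) {
            return null;
        }
        return values.iterator().next();
    }

    public static void main(String[] args) {
        Map<String, Set<String>> meta = java.util.Collections.singletonMap("unit", java.util.Collections.singleton("ms"));
        System.out.println(agreedValue(meta, "unit")); // prints "ms"
    }
}
```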
@@ -27,6 +27,7 @@ import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
@@ -35,6 +36,7 @@ import org.junit.Before;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

import static org.hamcrest.Matchers.containsString;
@@ -353,4 +355,33 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
        MapperParsingException e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping)));
        assertThat(e.getMessage(), containsString("index_options not allowed in field [foo] of type [scaled_float]"));
    }

    public void testMeta() throws Exception {
        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
            .startObject("properties").startObject("field").field("type", "scaled_float")
            .field("meta", Collections.singletonMap("foo", "bar"))
            .field("scaling_factor", 10.0)
            .endObject().endObject().endObject().endObject());

        DocumentMapper mapper = indexService.mapperService().merge("_doc",
            new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
        assertEquals(mapping, mapper.mappingSource().toString());

        String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
            .startObject("properties").startObject("field").field("type", "scaled_float")
            .field("scaling_factor", 10.0)
            .endObject().endObject().endObject().endObject());
        mapper = indexService.mapperService().merge("_doc",
            new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
        assertEquals(mapping2, mapper.mappingSource().toString());

        String mapping3 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
            .startObject("properties").startObject("field").field("type", "scaled_float")
            .field("meta", Collections.singletonMap("baz", "quux"))
            .field("scaling_factor", 10.0)
            .endObject().endObject().endObject().endObject());
        mapper = indexService.mapperService().merge("_doc",
            new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE);
        assertEquals(mapping3, mapper.mappingSource().toString());
    }
}
@@ -323,4 +323,3 @@ setup:
  - match: {fields.misc.unmapped.searchable: false}
  - match: {fields.misc.unmapped.aggregatable: false}
  - match: {fields.misc.unmapped.indices: ["test2", "test3"]}
@@ -0,0 +1,65 @@
---
"Merge metadata across multiple indices":

  - skip:
      version: " - 7.5.99"
      reason: Metadata support was added in 7.6

  - do:
      indices.create:
        index: test1
        body:
          mappings:
            properties:
              latency:
                type: long
                meta:
                  unit: ms
                  metric_type: gauge

  - do:
      indices.create:
        index: test2
        body:
          mappings:
            properties:
              latency:
                type: long
                meta:
                  unit: ns
                  metric_type: gauge

  - do:
      indices.create:
        index: test3

  - do:
      field_caps:
        index: test3
        fields: [latency]

  - is_false: fields.latency.long.meta.unit

  - do:
      field_caps:
        index: test1
        fields: [latency]

  - match: {fields.latency.long.meta.unit: ["ms"]}
  - match: {fields.latency.long.meta.metric_type: ["gauge"]}

  - do:
      field_caps:
        index: test1,test3
        fields: [latency]

  - match: {fields.latency.long.meta.unit: ["ms"]}
  - match: {fields.latency.long.meta.metric_type: ["gauge"]}

  - do:
      field_caps:
        index: test1,test2,test3
        fields: [latency]

  - match: {fields.latency.long.meta.unit: ["ms", "ns"]}
  - match: {fields.latency.long.meta.metric_type: ["gauge"]}
@@ -91,3 +91,39 @@

  - match: { error.type: "illegal_argument_exception" }
  - match: { error.reason: "Types cannot be provided in put mapping requests, unless the include_type_name parameter is set to true." }

---
"Update per-field metadata":

  - skip:
      version: " - 7.5.99"
      reason: "Per-field meta was introduced in 7.6"

  - do:
      indices.create:
        index: test_index
        body:
          mappings:
            properties:
              foo:
                type: keyword
                meta:
                  bar: baz

  - do:
      indices.put_mapping:
        index: test_index
        body:
          properties:
            foo:
              type: keyword
              meta:
                baz: quux

  - do:
      indices.get_mapping:
        index: test_index

  - is_false: test_index.mappings.properties.foo.meta.bar
  - match: { test_index.mappings.properties.foo.meta.baz: "quux" }
@ -19,6 +19,7 @@
|
||||
|
||||
package org.elasticsearch.action.fieldcaps;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
@ -34,20 +35,34 @@ import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* Describes the capabilities of a field optionally merged across multiple indices.
|
||||
*/
|
||||
public class FieldCapabilities implements Writeable, ToXContentObject {
|
||||
|
||||
private static final ParseField TYPE_FIELD = new ParseField("type");
|
||||
private static final ParseField SEARCHABLE_FIELD = new ParseField("searchable");
|
||||
private static final ParseField AGGREGATABLE_FIELD = new ParseField("aggregatable");
|
||||
private static final ParseField INDICES_FIELD = new ParseField("indices");
|
||||
private static final ParseField NON_SEARCHABLE_INDICES_FIELD = new ParseField("non_searchable_indices");
|
||||
private static final ParseField NON_AGGREGATABLE_INDICES_FIELD = new ParseField("non_aggregatable_indices");
|
||||
private static final ParseField META_FIELD = new ParseField("meta");
|
||||
|
||||
private static Map<String, Set<String>> mapToMapOfSets(Map<String, String> map) {
|
||||
final Function<Map.Entry<String, String>, String> entryValueFunction = Map.Entry::getValue;
|
||||
return Collections.unmodifiableMap(map.entrySet().stream().collect(
|
||||
Collectors.toMap(Map.Entry::getKey, entryValueFunction.andThen(Arrays::asList).andThen(HashSet::new)
|
||||
.andThen(Collections::unmodifiableSet))));
|
||||
}
|
||||
|
||||
private final String name;
|
||||
private final String type;
|
||||
@ -58,19 +73,23 @@ public class FieldCapabilities implements Writeable, ToXContentObject {
|
||||
private final String[] nonSearchableIndices;
|
||||
private final String[] nonAggregatableIndices;
|
||||
|
||||
private final Map<String, Set<String>> meta;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* Constructor for a single index.
|
||||
* @param name The name of the field.
|
||||
* @param type The type associated with the field.
|
||||
* @param isSearchable Whether this field is indexed for search.
|
||||
* @param isAggregatable Whether this field can be aggregated on.
|
||||
* @param meta Metadata about the field.
|
||||
*/
|
||||
public FieldCapabilities(String name, String type, boolean isSearchable, boolean isAggregatable) {
|
||||
this(name, type, isSearchable, isAggregatable, null, null, null);
|
||||
public FieldCapabilities(String name, String type, boolean isSearchable, boolean isAggregatable,
|
||||
Map<String, String> meta) {
|
||||
this(name, type, isSearchable, isAggregatable, null, null, null, mapToMapOfSets(Objects.requireNonNull(meta)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* Constructor for a set of indices.
|
||||
* @param name The name of the field
|
||||
* @param type The type associated with the field.
|
||||
* @param isSearchable Whether this field is indexed for search.
|
||||
@ -81,12 +100,14 @@ public class FieldCapabilities implements Writeable, ToXContentObject {
|
||||
* or null if the field is searchable in all indices.
|
||||
* @param nonAggregatableIndices The list of indices where this field is not aggregatable,
|
||||
* or null if the field is aggregatable in all indices.
|
||||
* @param meta Merged metadata across indices.
|
||||
*/
|
||||
public FieldCapabilities(String name, String type,
|
||||
boolean isSearchable, boolean isAggregatable,
|
||||
String[] indices,
|
||||
String[] nonSearchableIndices,
|
||||
String[] nonAggregatableIndices) {
|
||||
String[] nonAggregatableIndices,
|
||||
Map<String, Set<String>> meta) {
|
||||
this.name = name;
|
||||
this.type = type;
|
||||
this.isSearchable = isSearchable;
|
||||
@ -94,6 +115,7 @@ public class FieldCapabilities implements Writeable, ToXContentObject {
|
||||
this.indices = indices;
|
||||
this.nonSearchableIndices = nonSearchableIndices;
|
||||
this.nonAggregatableIndices = nonAggregatableIndices;
|
||||
this.meta = Objects.requireNonNull(meta);
|
||||
}
|
||||
|
||||
public FieldCapabilities(StreamInput in) throws IOException {
|
||||
@ -104,6 +126,11 @@ public class FieldCapabilities implements Writeable, ToXContentObject {
|
||||
this.indices = in.readOptionalStringArray();
|
||||
this.nonSearchableIndices = in.readOptionalStringArray();
|
||||
this.nonAggregatableIndices = in.readOptionalStringArray();
|
||||
if (in.getVersion().onOrAfter(Version.V_7_6_0)) {
|
||||
meta = in.readMap(StreamInput::readString, i -> i.readSet(StreamInput::readString));
|
||||
} else {
|
||||
meta = Collections.emptyMap();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -115,6 +142,9 @@ public class FieldCapabilities implements Writeable, ToXContentObject {
|
||||
out.writeOptionalStringArray(indices);
|
||||
out.writeOptionalStringArray(nonSearchableIndices);
|
||||
out.writeOptionalStringArray(nonAggregatableIndices);
|
||||
if (out.getVersion().onOrAfter(Version.V_7_6_0)) {
|
||||
out.writeMap(meta, StreamOutput::writeString, (o, set) -> o.writeCollection(set, StreamOutput::writeString));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -132,6 +162,17 @@ public class FieldCapabilities implements Writeable, ToXContentObject {
|
||||
if (nonAggregatableIndices != null) {
|
||||
builder.field(NON_AGGREGATABLE_INDICES_FIELD.getPreferredName(), nonAggregatableIndices);
|
||||
}
|
||||
if (meta.isEmpty() == false) {
|
||||
builder.startObject("meta");
|
||||
List<Map.Entry<String, Set<String>>> entries = new ArrayList<>(meta.entrySet());
|
||||
entries.sort(Comparator.comparing(Map.Entry::getKey)); // provide predictable order
|
||||
for (Map.Entry<String, Set<String>> entry : entries) {
|
||||
List<String> values = new ArrayList<>(entry.getValue());
|
||||
values.sort(String::compareTo); // provide predictable order
|
||||
builder.field(entry.getKey(), values);
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
@ -150,7 +191,8 @@ public class FieldCapabilities implements Writeable, ToXContentObject {
|
||||
(boolean) a[2],
|
||||
a[3] != null ? ((List<String>) a[3]).toArray(new String[0]) : null,
|
||||
a[4] != null ? ((List<String>) a[4]).toArray(new String[0]) : null,
|
||||
a[5] != null ? ((List<String>) a[5]).toArray(new String[0]) : null));
|
||||
a[5] != null ? ((List<String>) a[5]).toArray(new String[0]) : null,
|
||||
a[6] != null ? ((Map<String, Set<String>>) a[6]) : Collections.emptyMap()));
|
||||
|
||||
static {
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), TYPE_FIELD);
|
||||
@ -159,6 +201,8 @@ public class FieldCapabilities implements Writeable, ToXContentObject {
|
||||
PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), INDICES_FIELD);
|
||||
PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), NON_SEARCHABLE_INDICES_FIELD);
|
||||
PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), NON_AGGREGATABLE_INDICES_FIELD);
|
||||
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
|
||||
(parser, context) -> parser.map(HashMap::new, p -> Collections.unmodifiableSet(new HashSet<>(p.list()))), META_FIELD);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -213,6 +257,13 @@ public class FieldCapabilities implements Writeable, ToXContentObject {
|
||||
return nonAggregatableIndices;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return merged metadata across indices.
|
||||
*/
|
||||
public Map<String, Set<String>> meta() {
|
||||
return meta;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
@ -224,12 +275,13 @@ public class FieldCapabilities implements Writeable, ToXContentObject {
|
||||
Objects.equals(type, that.type) &&
|
||||
Arrays.equals(indices, that.indices) &&
|
||||
Arrays.equals(nonSearchableIndices, that.nonSearchableIndices) &&
|
||||
Arrays.equals(nonAggregatableIndices, that.nonAggregatableIndices);
|
||||
Arrays.equals(nonAggregatableIndices, that.nonAggregatableIndices) &&
|
||||
Objects.equals(meta, that.meta);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = Objects.hash(name, type, isSearchable, isAggregatable);
|
||||
int result = Objects.hash(name, type, isSearchable, isAggregatable, meta);
|
||||
result = 31 * result + Arrays.hashCode(indices);
|
||||
result = 31 * result + Arrays.hashCode(nonSearchableIndices);
|
||||
result = 31 * result + Arrays.hashCode(nonAggregatableIndices);
|
||||
@ -247,6 +299,7 @@ public class FieldCapabilities implements Writeable, ToXContentObject {
|
||||
private boolean isSearchable;
|
||||
private boolean isAggregatable;
|
||||
private List<IndexCaps> indiceList;
|
||||
private Map<String, Set<String>> meta;
|
||||
|
||||
Builder(String name, String type) {
|
||||
this.name = name;
|
||||
@ -254,15 +307,38 @@ public class FieldCapabilities implements Writeable, ToXContentObject {
|
||||
this.isSearchable = true;
|
||||
this.isAggregatable = true;
|
||||
this.indiceList = new ArrayList<>();
|
||||
this.meta = new HashMap<>();
|
||||
}
|
||||
|
||||
void add(String index, boolean search, boolean agg) {
|
||||
private void add(String index, boolean search, boolean agg) {
|
||||
IndexCaps indexCaps = new IndexCaps(index, search, agg);
|
||||
indiceList.add(indexCaps);
|
||||
this.isSearchable &= search;
|
||||
this.isAggregatable &= agg;
|
||||
}
|
||||
|
||||
/**
|
||||
* Collect capabilities of an index.
|
||||
*/
|
||||
void add(String index, boolean search, boolean agg, Map<String, String> meta) {
|
||||
add(index, search, agg);
|
||||
for (Map.Entry<String, String> entry : meta.entrySet()) {
|
||||
this.meta.computeIfAbsent(entry.getKey(), key -> new HashSet<>())
|
||||
.add(entry.getValue());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge another capabilities instance.
|
||||
*/
|
||||
void merge(String index, boolean search, boolean agg, Map<String, Set<String>> meta) {
|
||||
add(index, search, agg);
|
||||
for (Map.Entry<String, Set<String>> entry : meta.entrySet()) {
|
||||
this.meta.computeIfAbsent(entry.getKey(), key -> new HashSet<>())
|
||||
.addAll(entry.getValue());
|
||||
}
|
||||
}
|
||||
|
||||
List<String> getIndices() {
|
||||
return indiceList.stream().map(c -> c.name).collect(Collectors.toList());
|
||||
}
|
||||
@ -305,8 +381,12 @@ public class FieldCapabilities implements Writeable, ToXContentObject {
|
||||
} else {
|
||||
nonAggregatableIndices = null;
|
||||
}
|
||||
final Function<Map.Entry<String, Set<String>>, Set<String>> entryValueFunction = Map.Entry::getValue;
|
||||
Map<String, Set<String>> immutableMeta = Collections.unmodifiableMap(meta.entrySet().stream()
|
||||
.collect(Collectors.toMap(
|
||||
Map.Entry::getKey, entryValueFunction.andThen(HashSet::new).andThen(Collections::unmodifiableSet))));
|
||||
return new FieldCapabilities(name, type, isSearchable, isAggregatable,
|
||||
indices, nonSearchableIndices, nonAggregatableIndices);
|
||||
indices, nonSearchableIndices, nonAggregatableIndices, immutableMeta);
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -175,7 +175,7 @@ public class TransportFieldCapabilitiesAction extends HandledTransportAction<Fie
                FieldCapabilities.Builder unmapped = new FieldCapabilities.Builder(field, "unmapped");
                typeMap.put("unmapped", unmapped);
                for (String index : unmappedIndices) {
                    unmapped.add(index, false, false);
                    unmapped.add(index, false, false, Collections.emptyMap());
                }
            }
        }
@@ -188,7 +188,7 @@ public class TransportFieldCapabilitiesAction extends HandledTransportAction<Fie
                Map<String, FieldCapabilities.Builder> typeMap = responseMapBuilder.computeIfAbsent(field, f -> new HashMap<>());
                FieldCapabilities.Builder builder = typeMap.computeIfAbsent(fieldCap.getType(),
                    key -> new FieldCapabilities.Builder(field, key));
                builder.add(indexName, fieldCap.isSearchable(), fieldCap.isAggregatable());
                builder.merge(indexName, fieldCap.isSearchable(), fieldCap.isAggregatable(), fieldCap.meta());
            }
        }
    }
@@ -37,6 +37,7 @@ import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
@@ -86,7 +87,8 @@ public class TransportFieldCapabilitiesIndexAction extends TransportSingleShardA
                if (ft != null) {
                    if (indicesService.isMetaDataField(mapperService.getIndexSettings().getIndexVersionCreated(), field)
                            || fieldPredicate.test(ft.name())) {
                        FieldCapabilities fieldCap = new FieldCapabilities(field, ft.typeName(), ft.isSearchable(), ft.isAggregatable());
                        FieldCapabilities fieldCap = new FieldCapabilities(field, ft.typeName(), ft.isSearchable(), ft.isAggregatable(),
                            ft.meta());
                        responseMap.put(field, fieldCap);
                    } else {
                        continue;
@@ -104,7 +106,7 @@ public class TransportFieldCapabilitiesIndexAction extends TransportSingleShardA
                        // no field type, it must be an object field
                        ObjectMapper mapper = mapperService.getObjectMapper(parentField);
                        String type = mapper.nested().isNested() ? "nested" : "object";
                        FieldCapabilities fieldCap = new FieldCapabilities(parentField, type, false, false);
                        FieldCapabilities fieldCap = new FieldCapabilities(parentField, type, false, false, Collections.emptyMap());
                        responseMap.put(parentField, fieldCap);
                    }
                    dotIndex = parentField.lastIndexOf('.');
@@ -50,6 +50,7 @@ import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.util.Objects;
import java.util.TreeMap;
import java.util.stream.StreamSupport;

public abstract class FieldMapper extends Mapper implements Cloneable {
@@ -223,6 +224,12 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
                fieldType.setHasDocValues(defaultDocValues);
            }
        }

        /** Set metadata on this field. */
        public T meta(Map<String, String> meta) {
            fieldType.setMeta(meta);
            return (T) this;
        }
    }

    protected final Version indexCreatedVersion;
@@ -427,6 +434,10 @@ public abstract class FieldMapper extends Mapper implements Cloneable {

        multiFields.toXContent(builder, params);
        copyTo.toXContent(builder, params);

        if (includeDefaults || fieldType().meta().isEmpty() == false) {
            builder.field("meta", new TreeMap<>(fieldType().meta())); // ensure consistent order
        }
    }

    protected final void doXContentAnalyzers(XContentBuilder builder, boolean includeDefaults) throws IOException {
@@ -53,7 +53,10 @@ import org.elasticsearch.search.DocValueFormat;

import java.io.IOException;
import java.time.ZoneId;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
@@ -72,6 +75,7 @@ public abstract class MappedFieldType extends FieldType {
    private Object nullValue;
    private String nullValueAsString; // for sending null value to _all field
    private boolean eagerGlobalOrdinals;
    private Map<String, String> meta;

    protected MappedFieldType(MappedFieldType ref) {
        super(ref);
@@ -85,6 +89,7 @@ public abstract class MappedFieldType extends FieldType {
        this.nullValue = ref.nullValue();
        this.nullValueAsString = ref.nullValueAsString();
        this.eagerGlobalOrdinals = ref.eagerGlobalOrdinals;
        this.meta = ref.meta;
    }

    public MappedFieldType() {
@@ -94,6 +99,7 @@ public abstract class MappedFieldType extends FieldType {
        setOmitNorms(false);
        setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
        setBoost(1.0f);
        meta = Collections.emptyMap();
    }

    @Override
@@ -126,13 +132,14 @@ public abstract class MappedFieldType extends FieldType {
            Objects.equals(eagerGlobalOrdinals, fieldType.eagerGlobalOrdinals) &&
            Objects.equals(nullValue, fieldType.nullValue) &&
            Objects.equals(nullValueAsString, fieldType.nullValueAsString) &&
            Objects.equals(similarity, fieldType.similarity);
            Objects.equals(similarity, fieldType.similarity) &&
            Objects.equals(meta, fieldType.meta);
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), name, boost, docValues, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer,
            eagerGlobalOrdinals, similarity == null ? null : similarity.name(), nullValue, nullValueAsString);
            eagerGlobalOrdinals, similarity == null ? null : similarity.name(), nullValue, nullValueAsString, meta);
    }

    // TODO: we need to override freeze() and add safety checks that all settings are actually set
@@ -490,4 +497,18 @@ public abstract class MappedFieldType extends FieldType {
        return ((TermQuery) termQuery).getTerm();
    }

    /**
     * Get the metadata associated with this field.
     */
    public Map<String, String> meta() {
        return meta;
    }

    /**
     * Associate metadata with this field.
     */
    public void setMeta(Map<String, String> meta) {
        checkIfFrozen();
        this.meta = Collections.unmodifiableMap(new HashMap<>(Objects.requireNonNull(meta)));
    }
}
@@ -33,6 +33,8 @@ import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

import static org.elasticsearch.common.xcontent.support.XContentMapValues.isArray;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeFloatValue;
@@ -143,7 +145,7 @@ public class TypeParsers {
        }
    }

    public static void parseNorms(FieldMapper.Builder builder, String fieldName, Object propNode) {
    public static void parseNorms(FieldMapper.Builder<?,?> builder, String fieldName, Object propNode) {
        builder.omitNorms(XContentMapValues.nodeBooleanValue(propNode, fieldName + ".norms") == false);
    }

@@ -151,7 +153,7 @@ public class TypeParsers {
     * Parse text field attributes. In addition to {@link #parseField common attributes}
     * this will parse analysis and term-vectors related settings.
     */
    public static void parseTextField(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode,
    public static void parseTextField(FieldMapper.Builder<?,?> builder, String name, Map<String, Object> fieldNode,
                                      Mapper.TypeParser.ParserContext parserContext) {
        parseField(builder, name, fieldNode, parserContext);
        parseAnalyzersAndTermVectors(builder, name, fieldNode, parserContext);
@@ -166,11 +168,58 @@ public class TypeParsers {
        }
    }

    /**
     * Parse the {@code meta} key of the mapping.
     */
    public static void parseMeta(FieldMapper.Builder<?,?> builder, String name, Map<String, Object> fieldNode) {
        Object metaObject = fieldNode.remove("meta");
        if (metaObject == null) {
            // no meta
            return;
        }
        if (metaObject instanceof Map == false) {
            throw new MapperParsingException("[meta] must be an object, got " + metaObject.getClass().getSimpleName() +
                "[" + metaObject + "] for field [" + name +"]");
        }
        @SuppressWarnings("unchecked")
        Map<String, ?> meta = (Map<String, ?>) metaObject;
        if (meta.size() > 5) {
            throw new MapperParsingException("[meta] can't have more than 5 entries, but got " + meta.size() + " on field [" +
                name + "]");
        }
        for (String key : meta.keySet()) {
            if (key.codePointCount(0, key.length()) > 20) {
                throw new MapperParsingException("[meta] keys can't be longer than 20 chars, but got [" + key +
                    "] for field [" + name + "]");
            }
        }
        for (Object value : meta.values()) {
            if (value instanceof String) {
                String sValue = (String) value;
                if (sValue.codePointCount(0, sValue.length()) > 50) {
                    throw new MapperParsingException("[meta] values can't be longer than 50 chars, but got [" + value +
                        "] for field [" + name + "]");
                }
            } else if (value == null) {
                throw new MapperParsingException("[meta] values can't be null (field [" + name + "])");
            } else {
                throw new MapperParsingException("[meta] values can only be strings, but got " +
                    value.getClass().getSimpleName() + "[" + value + "] for field [" + name + "]");
            }
        }
        final Function<Map.Entry<String, ?>, Object> entryValueFunction = Map.Entry::getValue;
        final Function<Object, String> stringCast = String.class::cast;
        Map<String, String> checkedMeta = Collections.unmodifiableMap(meta.entrySet().stream()
            .collect(Collectors.toMap(Map.Entry::getKey, entryValueFunction.andThen(stringCast))));
        builder.meta(checkedMeta);
    }

    /**
     * Parse common field attributes such as {@code doc_values} or {@code store}.
     */
    public static void parseField(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode,
    public static void parseField(FieldMapper.Builder<?,?> builder, String name, Map<String, Object> fieldNode,
                                  Mapper.TypeParser.ParserContext parserContext) {
        parseMeta(builder, name, fieldNode);
        for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
            Map.Entry<String, Object> entry = iterator.next();
            final String propName = entry.getKey();
@ -19,12 +19,17 @@
|
||||
|
||||
package org.elasticsearch.action.fieldcaps;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractSerializingTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
@ -48,9 +53,9 @@ public class FieldCapabilitiesTests extends AbstractSerializingTestCase<FieldCap
|
||||
|
||||
public void testBuilder() {
|
||||
FieldCapabilities.Builder builder = new FieldCapabilities.Builder("field", "type");
|
||||
builder.add("index1", true, false);
|
||||
builder.add("index2", true, false);
|
||||
builder.add("index3", true, false);
|
||||
builder.add("index1", true, false, Collections.emptyMap());
|
||||
builder.add("index2", true, false, Collections.emptyMap());
|
||||
builder.add("index3", true, false, Collections.emptyMap());
|
||||
|
||||
{
|
||||
FieldCapabilities cap1 = builder.build(false);
|
||||
@ -59,6 +64,7 @@ public class FieldCapabilitiesTests extends AbstractSerializingTestCase<FieldCap
|
||||
assertNull(cap1.indices());
|
||||
assertNull(cap1.nonSearchableIndices());
|
||||
assertNull(cap1.nonAggregatableIndices());
|
||||
assertEquals(Collections.emptyMap(), cap1.meta());
|
||||
|
||||
FieldCapabilities cap2 = builder.build(true);
|
||||
assertThat(cap2.isSearchable(), equalTo(true));
|
||||
@ -67,12 +73,13 @@ public class FieldCapabilitiesTests extends AbstractSerializingTestCase<FieldCap
|
||||
assertThat(cap2.indices(), equalTo(new String[]{"index1", "index2", "index3"}));
|
||||
assertNull(cap2.nonSearchableIndices());
|
||||
assertNull(cap2.nonAggregatableIndices());
|
||||
assertEquals(Collections.emptyMap(), cap2.meta());
|
||||
}
|
||||
|
||||
builder = new FieldCapabilities.Builder("field", "type");
|
||||
builder.add("index1", false, true);
|
||||
builder.add("index2", true, false);
|
||||
builder.add("index3", false, false);
|
||||
builder.add("index1", false, true, Collections.emptyMap());
|
||||
builder.add("index2", true, false, Collections.emptyMap());
|
||||
builder.add("index3", false, false, Collections.emptyMap());
|
||||
{
|
||||
FieldCapabilities cap1 = builder.build(false);
|
||||
assertThat(cap1.isSearchable(), equalTo(false));
|
||||
@ -80,6 +87,7 @@ public class FieldCapabilitiesTests extends AbstractSerializingTestCase<FieldCap
|
||||
assertNull(cap1.indices());
|
||||
assertThat(cap1.nonSearchableIndices(), equalTo(new String[]{"index1", "index3"}));
|
||||
assertThat(cap1.nonAggregatableIndices(), equalTo(new String[]{"index2", "index3"}));
|
||||
assertEquals(Collections.emptyMap(), cap1.meta());
|
||||
|
||||
FieldCapabilities cap2 = builder.build(true);
|
||||
assertThat(cap2.isSearchable(), equalTo(false));
|
||||
@ -88,6 +96,30 @@ public class FieldCapabilitiesTests extends AbstractSerializingTestCase<FieldCap
|
||||
assertThat(cap2.indices(), equalTo(new String[]{"index1", "index2", "index3"}));
|
||||
assertThat(cap2.nonSearchableIndices(), equalTo(new String[]{"index1", "index3"}));
|
||||
assertThat(cap2.nonAggregatableIndices(), equalTo(new String[]{"index2", "index3"}));
|
||||
assertEquals(Collections.emptyMap(), cap2.meta());
|
||||
}
|
||||
|
||||
builder = new FieldCapabilities.Builder("field", "type");
|
||||
builder.add("index1", true, true, Collections.emptyMap());
|
||||
builder.add("index2", true, true, ImmutableMap.of("foo", "bar"));
|
||||
builder.add("index3", true, true, ImmutableMap.of("foo", "quux"));
|
||||
{
|
||||
FieldCapabilities cap1 = builder.build(false);
|
||||
assertThat(cap1.isSearchable(), equalTo(true));
|
||||
assertThat(cap1.isAggregatable(), equalTo(true));
|
||||
assertNull(cap1.indices());
|
||||
assertNull(cap1.nonSearchableIndices());
|
||||
assertNull(cap1.nonAggregatableIndices());
|
||||
assertEquals(ImmutableMap.of("foo", ImmutableSet.of("bar", "quux")), cap1.meta());
|
||||
|
||||
FieldCapabilities cap2 = builder.build(true);
|
||||
assertThat(cap2.isSearchable(), equalTo(true));
|
||||
assertThat(cap2.isAggregatable(), equalTo(true));
|
||||
assertThat(cap2.indices().length, equalTo(3));
|
||||
assertThat(cap2.indices(), equalTo(new String[]{"index1", "index2", "index3"}));
|
||||
assertNull(cap2.nonSearchableIndices());
|
||||
assertNull(cap2.nonAggregatableIndices());
|
||||
assertEquals(ImmutableMap.of("foo", ImmutableSet.of("bar", "quux")), cap2.meta());
|
||||
}
|
||||
}
|
||||
|
||||
@ -113,9 +145,23 @@ public class FieldCapabilitiesTests extends AbstractSerializingTestCase<FieldCap
|
||||
nonAggregatableIndices[i] = randomAlphaOfLengthBetween(5, 20);
|
||||
}
|
||||
}
|
||||
|
||||
Map<String, Set<String>> meta;
|
||||
switch (randomInt(2)) {
|
||||
case 0:
|
||||
meta = Collections.emptyMap();
|
||||
break;
|
||||
case 1:
|
||||
meta = ImmutableMap.of("foo", ImmutableSet.of("bar"));
|
||||
break;
|
||||
default:
|
||||
meta = ImmutableMap.of("foo", ImmutableSet.of("bar", "baz"));
|
||||
break;
|
||||
}
|
||||
|
||||
return new FieldCapabilities(fieldName,
|
||||
randomAlphaOfLengthBetween(5, 20), randomBoolean(), randomBoolean(),
|
||||
indices, nonSearchableIndices, nonAggregatableIndices);
|
||||
indices, nonSearchableIndices, nonAggregatableIndices, meta);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -127,7 +173,8 @@ public class FieldCapabilitiesTests extends AbstractSerializingTestCase<FieldCap
|
||||
String[] indices = instance.indices();
|
||||
String[] nonSearchableIndices = instance.nonSearchableIndices();
|
||||
String[] nonAggregatableIndices = instance.nonAggregatableIndices();
|
||||
switch (between(0, 6)) {
|
||||
Map<String, Set<String>> meta = instance.meta();
|
||||
switch (between(0, 7)) {
|
||||
case 0:
|
||||
name += randomAlphaOfLengthBetween(1, 10);
|
||||
break;
|
||||
@ -169,7 +216,6 @@ public class FieldCapabilitiesTests extends AbstractSerializingTestCase<FieldCap
|
||||
nonSearchableIndices = newNonSearchableIndices;
|
||||
break;
|
||||
case 6:
|
||||
default:
|
||||
String[] newNonAggregatableIndices;
|
||||
int startNonAggregatablePos = 0;
|
||||
if (nonAggregatableIndices == null) {
|
||||
@ -183,7 +229,18 @@ public class FieldCapabilitiesTests extends AbstractSerializingTestCase<FieldCap
|
||||
}
|
||||
nonAggregatableIndices = newNonAggregatableIndices;
|
||||
break;
|
||||
case 7:
|
||||
Map<String, Set<String>> newMeta;
|
||||
if (meta.isEmpty()) {
|
||||
newMeta = ImmutableMap.of("foo", ImmutableSet.of("bar"));
|
||||
} else {
|
||||
newMeta = Collections.emptyMap();
|
||||
}
|
||||
meta = newMeta;
|
||||
break;
|
||||
default:
|
||||
throw new AssertionError();
|
||||
}
|
||||
return new FieldCapabilities(name, type, isSearchable, isAggregatable, indices, nonSearchableIndices, nonAggregatableIndices);
|
||||
return new FieldCapabilities(name, type, isSearchable, isAggregatable, indices, nonSearchableIndices, nonAggregatableIndices, meta);
|
||||
}
|
||||
}
|
||||
|
@ -152,19 +152,19 @@ public class MergedFieldCapabilitiesResponseTests extends AbstractSerializingTes
|
||||
|
||||
private static FieldCapabilitiesResponse createSimpleResponse() {
|
||||
Map<String, FieldCapabilities> titleCapabilities = new HashMap<>();
|
||||
titleCapabilities.put("text", new FieldCapabilities("title", "text", true, false));
|
||||
titleCapabilities.put("text", new FieldCapabilities("title", "text", true, false, Collections.emptyMap()));
|
||||
|
||||
Map<String, FieldCapabilities> ratingCapabilities = new HashMap<>();
|
||||
ratingCapabilities.put("long", new FieldCapabilities("rating", "long",
|
||||
true, false,
|
||||
new String[]{"index1", "index2"},
|
||||
null,
|
||||
new String[]{"index1"}));
|
||||
new String[]{"index1"}, Collections.emptyMap()));
|
||||
ratingCapabilities.put("keyword", new FieldCapabilities("rating", "keyword",
|
||||
false, true,
|
||||
new String[]{"index3", "index4"},
|
||||
new String[]{"index4"},
|
||||
null));
|
||||
null, Collections.emptyMap()));
|
||||
|
||||
Map<String, Map<String, FieldCapabilities>> responses = new HashMap<>();
|
||||
responses.put("title", titleCapabilities);
|
||||
|
@ -38,6 +38,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.mapper.MapperService.MergeReason;
|
||||
import org.elasticsearch.index.mapper.ParseContext.Document;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
@ -46,6 +47,7 @@ import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
||||
@ -251,4 +253,30 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
|
||||
);
|
||||
assertThat(e.getMessage(), containsString("name cannot be empty string"));
|
||||
}
|
||||
|
||||
public void testMeta() throws Exception {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
|
||||
.startObject("properties").startObject("field").field("type", "boolean")
|
||||
.field("meta", Collections.singletonMap("foo", "bar"))
|
||||
.endObject().endObject().endObject().endObject());
|
||||
|
||||
DocumentMapper mapper = indexService.mapperService().merge("_doc",
|
||||
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
|
||||
.startObject("properties").startObject("field").field("type", "boolean")
|
||||
.endObject().endObject().endObject().endObject());
|
||||
mapper = indexService.mapperService().merge("_doc",
|
||||
new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
|
||||
assertEquals(mapping2, mapper.mappingSource().toString());
|
||||
|
||||
String mapping3 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
|
||||
.startObject("properties").startObject("field").field("type", "boolean")
|
||||
.field("meta", Collections.singletonMap("baz", "quux"))
|
||||
.endObject().endObject().endObject().endObject());
|
||||
mapper = indexService.mapperService().merge("_doc",
|
||||
new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE);
|
||||
assertEquals(mapping3, mapper.mappingSource().toString());
|
||||
}
|
||||
}
|
||||
|
@ -29,6 +29,7 @@ import org.elasticsearch.common.time.DateFormatter;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.mapper.MapperService.MergeReason;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
@ -39,6 +40,7 @@ import java.time.ZoneId;
|
||||
import java.time.ZoneOffset;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
@ -415,4 +417,30 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
|
||||
() -> parser.parse("type", new CompressedXContent(mapping)));
|
||||
assertEquals("Invalid format: [[test_format]]: Unknown pattern letter: t", e.getMessage());
|
||||
}
|
||||
|
||||
public void testMeta() throws Exception {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
|
||||
.startObject("properties").startObject("field").field("type", "date")
|
||||
.field("meta", Collections.singletonMap("foo", "bar"))
|
||||
.endObject().endObject().endObject().endObject());
|
||||
|
||||
DocumentMapper mapper = indexService.mapperService().merge("_doc",
|
||||
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
|
||||
.startObject("properties").startObject("field").field("type", "date")
|
||||
.endObject().endObject().endObject().endObject());
|
||||
mapper = indexService.mapperService().merge("_doc",
|
||||
new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
|
||||
assertEquals(mapping2, mapper.mappingSource().toString());
|
||||
|
||||
String mapping3 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
|
||||
.startObject("properties").startObject("field").field("type", "date")
|
||||
.field("meta", Collections.singletonMap("baz", "quux"))
|
||||
.endObject().endObject().endObject().endObject());
|
||||
mapper = indexService.mapperService().merge("_doc",
|
||||
new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE);
|
||||
assertEquals(mapping3, mapper.mappingSource().toString());
|
||||
}
|
||||
}
|
||||
|
@ -46,6 +46,7 @@ import org.junit.Before;
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
@ -547,4 +548,30 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
|
||||
assertThat(ft.searchAnalyzer().name(), equalTo("my_lowercase"));
|
||||
assertTokenStreamContents(ft.searchAnalyzer().analyzer().tokenStream("", "Hello World"), new String[] {"hello world"});
|
||||
}
|
||||
|
||||
public void testMeta() throws Exception {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
|
||||
.startObject("properties").startObject("field").field("type", "keyword")
|
||||
.field("meta", Collections.singletonMap("foo", "bar"))
|
||||
.endObject().endObject().endObject().endObject());
|
||||
|
||||
DocumentMapper mapper = indexService.mapperService().merge("_doc",
|
||||
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
|
||||
.startObject("properties").startObject("field").field("type", "keyword")
|
||||
.endObject().endObject().endObject().endObject());
|
||||
mapper = indexService.mapperService().merge("_doc",
|
||||
new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
|
||||
assertEquals(mapping2, mapper.mappingSource().toString());
|
||||
|
||||
String mapping3 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
|
||||
.startObject("properties").startObject("field").field("type", "keyword")
|
||||
.field("meta", Collections.singletonMap("baz", "quux"))
|
||||
.endObject().endObject().endObject().endObject());
|
||||
mapper = indexService.mapperService().merge("_doc",
|
||||
new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE);
|
||||
assertEquals(mapping3, mapper.mappingSource().toString());
|
||||
}
|
||||
}
|
||||
|
@ -1260,4 +1260,30 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
||||
assertThat(mapper.mappers().getMapper("b_field"), instanceOf(KeywordFieldMapper.class));
|
||||
}
|
||||
}
|
||||
|
||||
public void testMeta() throws Exception {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
|
||||
.startObject("properties").startObject("field").field("type", "text")
|
||||
.field("meta", Collections.singletonMap("foo", "bar"))
|
||||
.endObject().endObject().endObject().endObject());
|
||||
|
||||
DocumentMapper mapper = indexService.mapperService().merge("_doc",
|
||||
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
|
||||
.startObject("properties").startObject("field").field("type", "text")
|
||||
.endObject().endObject().endObject().endObject());
|
||||
mapper = indexService.mapperService().merge("_doc",
|
||||
new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
|
||||
assertEquals(mapping2, mapper.mappingSource().toString());
|
||||
|
||||
String mapping3 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
|
||||
.startObject("properties").startObject("field").field("type", "text")
|
||||
.field("meta", Collections.singletonMap("baz", "quux"))
|
||||
.endObject().endObject().endObject().endObject());
|
||||
mapper = indexService.mapperService().merge("_doc",
|
||||
new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE);
|
||||
assertEquals(mapping3, mapper.mappingSource().toString());
|
||||
}
|
||||
}
|
||||
|
@ -19,6 +19,7 @@

package org.elasticsearch.index.mapper;

import com.google.common.collect.ImmutableMap;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
@ -39,11 +40,14 @@ import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.IntStream;
import java.util.stream.Collectors;

import static org.elasticsearch.index.analysis.AnalysisRegistry.DEFAULT_ANALYZER_NAME;
import static org.elasticsearch.index.analysis.AnalysisRegistry.DEFAULT_SEARCH_ANALYZER_NAME;
@ -209,4 +213,75 @@ public class TypeParsersTests extends ESTestCase {
return new CustomAnalyzer(null, new CharFilterFactory[0],
new TokenFilterFactory[] { tokenFilter });
}

public void testParseMeta() {
FieldMapper.Builder<?, ?> builder = new KeywordFieldMapper.Builder("foo");
Mapper.TypeParser.ParserContext parserContext = new Mapper.TypeParser.ParserContext(null, null, null, null, null);

{
Map<String, Object> mapping = new HashMap<>(ImmutableMap.of("meta", 3));
MapperParsingException e = expectThrows(MapperParsingException.class,
() -> TypeParsers.parseField(builder, builder.name, mapping, parserContext));
assertEquals("[meta] must be an object, got Integer[3] for field [foo]", e.getMessage());
}

{
Map<String, Object> mapping = new HashMap<>(ImmutableMap.of("meta", ImmutableMap.of("veryloooooooooooongkey", 3L)));
MapperParsingException e = expectThrows(MapperParsingException.class,
() -> TypeParsers.parseField(builder, builder.name, mapping, parserContext));
assertEquals("[meta] keys can't be longer than 20 chars, but got [veryloooooooooooongkey] for field [foo]",
e.getMessage());
}

{
Map<String, String> meta = new HashMap<>();
meta.put("foo1", "3");
meta.put("foo2", "3");
meta.put("foo3", "3");
meta.put("foo4", "3");
meta.put("foo5", "3");
meta.put("foo6", "3");
Map<String, Object> mapping = new HashMap<>(ImmutableMap.of("meta", meta));
MapperParsingException e = expectThrows(MapperParsingException.class,
() -> TypeParsers.parseField(builder, builder.name, mapping, parserContext));
assertEquals("[meta] can't have more than 5 entries, but got 6 on field [foo]",
e.getMessage());
}

{
Map<String, Object> mapping = new HashMap<>(ImmutableMap.of("meta", ImmutableMap.of("foo", ImmutableMap.of("bar", "baz"))));
MapperParsingException e = expectThrows(MapperParsingException.class,
() -> TypeParsers.parseField(builder, builder.name, mapping, parserContext));
assertEquals("[meta] values can only be strings, but got SingletonImmutableBiMap[{bar=baz}] for field [foo]",
e.getMessage());
}

{
Map<String, Object> mapping = new HashMap<>(ImmutableMap.of("meta", ImmutableMap.of("bar", "baz", "foo", 3)));
MapperParsingException e = expectThrows(MapperParsingException.class,
() -> TypeParsers.parseField(builder, builder.name, mapping, parserContext));
assertEquals("[meta] values can only be strings, but got Integer[3] for field [foo]",
e.getMessage());
}

{
Map<String, String> meta = new HashMap<>();
meta.put("foo", null);
Map<String, Object> mapping = new HashMap<>(ImmutableMap.of("meta", meta));
MapperParsingException e = expectThrows(MapperParsingException.class,
() -> TypeParsers.parseField(builder, builder.name, mapping, parserContext));
assertEquals("[meta] values can't be null (field [foo])",
e.getMessage());
}

{
String longString = IntStream.range(0, 51)
.mapToObj(Integer::toString)
.collect(Collectors.joining());
Map<String, Object> mapping = new HashMap<>(ImmutableMap.of("meta", ImmutableMap.of("foo", longString)));
MapperParsingException e = expectThrows(MapperParsingException.class,
() -> TypeParsers.parseField(builder, builder.name, mapping, parserContext));
assertThat(e.getMessage(), Matchers.startsWith("[meta] values can't be longer than 50 chars"));
}
}
}
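The testParseMeta cases above pin down the meta validation rules only through the asserted error messages. As a rough, self-contained sketch of that validation logic (not the actual TypeParsers.parseMeta code; MetaValidator and its validate method are hypothetical names, and the limits are taken from the messages asserted above), the checks could look like this:

```java
import java.util.Map;

// Hypothetical helper mirroring the limits asserted in testParseMeta:
// at most 5 entries, keys no longer than 20 chars, values non-null strings
// no longer than 50 chars. Not the real Elasticsearch implementation.
final class MetaValidator {
    static void validate(String field, Object metaNode) {
        if ((metaNode instanceof Map) == false) {
            throw new IllegalArgumentException("[meta] must be an object, got "
                + metaNode.getClass().getSimpleName() + "[" + metaNode + "] for field [" + field + "]");
        }
        Map<?, ?> meta = (Map<?, ?>) metaNode;
        if (meta.size() > 5) {
            throw new IllegalArgumentException("[meta] can't have more than 5 entries, but got "
                + meta.size() + " on field [" + field + "]");
        }
        for (Map.Entry<?, ?> entry : meta.entrySet()) {
            String key = (String) entry.getKey();
            if (key.length() > 20) {
                throw new IllegalArgumentException("[meta] keys can't be longer than 20 chars, but got ["
                    + key + "] for field [" + field + "]");
            }
            Object value = entry.getValue();
            if (value == null) {
                throw new IllegalArgumentException("[meta] values can't be null (field [" + field + "])");
            }
            if ((value instanceof String) == false) {
                throw new IllegalArgumentException("[meta] values can only be strings, but got "
                    + value.getClass().getSimpleName() + "[" + value + "] for field [" + field + "]");
            }
            if (((String) value).length() > 50) {
                throw new IllegalArgumentException("[meta] values can't be longer than 50 chars (field [" + field + "])");
            }
        }
    }
}
```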
@ -119,12 +119,14 @@ public class FieldCapabilitiesIT extends ESIntegTestCase {

assertTrue(distance.containsKey("double"));
assertEquals(
new FieldCapabilities("distance", "double", true, true, new String[] {"old_index"}, null, null),
new FieldCapabilities("distance", "double", true, true, new String[] {"old_index"}, null, null,
Collections.emptyMap()),
distance.get("double"));

assertTrue(distance.containsKey("text"));
assertEquals(
new FieldCapabilities("distance", "text", true, false, new String[] {"new_index"}, null, null),
new FieldCapabilities("distance", "text", true, false, new String[] {"new_index"}, null, null,
Collections.emptyMap()),
distance.get("text"));

// Check the capabilities for the 'route_length_miles' alias.
@ -133,7 +135,7 @@ public class FieldCapabilitiesIT extends ESIntegTestCase {

assertTrue(routeLength.containsKey("double"));
assertEquals(
new FieldCapabilities("route_length_miles", "double", true, true),
new FieldCapabilities("route_length_miles", "double", true, true, Collections.emptyMap()),
routeLength.get("double"));
}

@ -174,12 +176,14 @@ public class FieldCapabilitiesIT extends ESIntegTestCase {

assertTrue(oldField.containsKey("long"));
assertEquals(
new FieldCapabilities("old_field", "long", true, true, new String[] {"old_index"}, null, null),
new FieldCapabilities("old_field", "long", true, true, new String[] {"old_index"}, null, null,
Collections.emptyMap()),
oldField.get("long"));

assertTrue(oldField.containsKey("unmapped"));
assertEquals(
new FieldCapabilities("old_field", "unmapped", false, false, new String[] {"new_index"}, null, null),
new FieldCapabilities("old_field", "unmapped", false, false, new String[] {"new_index"}, null, null,
Collections.emptyMap()),
oldField.get("unmapped"));

Map<String, FieldCapabilities> newField = response.getField("new_field");
@ -187,7 +191,7 @@ public class FieldCapabilitiesIT extends ESIntegTestCase {

assertTrue(newField.containsKey("long"));
assertEquals(
new FieldCapabilities("new_field", "long", true, true),
new FieldCapabilities("new_field", "long", true, true, Collections.emptyMap()),
newField.get("long"));
}
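The assertions above all pass Collections.emptyMap() because none of the test mappings declare meta. For a field that did declare meta, a comparable assertion might look like the sketch below; the field name, the meta content, and the assumption that the trailing constructor argument maps meta keys to the set of merged values are illustrative only and not part of this test:

```java
// Hypothetical assertion; "latency" and the "unit" -> "ms" entry are made-up
// example values. The constructor overload with a trailing meta map is the one
// used throughout the diff above.
FieldCapabilities expectedLatencyCapabilities = new FieldCapabilities(
    "latency", "long", true, true,
    Collections.singletonMap("unit", Collections.singleton("ms")));
assertEquals(expectedLatencyCapabilities, response.getField("latency").get("long"));
```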
@ -22,6 +22,7 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
@ -29,6 +30,7 @@ import org.junit.Before;

import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Set;

import static org.hamcrest.Matchers.containsString;
@ -124,4 +126,33 @@ public abstract class AbstractNumericFieldMapperTestCase extends ESSingleNodeTes
}
}

public void testMeta() throws Exception {
for (String type : TYPES) {
IndexService indexService = createIndex("test-" + type);
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", type)
.field("meta", Collections.singletonMap("foo", "bar"))
.endObject().endObject().endObject().endObject());

DocumentMapper mapper = indexService.mapperService().merge("_doc",
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
assertEquals(mapping, mapper.mappingSource().toString());

String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", type)
.endObject().endObject().endObject().endObject());
mapper = indexService.mapperService().merge("_doc",
new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
assertEquals(mapping2, mapper.mappingSource().toString());

String mapping3 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", type)
.field("meta", Collections.singletonMap("baz", "quux"))
.endObject().endObject().endObject().endObject());
mapper = indexService.mapperService().merge("_doc",
new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE);
assertEquals(mapping3, mapper.mappingSource().toString());
}
}

}
@ -43,6 +43,7 @@ import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.TypeParsers;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
@ -124,6 +125,7 @@ public class HistogramFieldMapper extends FieldMapper {
Map<String, Object> node, ParserContext parserContext)
throws MapperParsingException {
Builder builder = new HistogramFieldMapper.Builder(name);
TypeParsers.parseMeta(builder, name, node);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = entry.getKey();
@ -11,10 +11,12 @@ import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.xpack.analytics.AnalyticsPlugin;
@ -22,6 +24,7 @@ import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

import static org.hamcrest.Matchers.containsString;
@ -498,6 +501,33 @@ public class HistogramFieldMapperTests extends ESSingleNodeTestCase {
assertThat(e.getCause().getMessage(), containsString("[counts] elements must be >= 0 but got -3"));
}

public void testMeta() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", "histogram")
.field("meta", Collections.singletonMap("foo", "bar"))
.endObject().endObject().endObject().endObject());

IndexService indexService = createIndex("test");
DocumentMapper mapper = indexService.mapperService().merge("_doc",
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
assertEquals(mapping, mapper.mappingSource().toString());

String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", "histogram")
.endObject().endObject().endObject().endObject());
mapper = indexService.mapperService().merge("_doc",
new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
assertEquals(mapping2, mapper.mappingSource().toString());

String mapping3 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
.startObject("properties").startObject("field").field("type", "histogram")
.field("meta", Collections.singletonMap("baz", "quux"))
.endObject().endObject().endObject().endObject());
mapper = indexService.mapperService().merge("_doc",
new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE);
assertEquals(mapping3, mapper.mappingSource().toString());
}

@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
List<Class<? extends Plugin>> plugins = new ArrayList<>(super.getPlugins());
@ -920,7 +920,7 @@ public class ExtractedFieldsDetectorTests extends ESTestCase {
private MockFieldCapsResponseBuilder addField(String field, boolean isAggregatable, String... types) {
Map<String, FieldCapabilities> caps = new HashMap<>();
for (String type : types) {
caps.put(type, new FieldCapabilities(field, type, true, isAggregatable));
caps.put(type, new FieldCapabilities(field, type, true, isAggregatable, Collections.emptyMap()));
}
fieldCaps.put(field, caps);
return this;
@ -14,6 +14,7 @@ import org.elasticsearch.xpack.sql.type.KeywordEsField;
import org.elasticsearch.xpack.sql.type.TypesTests;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -143,8 +144,10 @@ public class IndexResolverTests extends ESTestCase {
addFieldCaps(fieldCaps, fieldName + ".keyword", "keyword", true, true);

Map<String, FieldCapabilities> multi = new HashMap<>();
multi.put("long", new FieldCapabilities(fieldName, "long", true, true, new String[] { "one-index" }, null, null));
multi.put("text", new FieldCapabilities(fieldName, "text", true, false, new String[] { "another-index" }, null, null));
multi.put("long", new FieldCapabilities(fieldName, "long", true, true, new String[] { "one-index" }, null, null,
Collections.emptyMap()));
multi.put("text", new FieldCapabilities(fieldName, "text", true, false, new String[] { "another-index" }, null, null,
Collections.emptyMap()));
fieldCaps.put(fieldName, multi);

@ -214,7 +217,8 @@ public class IndexResolverTests extends ESTestCase {

public void testIndexWithNoMapping() {
Map<String, Map<String, FieldCapabilities>> versionFC = singletonMap("_version",
singletonMap("_index", new FieldCapabilities("_version", "_version", false, false)));
singletonMap("_index", new FieldCapabilities("_version", "_version", false, false,
Collections.emptyMap())));
assertTrue(IndexResolver.mergedMappings("*", new String[] { "empty" }, versionFC).isValid());
}

@ -289,7 +293,7 @@ public class IndexResolverTests extends ESTestCase {
List<String> nonAggregatableIndices = new ArrayList<>();

UpdateableFieldCapabilities(String name, String type, boolean isSearchable, boolean isAggregatable) {
super(name, type, isSearchable, isAggregatable);
super(name, type, isSearchable, isAggregatable, Collections.emptyMap());
}

@Override
@ -323,7 +327,7 @@ public class IndexResolverTests extends ESTestCase {
private void addFieldCaps(Map<String, Map<String, FieldCapabilities>> fieldCaps, String name, String type, boolean isSearchable,
boolean isAggregatable) {
Map<String, FieldCapabilities> cap = new HashMap<>();
cap.put(name, new FieldCapabilities(name, type, isSearchable, isAggregatable));
cap.put(name, new FieldCapabilities(name, type, isSearchable, isAggregatable, Collections.emptyMap()));
fieldCaps.put(name, cap);
}
}