SQL: Use field caps inside DESCRIBE TABLE as well (#41377)
Thanks to #34071, there is enough information in field caps to infer the table structure and thus use the same API consistently across the IndexResolver.
(cherry picked from commit f99946943a3350206b6bca774b2f060f41a787b3)
parent f65a86c258, commit b288b88ba0
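To illustrate the idea in the commit message: a field_caps response is keyed by field name, with one entry per type under which that field is mapped across the matching indices, which is enough to derive a DESCRIBE TABLE-style column listing without a get-mappings call. The sketch below is illustrative only; it uses plain maps as a stand-in for the actual FieldCapabilities response objects, and the class and method names are made up for the example.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;

public class FieldCapsToColumns {

    // Hypothetical shape of a field_caps response: field name -> (type name -> indices mapping it with that type).
    // The real response also carries searchable/aggregatable flags and "unmapped" entries.
    public static Map<String, String> columns(Map<String, Map<String, String[]>> fieldCaps) {
        Map<String, String> columns = new TreeMap<>();
        for (Map.Entry<String, Map<String, String[]>> field : fieldCaps.entrySet()) {
            Map<String, String[]> types = field.getValue();
            if (types.size() == 1) {
                // single type across all indices - usable as-is
                columns.put(field.getKey(), types.keySet().iterator().next());
            } else {
                // mapped differently across indices - surfaced as invalid rather than silently merged
                columns.put(field.getKey(), "INVALID " + types.keySet());
            }
        }
        return columns;
    }

    public static void main(String[] args) {
        Map<String, Map<String, String[]>> caps = new LinkedHashMap<>();
        caps.put("emp_no", Map.of("integer", new String[] { "test_emp", "test_emp_copy" }));
        caps.put("first_name", Map.of("text", new String[] { "test_emp", "test_emp_copy" }));
        columns(caps).forEach((name, type) -> System.out.println(name + " | " + type));
    }
}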
@@ -234,12 +234,14 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
        expectUnauthorized("cluster:monitor/main", user, () -> es(userProperties(user)));
        expectUnauthorized("cluster:monitor/main", user, () -> es(userProperties(user)).getMetaData().getDatabaseMajorVersion());
        expectUnauthorized("cluster:monitor/main", user, () -> es(userProperties(user)).getMetaData().getDatabaseMinorVersion());
        expectUnauthorized("cluster:monitor/main", user,
                () -> es(userProperties(user)).createStatement().executeQuery("SELECT * FROM test"));
        expectUnauthorized("cluster:monitor/main", user,
                () -> es(userProperties(user)).createStatement().executeQuery("SHOW TABLES LIKE 'test'"));
        expectUnauthorized("cluster:monitor/main", user,
                () -> es(userProperties(user)).createStatement().executeQuery("DESCRIBE test"));

        // by moving to field caps these calls do not require the monitor permission
        // expectUnauthorized("cluster:monitor/main", user,
        //         () -> es(userProperties(user)).createStatement().executeQuery("SELECT * FROM test"));
        // expectUnauthorized("cluster:monitor/main", user,
        //         () -> es(userProperties(user)).createStatement().executeQuery("SHOW TABLES LIKE 'test'"));
        // expectUnauthorized("cluster:monitor/main", user,
        //         () -> es(userProperties(user)).createStatement().executeQuery("DESCRIBE test"));
    }

    private void expectUnauthorized(String action, String user, ThrowingRunnable r) {
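For context on what the test above exercises: DESCRIBE and SHOW TABLES are plain statements over the SQL JDBC driver, so after this change they can succeed for a user who can read the index but lacks the cluster:monitor/main privilege. A hedged sketch of such a call (the connection URL, credentials, and result-set handling are placeholders for illustration, not code from this change):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Properties;

public class DescribeOverJdbc {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.setProperty("user", "only_read_user");   // placeholder credentials
        props.setProperty("password", "password");
        // placeholder URL for the Elasticsearch SQL JDBC driver
        try (Connection con = DriverManager.getConnection("jdbc:es://localhost:9200", props);
             Statement st = con.createStatement();
             ResultSet rs = st.executeQuery("DESCRIBE test")) {
            while (rs.next()) {
                // column name and data type as returned by DESCRIBE TABLE
                System.out.println(rs.getString("column") + " | " + rs.getString("type"));
            }
        }
    }
}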
@@ -107,6 +107,7 @@ x-pack_plugin_sql_qa_single-node_integTestCluster |null |test_emp
x-pack_plugin_sql_qa_single-node_integTestCluster |null |test_emp |salary |4 |INTEGER |11 |4 |null |10 |1 |null |null |4 |0 |null |11 |YES |null |null |null |null |NO |NO
x-pack_plugin_sql_qa_single-node_integTestCluster |null |test_emp_copy |birth_date |93 |DATETIME |29 |8 |null |null |1 |null |null |9 |3 |null |1 |YES |null |null |null |null |NO |NO
x-pack_plugin_sql_qa_single-node_integTestCluster |null |test_emp_copy |emp_no |4 |INTEGER |11 |4 |null |10 |1 |null |null |4 |0 |null |3 |YES |null |null |null |null |NO |NO
x-pack_plugin_sql_qa_single-node_integTestCluster |null |test_emp_copy |extra.info.gender |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |6 |YES |null |null |null |null |NO |NO
x-pack_plugin_sql_qa_single-node_integTestCluster |null |test_emp_copy |extra_gender |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |7 |YES |null |null |null |null |NO |NO
x-pack_plugin_sql_qa_single-node_integTestCluster |null |test_emp_copy |extra_no |4 |INTEGER |11 |4 |null |10 |1 |null |null |4 |0 |null |8 |YES |null |null |null |null |NO |NO
x-pack_plugin_sql_qa_single-node_integTestCluster |null |test_emp_copy |first_name |12 |TEXT |2147483647 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |9 |YES |null |null |null |null |NO |NO
@@ -6,7 +6,6 @@
package org.elasticsearch.xpack.sql.analysis.index;

import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.action.ActionListener;
@@ -22,17 +21,15 @@ import org.elasticsearch.action.support.IndicesOptions.Option;
import org.elasticsearch.action.support.IndicesOptions.WildcardStates;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DateEsField;
import org.elasticsearch.xpack.sql.type.EsField;
import org.elasticsearch.xpack.sql.type.InvalidMappedField;
import org.elasticsearch.xpack.sql.type.KeywordEsField;
import org.elasticsearch.xpack.sql.type.TextEsField;
import org.elasticsearch.xpack.sql.type.Types;
import org.elasticsearch.xpack.sql.type.UnsupportedEsField;
import org.elasticsearch.xpack.sql.util.CollectionUtils;
@@ -41,20 +38,25 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableSet;
import java.util.Objects;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.regex.Pattern;

import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;

public class IndexResolver {
@@ -136,6 +138,7 @@ public class IndexResolver {
    private static final IndicesOptions INDICES_ONLY_OPTIONS = new IndicesOptions(
            EnumSet.of(Option.ALLOW_NO_INDICES, Option.IGNORE_UNAVAILABLE, Option.IGNORE_ALIASES), EnumSet.of(WildcardStates.OPEN));
    private static final List<String> FIELD_NAMES_BLACKLIST = Arrays.asList("_size");
    private static final String UNMAPPED = "unmapped";

    private final Client client;
    private final String clusterName;
@@ -242,44 +245,32 @@ public class IndexResolver {
    public void resolveAsMergedMapping(String indexWildcard, String javaRegex, ActionListener<IndexResolution> listener) {
        FieldCapabilitiesRequest fieldRequest = createFieldCapsRequest(indexWildcard);
        client.fieldCaps(fieldRequest,
                ActionListener.wrap(response -> listener.onResponse(mergedMapping(indexWildcard, response.get())), listener::onFailure));
                ActionListener.wrap(
                        response -> listener.onResponse(mergedMappings(indexWildcard, response.getIndices(), response.get())),
                        listener::onFailure));
    }

    static IndexResolution mergedMapping(String indexPattern, Map<String, Map<String, FieldCapabilities>> fieldCaps) {
    static IndexResolution mergedMappings(String indexPattern, String[] indexNames, Map<String, Map<String, FieldCapabilities>> fieldCaps) {
        if (fieldCaps == null || fieldCaps.isEmpty()) {
            return IndexResolution.notFound(indexPattern);
        }

        // merge all indices onto the same one
        List<EsIndex> indices = buildIndices(indexNames, null, fieldCaps, i -> indexPattern, (n, types) -> {
            StringBuilder errorMessage = new StringBuilder();

        NavigableSet<Entry<String, Map<String, FieldCapabilities>>> sortedFields = new TreeSet<>(
                // for some reason .reversed doesn't work (prolly due to inference)
                Collections.reverseOrder(Comparator.comparing(Entry::getKey)));
        sortedFields.addAll(fieldCaps.entrySet());
            boolean hasUnmapped = types.containsKey(UNMAPPED);

        Map<String, EsField> hierarchicalMapping = new TreeMap<>();
        Map<String, EsField> flattedMapping = new LinkedHashMap<>();

        // sort keys descending in order to easily detect multi-fields (a.b.c multi-field of a.b)
        // without sorting, they can still be detected however without the emptyMap optimization
        // (fields without multi-fields have no children)
        for (Entry<String, Map<String, FieldCapabilities>> entry : sortedFields) {

            InvalidMappedField invalidField = null;
            FieldCapabilities fieldCap = null;
            errorMessage.setLength(0);

            String name = entry.getKey();

            // Skip any of the blacklisted field names.
            if (!FIELD_NAMES_BLACKLIST.contains(name)) {
                Map<String, FieldCapabilities> types = entry.getValue();
                // field is mapped differently across indices
                if (types.size() > 1) {
            if (types.size() > (hasUnmapped ? 2 : 1)) {
                    // build the error message
                    // and create a MultiTypeField

                    for (Entry<String, FieldCapabilities> type : types.entrySet()) {
                        // skip unmapped
                        if (UNMAPPED.equals(type.getKey())) {
                            continue;
                        }

                        if (errorMessage.length() > 0) {
                            errorMessage.append(", ");
                        }
@@ -289,19 +280,14 @@ public class IndexResolver {
                        errorMessage.append(Arrays.toString(type.getValue().indices()));
                    }

                    errorMessage.insert(0, "mapped as [" + types.size() + "] incompatible types: ");
                    errorMessage.insert(0, "mapped as [" + (types.size() - (hasUnmapped ? 1 : 0)) + "] incompatible types: ");

                    invalidField = new InvalidMappedField(name, errorMessage.toString());
                    return new InvalidMappedField(n, errorMessage.toString());
                }
                // type is okay, check aggregation
                else {
                    fieldCap = types.values().iterator().next();
                    FieldCapabilities fieldCap = types.values().iterator().next();

                    // Skip internal fields (name starting with underscore and its type reported by field_caps starts with underscore
                    // as well). A meta field named "_version", for example, has the type named "_version".
                    if (name.startsWith("_") && fieldCap.getType().startsWith("_")) {
                        continue;
                    }
                    // validate search/agg-able
                    if (fieldCap.isAggregatable() && fieldCap.nonAggregatableIndices() != null) {
                        errorMessage.append("mapped as aggregatable except in ");
@@ -316,29 +302,25 @@ public class IndexResolver {
                    }

                    if (errorMessage.length() > 0) {
                        invalidField = new InvalidMappedField(name, errorMessage.toString());
                        return new InvalidMappedField(n, errorMessage.toString());
                    }
                }

                // validation passes - create the field
                // if the name wasn't added before
                final InvalidMappedField invalidF = invalidField;
                final FieldCapabilities fieldCapab = fieldCap;

                EsField esField = flattedMapping.get(name);
                if (esField == null || (invalidF != null && (esField instanceof InvalidMappedField) == false)) {
                    createField(name, fieldCaps, hierarchicalMapping, flattedMapping, s -> {
                        return invalidF != null ? invalidF : createField(s, fieldCapab.getType(), emptyMap(), fieldCapab.isAggregatable());
            // everything checks
            return null;
                    });
                }
            }

        if (indices.size() != 1) {
            throw new SqlIllegalArgumentException("Incorrect merging of mappings (likely due to a bug) - expect 1 but found [{}]",
                    indices.size());
        }

        return IndexResolution.valid(new EsIndex(indexPattern, hierarchicalMapping));
        return IndexResolution.valid(indices.get(0));
    }

    private static EsField createField(String fieldName, Map<String, Map<String, FieldCapabilities>> globalCaps,
            Map<String, EsField> hierarchicalMapping, Map<String, EsField> flattedMapping,
            Map<String, EsField> hierarchicalMapping,
            Map<String, EsField> flattedMapping,
            Function<String, EsField> field) {

        Map<String, EsField> parentProps = hierarchicalMapping;
@@ -359,8 +341,13 @@ public class IndexResolver {
                // as such, create the field manually
                fieldFunction = s -> createField(s, DataType.OBJECT.name(), new TreeMap<>(), false);
            } else {
                FieldCapabilities parentCap = map.values().iterator().next();
                fieldFunction = s -> createField(s, parentCap.getType(), new TreeMap<>(), parentCap.isAggregatable());
                Iterator<FieldCapabilities> iterator = map.values().iterator();
                FieldCapabilities parentCap = iterator.next();
                if (iterator.hasNext() && UNMAPPED.equals(parentCap.getType())) {
                    parentCap = iterator.next();
                }
                final FieldCapabilities parentC = parentCap;
                fieldFunction = s -> createField(s, parentC.getType(), new TreeMap<>(), parentC.isAggregatable());
            }

            parent = createField(parentName, globalCaps, hierarchicalMapping, flattedMapping, fieldFunction);
@@ -395,107 +382,132 @@ public class IndexResolver {
            }
        }

    private static FieldCapabilitiesRequest createFieldCapsRequest(String index) {
        return new FieldCapabilitiesRequest()
                .indices(Strings.commaDelimitedListToStringArray(index))
                .fields("*")
                //lenient because we throw our own errors looking at the response e.g. if something was not resolved
                //also because this way security doesn't throw authorization exceptions but rather honors ignore_unavailable
                .indicesOptions(IndicesOptions.lenientExpandOpen());
    }

    // TODO: Concrete indices still uses get mapping
    // waiting on https://github.com/elastic/elasticsearch/pull/34071
    //

    /**
     * Resolves a pattern to multiple, separate indices. Doesn't perform validation.
     */
    public void resolveAsSeparateMappings(String indexWildcard, String javaRegex, ActionListener<List<EsIndex>> listener) {
        GetIndexRequest getIndexRequest = createGetIndexRequest(indexWildcard);
        client.admin().indices().getIndex(getIndexRequest, ActionListener.wrap(getIndexResponse -> {
            ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> mappings = getIndexResponse.getMappings();
            ImmutableOpenMap<String, List<AliasMetaData>> aliases = getIndexResponse.getAliases();
        FieldCapabilitiesRequest fieldRequest = createFieldCapsRequest(indexWildcard);
        client.fieldCaps(fieldRequest,
                ActionListener.wrap(
                        response -> listener.onResponse(separateMappings(indexWildcard, javaRegex, response.getIndices(), response.get())),
                        listener::onFailure));

            List<EsIndex> results = new ArrayList<>(mappings.size());
            Pattern pattern = javaRegex != null ? Pattern.compile(javaRegex) : null;
            for (ObjectObjectCursor<String, ImmutableOpenMap<String, MappingMetaData>> indexMappings : mappings) {
                /*
                 * We support wildcard expressions here, and it's only for commands that only perform the get index call.
                 * We can and simply have to use the concrete index name and show that to users.
                 * Get index against an alias with security enabled, where the user has only access to get mappings for the alias
                 * and not the concrete index: there is a well known information leak of the concrete index name in the response.
    }

    static List<EsIndex> separateMappings(String indexPattern, String javaRegex, String[] indexNames,
            Map<String, Map<String, FieldCapabilities>> fieldCaps) {
        return buildIndices(indexNames, javaRegex, fieldCaps, Function.identity(), (s, cap) -> null);
    }

    private static class Fields {
        final Map<String, EsField> hierarchicalMapping = new TreeMap<>();
        final Map<String, EsField> flattedMapping = new LinkedHashMap<>();
    }

    /**
     * Assemble an index-based mapping from the field caps (which is field based) by looking at the indices associated with
     * each field.
     */
                String concreteIndex = indexMappings.key;
    private static List<EsIndex> buildIndices(String[] indexNames, String javaRegex, Map<String, Map<String, FieldCapabilities>> fieldCaps,
            Function<String, String> indexNameProcessor,
            BiFunction<String, Map<String, FieldCapabilities>, InvalidMappedField> validityVerifier) {

                // take into account aliases
                List<AliasMetaData> aliasMetadata = aliases.get(concreteIndex);
                boolean matchesAlias = false;
                if (pattern != null && aliasMetadata != null) {
                    for (AliasMetaData aliasMeta : aliasMetadata) {
                        if (pattern.matcher(aliasMeta.alias()).matches()) {
                            matchesAlias = true;
                            break;
                        }
                    }
        if (indexNames == null || indexNames.length == 0) {
            return emptyList();
        }

                if (pattern == null || matchesAlias || pattern.matcher(concreteIndex).matches()) {
                    IndexResolution getIndexResult = buildGetIndexResult(concreteIndex, concreteIndex, indexMappings.value);
                    if (getIndexResult.isValid()) {
                        results.add(getIndexResult.get());
                    }
                }
            }
            results.sort(Comparator.comparing(EsIndex::name));
            listener.onResponse(results);
        }, listener::onFailure));
    }
        final List<String> resolvedIndices = asList(indexNames);
        Map<String, Fields> indices = new LinkedHashMap<>(resolvedIndices.size());
        Pattern pattern = javaRegex != null ? Pattern.compile(javaRegex) : null;

    private static GetIndexRequest createGetIndexRequest(String index) {
        return new GetIndexRequest()
                .local(true)
                .indices(Strings.commaDelimitedListToStringArray(index))
                //lenient because we throw our own errors looking at the response e.g. if something was not resolved
                //also because this way security doesn't throw authorization exceptions but rather honours ignore_unavailable
                .indicesOptions(IndicesOptions.lenientExpandOpen());
    }
        // sort fields in reverse order to build the field hierarchy
        Set<Entry<String, Map<String, FieldCapabilities>>> sortedFields = new TreeSet<>(
                Collections.reverseOrder(Comparator.comparing(Entry::getKey)));

    private static IndexResolution buildGetIndexResult(String concreteIndex, String indexOrAlias,
            ImmutableOpenMap<String, MappingMetaData> mappings) {
        sortedFields.addAll(fieldCaps.entrySet());

        // Make sure that the index contains only a single type
        MappingMetaData singleType = null;
        List<String> typeNames = null;
        for (ObjectObjectCursor<String, MappingMetaData> type : mappings) {
            //Default mappings are ignored as they are applied to each type. Each type alone holds all of its fields.
            if ("_default_".equals(type.key)) {
        for (Entry<String, Map<String, FieldCapabilities>> entry : sortedFields) {
            String fieldName = entry.getKey();
            Map<String, FieldCapabilities> types = entry.getValue();

            // ignore size added by the mapper plugin
            if (FIELD_NAMES_BLACKLIST.contains(fieldName)) {
                continue;
            }
            if (singleType != null) {
                // There are more than one types
                if (typeNames == null) {
                    typeNames = new ArrayList<>();
                    typeNames.add(singleType.type());
                }
                typeNames.add(type.key);
            }
            singleType = type.value;

            // apply verification
            final InvalidMappedField invalidField = validityVerifier.apply(fieldName, types);

            // filter meta fields and unmapped
            FieldCapabilities unmapped = types.get(UNMAPPED);
            Set<String> unmappedIndices = unmapped != null ? new HashSet<>(asList(unmapped.indices())) : emptySet();

            // check each type
            for (Entry<String, FieldCapabilities> typeEntry : types.entrySet()) {
                FieldCapabilities typeCap = typeEntry.getValue();
                String[] capIndices = typeCap.indices();

                // Skip internal fields (name starting with underscore and its type reported by field_caps starts
                // with underscore as well). A meta field named "_version", for example, has the type named "_version".
                if (typeEntry.getKey().startsWith("_") && typeCap.getType().startsWith("_")) {
                    continue;
                }

        if (singleType == null) {
            return IndexResolution.invalid("[" + indexOrAlias + "] doesn't have any types so it is incompatible with sql");
        } else if (typeNames != null) {
            Collections.sort(typeNames);
            return IndexResolution.invalid(
                    "[" + indexOrAlias + "] contains more than one type " + typeNames + " so it is incompatible with sql");
                // compute the actual indices - if any are specified, take into account the unmapped indices
                List<String> concreteIndices = null;
                if (capIndices != null) {
                    if (unmappedIndices.isEmpty() == true) {
                        concreteIndices = asList(capIndices);
                    } else {
            try {
                Map<String, EsField> mapping = Types.fromEs(singleType.sourceAsMap());
                return IndexResolution.valid(new EsIndex(indexOrAlias, mapping));
            } catch (MappingException ex) {
                return IndexResolution.invalid(ex.getMessage());
                        concreteIndices = new ArrayList<>(capIndices.length - unmappedIndices.size() + 1);
                        for (String capIndex : capIndices) {
                            // add only indices that have a mapping
                            if (unmappedIndices.contains(capIndex) == false) {
                                concreteIndices.add(capIndex);
                            }
                        }
                    }
                } else {
                    concreteIndices = resolvedIndices;
                }

                // put the field in their respective mappings
                for (String index : concreteIndices) {
                    if (pattern == null || pattern.matcher(index).matches()) {
                        String indexName = indexNameProcessor.apply(index);
                        Fields indexFields = indices.get(indexName);
                        if (indexFields == null) {
                            indexFields = new Fields();
                            indices.put(indexName, indexFields);
                        }
                        EsField field = indexFields.flattedMapping.get(fieldName);
                        if (field == null || (invalidField != null && (field instanceof InvalidMappedField) == false)) {
                            createField(fieldName, fieldCaps, indexFields.hierarchicalMapping, indexFields.flattedMapping, s ->
                                    invalidField != null ? invalidField :
                                        createField(s, typeCap.getType(), emptyMap(), typeCap.isAggregatable()));
                        }
                    }
                }
            }
        }

        // return indices in ascending order
        List<EsIndex> foundIndices = new ArrayList<>(indices.size());
        for (Entry<String, Fields> entry : indices.entrySet()) {
            foundIndices.add(new EsIndex(entry.getKey(), entry.getValue().hierarchicalMapping));
        }
        foundIndices.sort(Comparator.comparing(EsIndex::name));
        return foundIndices;
    }

    private static FieldCapabilitiesRequest createFieldCapsRequest(String index) {
        return new FieldCapabilitiesRequest()
                .indices(Strings.commaDelimitedListToStringArray(index))
                .fields("*")
                .includeUnmapped(true)
                //lenient because we throw our own errors looking at the response e.g. if something was not resolved
                //also because this way security doesn't throw authorization exceptions but rather honors ignore_unavailable
                .indicesOptions(IndicesOptions.lenientExpandOpen());
    }
}
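One detail worth calling out in the new createFieldCapsRequest/buildIndices pair above: with includeUnmapped(true), a field that exists in only some of the matched indices gets an extra "unmapped" entry listing the indices that do not map it, and buildIndices uses that entry to attach the field only to the indices that actually have it. Below is a standalone sketch of that filtering idea using plain collections rather than the FieldCapabilities classes; it is illustrative only and not the code from this change.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class UnmappedFilteringSketch {

    // For one field: type name -> indices reporting that type; the "unmapped" key lists indices without the field.
    static List<String> indicesThatMapTheField(String[] allIndices, Map<String, String[]> typesForField) {
        String[] unmapped = typesForField.get("unmapped");
        if (unmapped == null) {
            // field present everywhere: every resolved index gets it
            return Arrays.asList(allIndices);
        }
        Set<String> unmappedIndices = new HashSet<>(Arrays.asList(unmapped));
        List<String> mapped = new ArrayList<>();
        for (String index : allIndices) {
            // keep only indices that actually have a mapping for the field
            if (unmappedIndices.contains(index) == false) {
                mapped.add(index);
            }
        }
        return mapped;
    }

    public static void main(String[] args) {
        String[] indices = { "test_emp", "test_emp_copy" };
        Map<String, String[]> extraNo = Map.of(
                "integer", new String[] { "test_emp_copy" },
                "unmapped", new String[] { "test_emp" });
        // prints [test_emp_copy]: only the index that actually maps the field
        System.out.println(indicesThatMapTheField(indices, extraNo));
    }
}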
@@ -116,7 +116,7 @@ public class SysColumns extends Command {

        Pattern columnMatcher = columnPattern != null ? Pattern.compile(columnPattern.asJavaRegex()) : null;

        // special case fo '%' (translated to *)
        // special case for '%' (translated to *)
        if ("*".equals(idx)) {
            session.indexResolver().resolveAsSeparateMappings(idx, regex, ActionListener.wrap(esIndices -> {
                List<List<?>> rows = new ArrayList<>();
@@ -17,6 +17,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.stream.Stream;

import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
@@ -28,11 +29,7 @@ public class IndexResolverTests extends ESTestCase {
        assertNotSame(oneMapping, sameMapping);
        assertEquals(oneMapping, sameMapping);

        String wildcard = "*";

        IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fromMappings(
                new EsIndex("a", oneMapping),
                new EsIndex("b", sameMapping)));
        IndexResolution resolution = merge(new EsIndex("a", oneMapping), new EsIndex("b", sameMapping));

        assertTrue(resolution.isValid());
        assertEqualsMaps(oneMapping, resolution.get().mapping());
@@ -44,10 +41,7 @@ public class IndexResolverTests extends ESTestCase {

        assertNotEquals(basicMapping, numericMapping);

        String wildcard = "*";
        IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fromMappings(
                new EsIndex("basic", basicMapping),
                new EsIndex("numeric", numericMapping)));
        IndexResolution resolution = merge(new EsIndex("basic", basicMapping), new EsIndex("numeric", numericMapping));

        assertTrue(resolution.isValid());
        assertEquals(basicMapping.size() + numericMapping.size(), resolution.get().mapping().size());
@@ -60,8 +54,7 @@ public class IndexResolverTests extends ESTestCase {
        assertNotEquals(basicMapping, incompatible);

        String wildcard = "*";
        IndexResolution resolution = IndexResolver.mergedMapping(wildcard,
                fromMappings(new EsIndex("basic", basicMapping), new EsIndex("incompatible", incompatible)));
        IndexResolution resolution = merge(new EsIndex("basic", basicMapping), new EsIndex("incompatible", incompatible));

        assertTrue(resolution.isValid());
@@ -82,8 +75,7 @@ public class IndexResolverTests extends ESTestCase {
        assertNotEquals(basicMapping, incompatible);

        String wildcard = "*";
        IndexResolution resolution = IndexResolver.mergedMapping(wildcard,
                fromMappings(new EsIndex("basic", basicMapping), new EsIndex("incompatible", incompatible)));
        IndexResolution resolution = merge(new EsIndex("basic", basicMapping), new EsIndex("incompatible", incompatible));

        assertTrue(resolution.isValid());
@@ -97,8 +89,7 @@ public class IndexResolverTests extends ESTestCase {
    public void testMultiLevelObjectMappings() throws Exception {
        Map<String, EsField> dottedMapping = TypesTests.loadMapping("mapping-dotted-field.json", true);

        String wildcard = "*";
        IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fromMappings(new EsIndex("a", dottedMapping)));
        IndexResolution resolution = merge(new EsIndex("a", dottedMapping));

        assertTrue(resolution.isValid());
        assertEqualsMaps(dottedMapping, resolution.get().mapping());
@@ -107,8 +98,7 @@ public class IndexResolverTests extends ESTestCase {
    public void testMultiLevelNestedMappings() throws Exception {
        Map<String, EsField> nestedMapping = TypesTests.loadMapping("mapping-nested.json", true);

        String wildcard = "*";
        IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fromMappings(new EsIndex("a", nestedMapping)));
        IndexResolution resolution = merge(new EsIndex("a", nestedMapping));

        assertTrue(resolution.isValid());
        assertEqualsMaps(nestedMapping, resolution.get().mapping());
@@ -122,7 +112,7 @@ public class IndexResolverTests extends ESTestCase {
        addFieldCaps(fieldCaps, "text", "keyword", true, true);

        String wildcard = "*";
        IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fieldCaps);
        IndexResolution resolution = IndexResolver.mergedMappings(wildcard, new String[] { "index" }, fieldCaps);
        assertTrue(resolution.isValid());

        EsIndex esIndex = resolution.get();
@@ -157,7 +147,7 @@ public class IndexResolverTests extends ESTestCase {

        String wildcard = "*";
        IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fieldCaps);
        IndexResolution resolution = IndexResolver.mergedMappings(wildcard, new String[] { "one-index" }, fieldCaps);

        assertTrue(resolution.isValid());
@@ -175,8 +165,40 @@ public class IndexResolverTests extends ESTestCase {
    }


    public void testSeparateSameMappingDifferentIndices() throws Exception {
        Map<String, EsField> oneMapping = TypesTests.loadMapping("mapping-basic.json", true);
        Map<String, EsField> sameMapping = TypesTests.loadMapping("mapping-basic.json", true);
        assertNotSame(oneMapping, sameMapping);
        assertEquals(oneMapping, sameMapping);

        List<EsIndex> indices = separate(new EsIndex("a", oneMapping), new EsIndex("b", sameMapping));

        assertEquals(2, indices.size());
        assertEqualsMaps(oneMapping, indices.get(0).mapping());
        assertEqualsMaps(sameMapping, indices.get(1).mapping());
    }

    public void testSeparateIncompatibleTypes() throws Exception {
        Map<String, EsField> basicMapping = TypesTests.loadMapping("mapping-basic.json", true);
        Map<String, EsField> incompatible = TypesTests.loadMapping("mapping-basic-incompatible.json");

        assertNotEquals(basicMapping, incompatible);

        List<EsIndex> indices = separate(new EsIndex("basic", basicMapping), new EsIndex("incompatible", incompatible));

        assertEquals(2, indices.size());
        assertEqualsMaps(basicMapping, indices.get(0).mapping());
        assertEqualsMaps(incompatible, indices.get(1).mapping());
    }

    public static IndexResolution merge(EsIndex... indices) {
        return IndexResolver.mergedMapping("*", fromMappings(indices));
        return IndexResolver.mergedMappings("*", Stream.of(indices).map(EsIndex::name).toArray(String[]::new), fromMappings(indices));
    }

    public static List<EsIndex> separate(EsIndex... indices) {
        return IndexResolver.separateMappings("*", null, Stream.of(indices).map(EsIndex::name).toArray(String[]::new),
                fromMappings(indices));
    }

    public static Map<String, Map<String, FieldCapabilities>> fromMappings(EsIndex... indices) {