SQL: Use field caps inside DESCRIBE TABLE as well (#41377)
Thanks to #34071, there is enough information in field caps to infer the table structure and thus use the same API consistently across the IndexResolver. (cherry picked from commit f99946943a3350206b6bca774b2f060f41a787b3)
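For context on the mechanism: the resolver now derives the table structure from a single field capabilities call. The sketch below is illustrative only and mirrors createFieldCapsRequest in the diff further down; the "test*" pattern, the Client handle and the println are assumptions, not part of this commit.

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.Client;

class FieldCapsSketch {
    // Sketch only: mirrors createFieldCapsRequest(...) in the diff; "test*" is a made-up pattern.
    static void dumpFieldCaps(Client client) {
        FieldCapabilitiesRequest request = new FieldCapabilitiesRequest()
                .indices("test*")
                .fields("*")
                .includeUnmapped(true)
                // lenient so that security honors ignore_unavailable instead of throwing authorization errors
                .indicesOptions(IndicesOptions.lenientExpandOpen());

        client.fieldCaps(request, ActionListener.wrap(
                // response.get() is a Map<String, Map<String, FieldCapabilities>>: field name -> type -> capabilities,
                // which is what the new mergedMappings/separateMappings consume to rebuild per-index mappings
                response -> System.out.println(response.get().keySet()),
                Exception::printStackTrace));
    }
}

The JdbcSecurityIT change below reflects this: per the in-diff comment, these calls no longer require the monitor permission.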
parent f65a86c258
commit b288b88ba0
@@ -234,12 +234,14 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
         expectUnauthorized("cluster:monitor/main", user, () -> es(userProperties(user)));
         expectUnauthorized("cluster:monitor/main", user, () -> es(userProperties(user)).getMetaData().getDatabaseMajorVersion());
         expectUnauthorized("cluster:monitor/main", user, () -> es(userProperties(user)).getMetaData().getDatabaseMinorVersion());
-        expectUnauthorized("cluster:monitor/main", user,
-                () -> es(userProperties(user)).createStatement().executeQuery("SELECT * FROM test"));
-        expectUnauthorized("cluster:monitor/main", user,
-                () -> es(userProperties(user)).createStatement().executeQuery("SHOW TABLES LIKE 'test'"));
-        expectUnauthorized("cluster:monitor/main", user,
-                () -> es(userProperties(user)).createStatement().executeQuery("DESCRIBE test"));
+
+        // by moving to field caps these calls do not require the monitor permission
+        // expectUnauthorized("cluster:monitor/main", user,
+        //         () -> es(userProperties(user)).createStatement().executeQuery("SELECT * FROM test"));
+        // expectUnauthorized("cluster:monitor/main", user,
+        //         () -> es(userProperties(user)).createStatement().executeQuery("SHOW TABLES LIKE 'test'"));
+        // expectUnauthorized("cluster:monitor/main", user,
+        //         () -> es(userProperties(user)).createStatement().executeQuery("DESCRIBE test"));
     }
 
     private void expectUnauthorized(String action, String user, ThrowingRunnable r) {
@@ -107,6 +107,7 @@ x-pack_plugin_sql_qa_single-node_integTestCluster |null |test_emp
 x-pack_plugin_sql_qa_single-node_integTestCluster |null |test_emp |salary |4 |INTEGER |11 |4 |null |10 |1 |null |null |4 |0 |null |11 |YES |null |null |null |null |NO |NO
 x-pack_plugin_sql_qa_single-node_integTestCluster |null |test_emp_copy |birth_date |93 |DATETIME |29 |8 |null |null |1 |null |null |9 |3 |null |1 |YES |null |null |null |null |NO |NO
 x-pack_plugin_sql_qa_single-node_integTestCluster |null |test_emp_copy |emp_no |4 |INTEGER |11 |4 |null |10 |1 |null |null |4 |0 |null |3 |YES |null |null |null |null |NO |NO
+x-pack_plugin_sql_qa_single-node_integTestCluster |null |test_emp_copy |extra.info.gender |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |6 |YES |null |null |null |null |NO |NO
 x-pack_plugin_sql_qa_single-node_integTestCluster |null |test_emp_copy |extra_gender |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |7 |YES |null |null |null |null |NO |NO
 x-pack_plugin_sql_qa_single-node_integTestCluster |null |test_emp_copy |extra_no |4 |INTEGER |11 |4 |null |10 |1 |null |null |4 |0 |null |8 |YES |null |null |null |null |NO |NO
 x-pack_plugin_sql_qa_single-node_integTestCluster |null |test_emp_copy |first_name |12 |TEXT |2147483647 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |9 |YES |null |null |null |null |NO |NO
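The rows above are the expected SYS COLUMNS output for the QA cluster; the added row exposes the dotted extra.info.gender field of test_emp_copy. For orientation only, a JDBC client reaches this listing through driver metadata roughly as sketched below; the URL and table name are placeholders, not taken from the test suite.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;

class SysColumnsSketch {
    // Sketch only: "jdbc:es://localhost:9200" and "test_emp_copy" are placeholder values.
    static void listColumns() throws SQLException {
        try (Connection con = DriverManager.getConnection("jdbc:es://localhost:9200");
             ResultSet cols = con.getMetaData().getColumns(null, "%", "test_emp_copy", "%")) {
            while (cols.next()) {
                // COLUMN_NAME and TYPE_NAME are the 4th and 6th columns of the rows above
                System.out.println(cols.getString("COLUMN_NAME") + " " + cols.getString("TYPE_NAME"));
            }
        }
    }
}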
@@ -6,7 +6,6 @@
 package org.elasticsearch.xpack.sql.analysis.index;
 
 import com.carrotsearch.hppc.cursors.ObjectCursor;
-import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 
 import org.elasticsearch.ElasticsearchSecurityException;
 import org.elasticsearch.action.ActionListener;
@@ -22,17 +21,15 @@ import org.elasticsearch.action.support.IndicesOptions.Option;
 import org.elasticsearch.action.support.IndicesOptions.WildcardStates;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.metadata.AliasMetaData;
-import org.elasticsearch.cluster.metadata.MappingMetaData;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.index.IndexNotFoundException;
+import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
 import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.type.DateEsField;
 import org.elasticsearch.xpack.sql.type.EsField;
 import org.elasticsearch.xpack.sql.type.InvalidMappedField;
 import org.elasticsearch.xpack.sql.type.KeywordEsField;
 import org.elasticsearch.xpack.sql.type.TextEsField;
-import org.elasticsearch.xpack.sql.type.Types;
 import org.elasticsearch.xpack.sql.type.UnsupportedEsField;
 import org.elasticsearch.xpack.sql.util.CollectionUtils;
 
@@ -41,20 +38,25 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.EnumSet;
+import java.util.HashSet;
+import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.NavigableSet;
 import java.util.Objects;
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
+import java.util.function.BiFunction;
 import java.util.function.Function;
 import java.util.regex.Pattern;
 
+import static java.util.Arrays.asList;
+import static java.util.Collections.emptyList;
 import static java.util.Collections.emptyMap;
+import static java.util.Collections.emptySet;
 
 public class IndexResolver {
 
@@ -136,6 +138,7 @@ public class IndexResolver {
     private static final IndicesOptions INDICES_ONLY_OPTIONS = new IndicesOptions(
             EnumSet.of(Option.ALLOW_NO_INDICES, Option.IGNORE_UNAVAILABLE, Option.IGNORE_ALIASES), EnumSet.of(WildcardStates.OPEN));
     private static final List<String> FIELD_NAMES_BLACKLIST = Arrays.asList("_size");
+    private static final String UNMAPPED = "unmapped";
 
     private final Client client;
     private final String clusterName;
@@ -242,103 +245,82 @@ public class IndexResolver {
     public void resolveAsMergedMapping(String indexWildcard, String javaRegex, ActionListener<IndexResolution> listener) {
         FieldCapabilitiesRequest fieldRequest = createFieldCapsRequest(indexWildcard);
         client.fieldCaps(fieldRequest,
-                ActionListener.wrap(response -> listener.onResponse(mergedMapping(indexWildcard, response.get())), listener::onFailure));
+                ActionListener.wrap(
+                        response -> listener.onResponse(mergedMappings(indexWildcard, response.getIndices(), response.get())),
+                        listener::onFailure));
     }
 
-    static IndexResolution mergedMapping(String indexPattern, Map<String, Map<String, FieldCapabilities>> fieldCaps) {
+    static IndexResolution mergedMappings(String indexPattern, String[] indexNames, Map<String, Map<String, FieldCapabilities>> fieldCaps) {
         if (fieldCaps == null || fieldCaps.isEmpty()) {
             return IndexResolution.notFound(indexPattern);
         }
 
-        StringBuilder errorMessage = new StringBuilder();
-
-        NavigableSet<Entry<String, Map<String, FieldCapabilities>>> sortedFields = new TreeSet<>(
-                // for some reason .reversed doesn't work (prolly due to inference)
-                Collections.reverseOrder(Comparator.comparing(Entry::getKey)));
-        sortedFields.addAll(fieldCaps.entrySet());
-
-        Map<String, EsField> hierarchicalMapping = new TreeMap<>();
-        Map<String, EsField> flattedMapping = new LinkedHashMap<>();
-
-        // sort keys descending in order to easily detect multi-fields (a.b.c multi-field of a.b)
-        // without sorting, they can still be detected however without the emptyMap optimization
-        // (fields without multi-fields have no children)
-        for (Entry<String, Map<String, FieldCapabilities>> entry : sortedFields) {
-
-            InvalidMappedField invalidField = null;
-            FieldCapabilities fieldCap = null;
-            errorMessage.setLength(0);
-
-            String name = entry.getKey();
-
-            // Skip any of the blacklisted field names.
-            if (!FIELD_NAMES_BLACKLIST.contains(name)) {
-                Map<String, FieldCapabilities> types = entry.getValue();
-                // field is mapped differently across indices
-                if (types.size() > 1) {
-                    // build the error message
-                    // and create a MultiTypeField
-
-                    for (Entry<String, FieldCapabilities> type : types.entrySet()) {
-                        if (errorMessage.length() > 0) {
-                            errorMessage.append(", ");
-                        }
-                        errorMessage.append("[");
-                        errorMessage.append(type.getKey());
-                        errorMessage.append("] in ");
-                        errorMessage.append(Arrays.toString(type.getValue().indices()));
-                    }
-
-                    errorMessage.insert(0, "mapped as [" + types.size() + "] incompatible types: ");
-
-                    invalidField = new InvalidMappedField(name, errorMessage.toString());
-                }
-                // type is okay, check aggregation
-                else {
-                    fieldCap = types.values().iterator().next();
-
-                    // Skip internal fields (name starting with underscore and its type reported by field_caps starts with underscore
-                    // as well). A meta field named "_version", for example, has the type named "_version".
-                    if (name.startsWith("_") && fieldCap.getType().startsWith("_")) {
-                        continue;
-                    }
-                    // validate search/agg-able
-                    if (fieldCap.isAggregatable() && fieldCap.nonAggregatableIndices() != null) {
-                        errorMessage.append("mapped as aggregatable except in ");
-                        errorMessage.append(Arrays.toString(fieldCap.nonAggregatableIndices()));
-                    }
-                    if (fieldCap.isSearchable() && fieldCap.nonSearchableIndices() != null) {
-                        if (errorMessage.length() > 0) {
-                            errorMessage.append(",");
-                        }
-                        errorMessage.append("mapped as searchable except in ");
-                        errorMessage.append(Arrays.toString(fieldCap.nonSearchableIndices()));
-                    }
-
-                    if (errorMessage.length() > 0) {
-                        invalidField = new InvalidMappedField(name, errorMessage.toString());
-                    }
-                }
-
-                // validation passes - create the field
-                // if the name wasn't added before
-                final InvalidMappedField invalidF = invalidField;
-                final FieldCapabilities fieldCapab = fieldCap;
-
-                EsField esField = flattedMapping.get(name);
-                if (esField == null || (invalidF != null && (esField instanceof InvalidMappedField) == false)) {
-                    createField(name, fieldCaps, hierarchicalMapping, flattedMapping, s -> {
-                        return invalidF != null ? invalidF : createField(s, fieldCapab.getType(), emptyMap(), fieldCapab.isAggregatable());
-                    });
-                }
-            }
-        }
-
-        return IndexResolution.valid(new EsIndex(indexPattern, hierarchicalMapping));
+        // merge all indices onto the same one
+        List<EsIndex> indices = buildIndices(indexNames, null, fieldCaps, i -> indexPattern, (n, types) -> {
+            StringBuilder errorMessage = new StringBuilder();
+
+            boolean hasUnmapped = types.containsKey(UNMAPPED);
+
+            if (types.size() > (hasUnmapped ? 2 : 1)) {
+                // build the error message
+                // and create a MultiTypeField
+
+                for (Entry<String, FieldCapabilities> type : types.entrySet()) {
+                    // skip unmapped
+                    if (UNMAPPED.equals(type.getKey())) {
+                        continue;
+                    }
+
+                    if (errorMessage.length() > 0) {
+                        errorMessage.append(", ");
+                    }
+                    errorMessage.append("[");
+                    errorMessage.append(type.getKey());
+                    errorMessage.append("] in ");
+                    errorMessage.append(Arrays.toString(type.getValue().indices()));
+                }
+
+                errorMessage.insert(0, "mapped as [" + (types.size() - (hasUnmapped ? 1 : 0)) + "] incompatible types: ");
+
+                return new InvalidMappedField(n, errorMessage.toString());
+            }
+            // type is okay, check aggregation
+            else {
+                FieldCapabilities fieldCap = types.values().iterator().next();
+
+                // validate search/agg-able
+                if (fieldCap.isAggregatable() && fieldCap.nonAggregatableIndices() != null) {
+                    errorMessage.append("mapped as aggregatable except in ");
+                    errorMessage.append(Arrays.toString(fieldCap.nonAggregatableIndices()));
+                }
+                if (fieldCap.isSearchable() && fieldCap.nonSearchableIndices() != null) {
+                    if (errorMessage.length() > 0) {
+                        errorMessage.append(",");
+                    }
+                    errorMessage.append("mapped as searchable except in ");
+                    errorMessage.append(Arrays.toString(fieldCap.nonSearchableIndices()));
+                }
+
+                if (errorMessage.length() > 0) {
+                    return new InvalidMappedField(n, errorMessage.toString());
+                }
+            }
+
+            // everything checks
+            return null;
+        });
+
+        if (indices.size() != 1) {
+            throw new SqlIllegalArgumentException("Incorrect merging of mappings (likely due to a bug) - expect 1 but found [{}]",
+                    indices.size());
+        }
+        return IndexResolution.valid(indices.get(0));
     }
 
     private static EsField createField(String fieldName, Map<String, Map<String, FieldCapabilities>> globalCaps,
-            Map<String, EsField> hierarchicalMapping, Map<String, EsField> flattedMapping,
+            Map<String, EsField> hierarchicalMapping,
+            Map<String, EsField> flattedMapping,
             Function<String, EsField> field) {
 
         Map<String, EsField> parentProps = hierarchicalMapping;
@@ -359,17 +341,22 @@ public class IndexResolver {
                     // as such, create the field manually
                     fieldFunction = s -> createField(s, DataType.OBJECT.name(), new TreeMap<>(), false);
                 } else {
-                    FieldCapabilities parentCap = map.values().iterator().next();
-                    fieldFunction = s -> createField(s, parentCap.getType(), new TreeMap<>(), parentCap.isAggregatable());
+                    Iterator<FieldCapabilities> iterator = map.values().iterator();
+                    FieldCapabilities parentCap = iterator.next();
+                    if (iterator.hasNext() && UNMAPPED.equals(parentCap.getType())) {
+                        parentCap = iterator.next();
+                    }
+                    final FieldCapabilities parentC = parentCap;
+                    fieldFunction = s -> createField(s, parentC.getType(), new TreeMap<>(), parentC.isAggregatable());
                 }
 
                 parent = createField(parentName, globalCaps, hierarchicalMapping, flattedMapping, fieldFunction);
             }
             parentProps = parent.getProperties();
         }
 
         EsField esField = field.apply(fieldName);
 
         parentProps.put(fieldName, esField);
         flattedMapping.put(fullFieldName, esField);
 
@@ -394,108 +381,133 @@ public class IndexResolver {
                 return new EsField(fieldName, esType, props, isAggregateable);
         }
     }
 
-    private static FieldCapabilitiesRequest createFieldCapsRequest(String index) {
-        return new FieldCapabilitiesRequest()
-                .indices(Strings.commaDelimitedListToStringArray(index))
-                .fields("*")
-                //lenient because we throw our own errors looking at the response e.g. if something was not resolved
-                //also because this way security doesn't throw authorization exceptions but rather honors ignore_unavailable
-                .indicesOptions(IndicesOptions.lenientExpandOpen());
-    }
-
-    // TODO: Concrete indices still uses get mapping
-    // waiting on https://github.com/elastic/elasticsearch/pull/34071
-    //
-
     /**
      * Resolves a pattern to multiple, separate indices. Doesn't perform validation.
      */
     public void resolveAsSeparateMappings(String indexWildcard, String javaRegex, ActionListener<List<EsIndex>> listener) {
-        GetIndexRequest getIndexRequest = createGetIndexRequest(indexWildcard);
-        client.admin().indices().getIndex(getIndexRequest, ActionListener.wrap(getIndexResponse -> {
-            ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> mappings = getIndexResponse.getMappings();
-            ImmutableOpenMap<String, List<AliasMetaData>> aliases = getIndexResponse.getAliases();
-
-            List<EsIndex> results = new ArrayList<>(mappings.size());
-            Pattern pattern = javaRegex != null ? Pattern.compile(javaRegex) : null;
-            for (ObjectObjectCursor<String, ImmutableOpenMap<String, MappingMetaData>> indexMappings : mappings) {
-                /*
-                 * We support wildcard expressions here, and it's only for commands that only perform the get index call.
-                 * We can and simply have to use the concrete index name and show that to users.
-                 * Get index against an alias with security enabled, where the user has only access to get mappings for the alias
-                 * and not the concrete index: there is a well known information leak of the concrete index name in the response.
-                 */
-                String concreteIndex = indexMappings.key;
-
-                // take into account aliases
-                List<AliasMetaData> aliasMetadata = aliases.get(concreteIndex);
-                boolean matchesAlias = false;
-                if (pattern != null && aliasMetadata != null) {
-                    for (AliasMetaData aliasMeta : aliasMetadata) {
-                        if (pattern.matcher(aliasMeta.alias()).matches()) {
-                            matchesAlias = true;
-                            break;
-                        }
-                    }
-                }
-
-                if (pattern == null || matchesAlias || pattern.matcher(concreteIndex).matches()) {
-                    IndexResolution getIndexResult = buildGetIndexResult(concreteIndex, concreteIndex, indexMappings.value);
-                    if (getIndexResult.isValid()) {
-                        results.add(getIndexResult.get());
-                    }
-                }
-            }
-            results.sort(Comparator.comparing(EsIndex::name));
-            listener.onResponse(results);
-        }, listener::onFailure));
+        FieldCapabilitiesRequest fieldRequest = createFieldCapsRequest(indexWildcard);
+        client.fieldCaps(fieldRequest,
+                ActionListener.wrap(
+                        response -> listener.onResponse(separateMappings(indexWildcard, javaRegex, response.getIndices(), response.get())),
+                        listener::onFailure));
+    }
+
+    static List<EsIndex> separateMappings(String indexPattern, String javaRegex, String[] indexNames,
+            Map<String, Map<String, FieldCapabilities>> fieldCaps) {
+        return buildIndices(indexNames, javaRegex, fieldCaps, Function.identity(), (s, cap) -> null);
+    }
+
+    private static class Fields {
+        final Map<String, EsField> hierarchicalMapping = new TreeMap<>();
+        final Map<String, EsField> flattedMapping = new LinkedHashMap<>();
+    }
+
+    /**
+     * Assemble an index-based mapping from the field caps (which is field based) by looking at the indices associated with
+     * each field.
+     */
+    private static List<EsIndex> buildIndices(String[] indexNames, String javaRegex, Map<String, Map<String, FieldCapabilities>> fieldCaps,
+            Function<String, String> indexNameProcessor,
+            BiFunction<String, Map<String, FieldCapabilities>, InvalidMappedField> validityVerifier) {
+
+        if (indexNames == null || indexNames.length == 0) {
+            return emptyList();
+        }
+
+        final List<String> resolvedIndices = asList(indexNames);
+        Map<String, Fields> indices = new LinkedHashMap<>(resolvedIndices.size());
+        Pattern pattern = javaRegex != null ? Pattern.compile(javaRegex) : null;
+
+        // sort fields in reverse order to build the field hierarchy
+        Set<Entry<String, Map<String, FieldCapabilities>>> sortedFields = new TreeSet<>(
+                Collections.reverseOrder(Comparator.comparing(Entry::getKey)));
+        sortedFields.addAll(fieldCaps.entrySet());
+
+        for (Entry<String, Map<String, FieldCapabilities>> entry : sortedFields) {
+            String fieldName = entry.getKey();
+            Map<String, FieldCapabilities> types = entry.getValue();
+
+            // ignore size added by the mapper plugin
+            if (FIELD_NAMES_BLACKLIST.contains(fieldName)) {
+                continue;
+            }
+
+            // apply verification
+            final InvalidMappedField invalidField = validityVerifier.apply(fieldName, types);
+
+            // filter meta fields and unmapped
+            FieldCapabilities unmapped = types.get(UNMAPPED);
+            Set<String> unmappedIndices = unmapped != null ? new HashSet<>(asList(unmapped.indices())) : emptySet();
+
+            // check each type
+            for (Entry<String, FieldCapabilities> typeEntry : types.entrySet()) {
+                FieldCapabilities typeCap = typeEntry.getValue();
+                String[] capIndices = typeCap.indices();
+
+                // Skip internal fields (name starting with underscore and its type reported by field_caps starts
+                // with underscore as well). A meta field named "_version", for example, has the type named "_version".
+                if (typeEntry.getKey().startsWith("_") && typeCap.getType().startsWith("_")) {
+                    continue;
+                }
+
+                // compute the actual indices - if any are specified, take into account the unmapped indices
+                List<String> concreteIndices = null;
+                if (capIndices != null) {
+                    if (unmappedIndices.isEmpty() == true) {
+                        concreteIndices = asList(capIndices);
+                    } else {
+                        concreteIndices = new ArrayList<>(capIndices.length - unmappedIndices.size() + 1);
+                        for (String capIndex : capIndices) {
+                            // add only indices that have a mapping
+                            if (unmappedIndices.contains(capIndex) == false) {
+                                concreteIndices.add(capIndex);
+                            }
+                        }
+                    }
+                } else {
+                    concreteIndices = resolvedIndices;
+                }
+
+                // put the field in their respective mappings
+                for (String index : concreteIndices) {
+                    if (pattern == null || pattern.matcher(index).matches()) {
+                        String indexName = indexNameProcessor.apply(index);
+                        Fields indexFields = indices.get(indexName);
+                        if (indexFields == null) {
+                            indexFields = new Fields();
+                            indices.put(indexName, indexFields);
+                        }
+                        EsField field = indexFields.flattedMapping.get(fieldName);
+                        if (field == null || (invalidField != null && (field instanceof InvalidMappedField) == false)) {
+                            createField(fieldName, fieldCaps, indexFields.hierarchicalMapping, indexFields.flattedMapping, s ->
+                                invalidField != null ? invalidField :
+                                    createField(s, typeCap.getType(), emptyMap(), typeCap.isAggregatable()));
+                        }
+                    }
+                }
+            }
+        }
+
+        // return indices in ascending order
+        List<EsIndex> foundIndices = new ArrayList<>(indices.size());
+        for (Entry<String, Fields> entry : indices.entrySet()) {
+            foundIndices.add(new EsIndex(entry.getKey(), entry.getValue().hierarchicalMapping));
+        }
+        foundIndices.sort(Comparator.comparing(EsIndex::name));
+        return foundIndices;
     }
 
-    private static GetIndexRequest createGetIndexRequest(String index) {
-        return new GetIndexRequest()
-                .local(true)
+    private static FieldCapabilitiesRequest createFieldCapsRequest(String index) {
+        return new FieldCapabilitiesRequest()
                 .indices(Strings.commaDelimitedListToStringArray(index))
+                .fields("*")
+                .includeUnmapped(true)
                 //lenient because we throw our own errors looking at the response e.g. if something was not resolved
-                //also because this way security doesn't throw authorization exceptions but rather honours ignore_unavailable
+                //also because this way security doesn't throw authorization exceptions but rather honors ignore_unavailable
                 .indicesOptions(IndicesOptions.lenientExpandOpen());
     }
-
-    private static IndexResolution buildGetIndexResult(String concreteIndex, String indexOrAlias,
-            ImmutableOpenMap<String, MappingMetaData> mappings) {
-
-        // Make sure that the index contains only a single type
-        MappingMetaData singleType = null;
-        List<String> typeNames = null;
-        for (ObjectObjectCursor<String, MappingMetaData> type : mappings) {
-            //Default mappings are ignored as they are applied to each type. Each type alone holds all of its fields.
-            if ("_default_".equals(type.key)) {
-                continue;
-            }
-            if (singleType != null) {
-                // There are more than one types
-                if (typeNames == null) {
-                    typeNames = new ArrayList<>();
-                    typeNames.add(singleType.type());
-                }
-                typeNames.add(type.key);
-            }
-            singleType = type.value;
-        }
-
-        if (singleType == null) {
-            return IndexResolution.invalid("[" + indexOrAlias + "] doesn't have any types so it is incompatible with sql");
-        } else if (typeNames != null) {
-            Collections.sort(typeNames);
-            return IndexResolution.invalid(
-                    "[" + indexOrAlias + "] contains more than one type " + typeNames + " so it is incompatible with sql");
-        } else {
-            try {
-                Map<String, EsField> mapping = Types.fromEs(singleType.sourceAsMap());
-                return IndexResolution.valid(new EsIndex(indexOrAlias, mapping));
-            } catch (MappingException ex) {
-                return IndexResolution.invalid(ex.getMessage());
-            }
-        }
-    }
 }
@@ -116,7 +116,7 @@ public class SysColumns extends Command {
         Pattern columnMatcher = columnPattern != null ? Pattern.compile(columnPattern.asJavaRegex()) : null;
 
-        // special case fo '%' (translated to *)
+        // special case for '%' (translated to *)
         if ("*".equals(idx)) {
             session.indexResolver().resolveAsSeparateMappings(idx, regex, ActionListener.wrap(esIndices -> {
                 List<List<?>> rows = new ArrayList<>();
@@ -17,6 +17,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.stream.Stream;
 
 import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
 
@@ -28,11 +29,7 @@ public class IndexResolverTests extends ESTestCase {
         assertNotSame(oneMapping, sameMapping);
         assertEquals(oneMapping, sameMapping);
 
-        String wildcard = "*";
-
-        IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fromMappings(
-                new EsIndex("a", oneMapping),
-                new EsIndex("b", sameMapping)));
+        IndexResolution resolution = merge(new EsIndex("a", oneMapping), new EsIndex("b", sameMapping));
 
         assertTrue(resolution.isValid());
         assertEqualsMaps(oneMapping, resolution.get().mapping());
@@ -44,10 +41,7 @@ public class IndexResolverTests extends ESTestCase {
 
         assertNotEquals(basicMapping, numericMapping);
 
-        String wildcard = "*";
-        IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fromMappings(
-                new EsIndex("basic", basicMapping),
-                new EsIndex("numeric", numericMapping)));
+        IndexResolution resolution = merge(new EsIndex("basic", basicMapping), new EsIndex("numeric", numericMapping));
 
         assertTrue(resolution.isValid());
         assertEquals(basicMapping.size() + numericMapping.size(), resolution.get().mapping().size());
@@ -60,8 +54,7 @@ public class IndexResolverTests extends ESTestCase {
         assertNotEquals(basicMapping, incompatible);
 
         String wildcard = "*";
-        IndexResolution resolution = IndexResolver.mergedMapping(wildcard,
-                fromMappings(new EsIndex("basic", basicMapping), new EsIndex("incompatible", incompatible)));
+        IndexResolution resolution = merge(new EsIndex("basic", basicMapping), new EsIndex("incompatible", incompatible));
 
         assertTrue(resolution.isValid());
 
@@ -82,8 +75,7 @@ public class IndexResolverTests extends ESTestCase {
         assertNotEquals(basicMapping, incompatible);
 
         String wildcard = "*";
-        IndexResolution resolution = IndexResolver.mergedMapping(wildcard,
-                fromMappings(new EsIndex("basic", basicMapping), new EsIndex("incompatible", incompatible)));
+        IndexResolution resolution = merge(new EsIndex("basic", basicMapping), new EsIndex("incompatible", incompatible));
 
         assertTrue(resolution.isValid());
 
@@ -97,8 +89,7 @@ public class IndexResolverTests extends ESTestCase {
     public void testMultiLevelObjectMappings() throws Exception {
         Map<String, EsField> dottedMapping = TypesTests.loadMapping("mapping-dotted-field.json", true);
 
-        String wildcard = "*";
-        IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fromMappings(new EsIndex("a", dottedMapping)));
+        IndexResolution resolution = merge(new EsIndex("a", dottedMapping));
 
         assertTrue(resolution.isValid());
         assertEqualsMaps(dottedMapping, resolution.get().mapping());
@@ -107,8 +98,7 @@ public class IndexResolverTests extends ESTestCase {
     public void testMultiLevelNestedMappings() throws Exception {
         Map<String, EsField> nestedMapping = TypesTests.loadMapping("mapping-nested.json", true);
 
-        String wildcard = "*";
-        IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fromMappings(new EsIndex("a", nestedMapping)));
+        IndexResolution resolution = merge(new EsIndex("a", nestedMapping));
 
         assertTrue(resolution.isValid());
         assertEqualsMaps(nestedMapping, resolution.get().mapping());
@@ -122,7 +112,7 @@ public class IndexResolverTests extends ESTestCase {
         addFieldCaps(fieldCaps, "text", "keyword", true, true);
 
         String wildcard = "*";
-        IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fieldCaps);
+        IndexResolution resolution = IndexResolver.mergedMappings(wildcard, new String[] { "index" }, fieldCaps);
         assertTrue(resolution.isValid());
 
         EsIndex esIndex = resolution.get();
@@ -157,7 +147,7 @@ public class IndexResolverTests extends ESTestCase {
 
 
         String wildcard = "*";
-        IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fieldCaps);
+        IndexResolution resolution = IndexResolver.mergedMappings(wildcard, new String[] { "one-index" }, fieldCaps);
 
         assertTrue(resolution.isValid());
 
@@ -175,8 +165,40 @@ public class IndexResolverTests extends ESTestCase {
     }
 
+    public void testSeparateSameMappingDifferentIndices() throws Exception {
+        Map<String, EsField> oneMapping = TypesTests.loadMapping("mapping-basic.json", true);
+        Map<String, EsField> sameMapping = TypesTests.loadMapping("mapping-basic.json", true);
+        assertNotSame(oneMapping, sameMapping);
+        assertEquals(oneMapping, sameMapping);
+
+        List<EsIndex> indices = separate(new EsIndex("a", oneMapping), new EsIndex("b", sameMapping));
+
+        assertEquals(2, indices.size());
+        assertEqualsMaps(oneMapping, indices.get(0).mapping());
+        assertEqualsMaps(sameMapping, indices.get(1).mapping());
+    }
+
+    public void testSeparateIncompatibleTypes() throws Exception {
+        Map<String, EsField> basicMapping = TypesTests.loadMapping("mapping-basic.json", true);
+        Map<String, EsField> incompatible = TypesTests.loadMapping("mapping-basic-incompatible.json");
+
+        assertNotEquals(basicMapping, incompatible);
+
+        List<EsIndex> indices = separate(new EsIndex("basic", basicMapping), new EsIndex("incompatible", incompatible));
+
+        assertEquals(2, indices.size());
+        assertEqualsMaps(basicMapping, indices.get(0).mapping());
+        assertEqualsMaps(incompatible, indices.get(1).mapping());
+    }
+
     public static IndexResolution merge(EsIndex... indices) {
-        return IndexResolver.mergedMapping("*", fromMappings(indices));
+        return IndexResolver.mergedMappings("*", Stream.of(indices).map(EsIndex::name).toArray(String[]::new), fromMappings(indices));
+    }
+
+    public static List<EsIndex> separate(EsIndex... indices) {
+        return IndexResolver.separateMappings("*", null, Stream.of(indices).map(EsIndex::name).toArray(String[]::new),
+                fromMappings(indices));
     }
 
     public static Map<String, Map<String, FieldCapabilities>> fromMappings(EsIndex... indices) {
@@ -215,7 +237,7 @@ public class IndexResolverTests extends ESTestCase {
             }
             FieldCapabilities caps = map.computeIfAbsent(field.getDataType().typeName,
                     esType -> new UpdateableFieldCapabilities(fieldName, esType,
                             isSearchable(field.getDataType()),
                             isAggregatable(field.getDataType())));
 
             if (!field.isAggregatable()) {