SQL: Support pattern against compatible indices (#34718)

Extend querying support over multiple indices from requiring strictly
identical mappings to accepting merely compatible ones.
Use FieldCapabilities API (extended through #33803) for mapping merging.

Close #31837 #31611
This commit is contained in:
Costin Leau 2018-10-23 17:07:51 +03:00 committed by GitHub
parent 36baf3823d
commit ca6808e55d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
25 changed files with 733 additions and 354 deletions

View File

@ -34,6 +34,6 @@ indices:
["source","yaml",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-tests}/security/roles.yml[cli_jdbc]
include-tagged::{sql-tests}/security/roles.yml[cli_drivers]
--------------------------------------------------

View File

@ -36,8 +36,8 @@ public enum DataType {
SCALED_FLOAT(JDBCType.FLOAT, Double.class, Double.BYTES, 19, 25, false, true, true),
KEYWORD( JDBCType.VARCHAR, String.class, Integer.MAX_VALUE, 256, 0),
TEXT( JDBCType.VARCHAR, String.class, Integer.MAX_VALUE, Integer.MAX_VALUE, 0, false, false, false),
OBJECT( JDBCType.STRUCT, null, -1, 0, 0),
NESTED( JDBCType.STRUCT, null, -1, 0, 0),
OBJECT( JDBCType.STRUCT, null, -1, 0, 0, false, false, false),
NESTED( JDBCType.STRUCT, null, -1, 0, 0, false, false, false),
BINARY( JDBCType.VARBINARY, byte[].class, -1, Integer.MAX_VALUE, 0),
// since ODBC and JDBC interpret precision for Date as display size,
// the precision is 23 (number of chars in ISO8601 with millis) + Z (the UTC timezone)
@ -223,7 +223,11 @@ public enum DataType {
* For any dataType DataType.fromEsType(dataType.esType) == dataType
*/
public static DataType fromEsType(String esType) {
return DataType.valueOf(esType.toUpperCase(Locale.ROOT));
try {
return DataType.valueOf(esType.toUpperCase(Locale.ROOT));
} catch (IllegalArgumentException ex) {
return DataType.UNSUPPORTED;
}
}
public boolean isCompatibleWith(DataType other) {

View File

@ -15,6 +15,8 @@ import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature;
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
import org.elasticsearch.action.fieldcaps.FieldCapabilities;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.IndicesOptions.Option;
import org.elasticsearch.action.support.IndicesOptions.WildcardStates;
@ -24,23 +26,34 @@ import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DateEsField;
import org.elasticsearch.xpack.sql.type.EsField;
import org.elasticsearch.xpack.sql.type.KeywordEsField;
import org.elasticsearch.xpack.sql.type.TextEsField;
import org.elasticsearch.xpack.sql.type.Types;
import org.elasticsearch.xpack.sql.type.UnsupportedEsField;
import org.elasticsearch.xpack.sql.util.CollectionUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableSet;
import java.util.Objects;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.regex.Pattern;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
public class IndexResolver {
@ -222,64 +235,157 @@ public class IndexResolver {
listener.onResponse(result);
}
/**
* Resolves a pattern to one (potentially compound meaning that spawns multiple indices) mapping.
*/
public void resolveWithSameMapping(String indexWildcard, String javaRegex, ActionListener<IndexResolution> listener) {
GetIndexRequest getIndexRequest = createGetIndexRequest(indexWildcard);
client.admin().indices().getIndex(getIndexRequest, ActionListener.wrap(response -> {
ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> mappings = response.getMappings();
public void resolveAsMergedMapping(String indexWildcard, String javaRegex, ActionListener<IndexResolution> listener) {
FieldCapabilitiesRequest fieldRequest = createFieldCapsRequest(indexWildcard);
client.fieldCaps(fieldRequest,
ActionListener.wrap(response -> listener.onResponse(mergedMapping(indexWildcard, response.get())), listener::onFailure));
}
List<IndexResolution> resolutions;
if (mappings.size() > 0) {
resolutions = new ArrayList<>(mappings.size());
Pattern pattern = javaRegex != null ? Pattern.compile(javaRegex) : null;
for (ObjectObjectCursor<String, ImmutableOpenMap<String, MappingMetaData>> indexMappings : mappings) {
String concreteIndex = indexMappings.key;
if (pattern == null || pattern.matcher(concreteIndex).matches()) {
resolutions.add(buildGetIndexResult(concreteIndex, concreteIndex, indexMappings.value));
static IndexResolution mergedMapping(String indexPattern, Map<String, Map<String, FieldCapabilities>> fieldCaps) {
if (fieldCaps == null || fieldCaps.isEmpty()) {
return IndexResolution.notFound(indexPattern);
}
StringBuilder errorMessage = new StringBuilder();
NavigableSet<Entry<String, Map<String, FieldCapabilities>>> sortedFields = new TreeSet<>(
// for some reason .reversed doesn't work (prolly due to inference)
Collections.reverseOrder(Comparator.comparing(Entry::getKey)));
sortedFields.addAll(fieldCaps.entrySet());
Map<String, EsField> hierarchicalMapping = new TreeMap<>();
Map<String, EsField> flattedMapping = new LinkedHashMap<>();
// sort keys descending in order to easily detect multi-fields (a.b.c multi-field of a.b)
// without sorting, they can still be detected however without the emptyMap optimization
// (fields without multi-fields have no children)
for (Entry<String, Map<String, FieldCapabilities>> entry : sortedFields) {
String name = entry.getKey();
// skip internal fields
if (!name.startsWith("_")) {
Map<String, FieldCapabilities> types = entry.getValue();
// field is mapped differently across indices
if (types.size() > 1) {
// build error message
for (Entry<String, FieldCapabilities> type : types.entrySet()) {
if (errorMessage.length() > 0) {
errorMessage.append(", ");
}
errorMessage.append("[");
errorMessage.append(type.getKey());
errorMessage.append("] in ");
errorMessage.append(Arrays.toString(type.getValue().indices()));
}
errorMessage.insert(0,
"[" + indexPattern + "] points to indices with incompatible mappings; " +
"field [" + name + "] is mapped in [" + types.size() + "] different ways: ");
}
if (errorMessage.length() > 0) {
return IndexResolution.invalid(errorMessage.toString());
}
FieldCapabilities fieldCap = types.values().iterator().next();
// validate search/agg-able
if (fieldCap.isAggregatable() && fieldCap.nonAggregatableIndices() != null) {
errorMessage.append("[" + indexPattern + "] points to indices with incompatible mappings: ");
errorMessage.append("field [" + name + "] is aggregateable except in ");
errorMessage.append(Arrays.toString(fieldCap.nonAggregatableIndices()));
}
if (fieldCap.isSearchable() && fieldCap.nonSearchableIndices() != null) {
if (errorMessage.length() > 0) {
errorMessage.append(",");
}
errorMessage.append("[" + indexPattern + "] points to indices with incompatible mappings: ");
errorMessage.append("field [" + name + "] is searchable except in ");
errorMessage.append(Arrays.toString(fieldCap.nonSearchableIndices()));
}
if (errorMessage.length() > 0) {
return IndexResolution.invalid(errorMessage.toString());
}
// validation passes - create the field
// and name wasn't added before
if (!flattedMapping.containsKey(name)) {
createField(name, fieldCap, fieldCaps, hierarchicalMapping, flattedMapping, false);
}
} else {
resolutions = emptyList();
}
listener.onResponse(merge(resolutions, indexWildcard));
}, listener::onFailure));
}
static IndexResolution merge(List<IndexResolution> resolutions, String indexWildcard) {
IndexResolution merged = null;
for (IndexResolution resolution : resolutions) {
// everything that follows gets compared
if (!resolution.isValid()) {
return resolution;
}
// initialize resolution on first run
if (merged == null) {
merged = resolution;
}
// need the same mapping across all resolutions
if (!merged.get().mapping().equals(resolution.get().mapping())) {
return IndexResolution.invalid(
"[" + indexWildcard + "] points to indices [" + merged.get().name() + "] "
+ "and [" + resolution.get().name() + "] which have different mappings. "
+ "When using multiple indices, the mappings must be identical.");
}
}
if (merged != null) {
// at this point, we are sure there's the same mapping across all (if that's the case) indices
// to keep things simple, use the given pattern as index name
merged = IndexResolution.valid(new EsIndex(indexWildcard, merged.get().mapping()));
} else {
merged = IndexResolution.notFound(indexWildcard);
}
return merged;
return IndexResolution.valid(new EsIndex(indexPattern, hierarchicalMapping));
}
/**
 * Creates an {@code EsField} for the given field capabilities and registers it both in the
 * hierarchical mapping (under its parent's properties) and in the flattened mapping
 * (keyed by the full dotted path). For a dotted name (a.b.c) whose parent has not been
 * created yet, the parent is built first, recursively, from the global capabilities map.
 *
 * @param fieldName           full dotted name of the field (e.g. "a.b.c")
 * @param caps                capabilities of this field for a single ES type
 * @param globalCaps          all field capabilities, used to look up missing parents
 * @param hierarchicalMapping top-level mapping being built; parents hold children in their properties
 * @param flattedMapping      flat lookup of already-created fields by full dotted name
 * @param hasChildren         whether this field has sub-fields (parents get a mutable properties map)
 * @return the newly created field (already registered in both mappings)
 */
private static EsField createField(String fieldName, FieldCapabilities caps, Map<String, Map<String, FieldCapabilities>> globalCaps,
        Map<String, EsField> hierarchicalMapping, Map<String, EsField> flattedMapping, boolean hasChildren) {

    Map<String, EsField> parentProps = hierarchicalMapping;

    int dot = fieldName.lastIndexOf('.');
    String fullFieldName = fieldName;

    if (dot >= 0) {
        String parentName = fieldName.substring(0, dot);
        // from here on work with the leaf name only; the parent owns the prefix
        fieldName = fieldName.substring(dot + 1);
        EsField parent = flattedMapping.get(parentName);
        if (parent == null) {
            // parent not created yet - build it (recursively) from the global capabilities
            Map<String, FieldCapabilities> map = globalCaps.get(parentName);
            if (map == null) {
                throw new SqlIllegalArgumentException("Cannot find field {}; this is likely a bug", parentName);
            }
            FieldCapabilities parentCap = map.values().iterator().next();
            parent = createField(parentName, parentCap, globalCaps, hierarchicalMapping, flattedMapping, true);
        }
        // nest this field under its parent instead of at the top level
        parentProps = parent.getProperties();
    }

    EsField field = null;
    // leaves share the immutable empty map; parents need a mutable, sorted one
    Map<String, EsField> props = hasChildren ? new TreeMap<>() : emptyMap();

    DataType esType = DataType.fromEsType(caps.getType());
    switch (esType) {
        case TEXT:
            field = new TextEsField(fieldName, props, false);
            break;
        case KEYWORD:
            int length = DataType.KEYWORD.defaultPrecision;
            // TODO: to check whether isSearchable/isAggregatable takes into account the presence of the normalizer
            boolean normalized = false;
            field = new KeywordEsField(fieldName, props, caps.isAggregatable(), length, normalized);
            break;
        case DATE:
            field = new DateEsField(fieldName, props, caps.isAggregatable());
            break;
        case UNSUPPORTED:
            field = new UnsupportedEsField(fieldName, caps.getType());
            break;
        default:
            field = new EsField(fieldName, esType, props, caps.isAggregatable());
    }

    parentProps.put(fieldName, field);
    flattedMapping.put(fullFieldName, field);

    return field;
}
/**
 * Builds a field-caps request for all fields of the (comma-separated) index pattern.
 */
private static FieldCapabilitiesRequest createFieldCapsRequest(String index) {
    FieldCapabilitiesRequest request = new FieldCapabilitiesRequest();
    request.indices(Strings.commaDelimitedListToStringArray(index));
    request.fields("*");
    //lenient because we throw our own errors looking at the response e.g. if something was not resolved
    //also because this way security doesn't throw authorization exceptions but rather honors ignore_unavailable
    request.indicesOptions(IndicesOptions.lenientExpandOpen());
    return request;
}
// TODO: Concrete indices still uses get mapping
// waiting on https://github.com/elastic/elasticsearch/pull/34071
//
/**
* Resolves a pattern to multiple, separate indices.
* Resolves a pattern to multiple, separate indices. Doesn't perform validation.
*/
public void resolveAsSeparateMappings(String indexWildcard, String javaRegex, ActionListener<List<EsIndex>> listener) {
GetIndexRequest getIndexRequest = createGetIndexRequest(indexWildcard);
@ -306,7 +412,7 @@ public class IndexResolver {
listener.onResponse(results);
}, listener::onFailure));
}
private static GetIndexRequest createGetIndexRequest(String index) {
return new GetIndexRequest()
.local(true)

View File

@ -54,13 +54,15 @@ public class ShowColumns extends Command {
@Override
public List<Attribute> output() {
return asList(new FieldAttribute(location(), "column", new KeywordEsField("column")),
new FieldAttribute(location(), "type", new KeywordEsField("type"))); }
new FieldAttribute(location(), "type", new KeywordEsField("type")),
new FieldAttribute(location(), "mapping", new KeywordEsField("mapping")));
}
@Override
public void execute(SqlSession session, ActionListener<SchemaRowSet> listener) {
String idx = index != null ? index : (pattern != null ? pattern.asIndexNameWildcard() : "*");
String regex = pattern != null ? pattern.asJavaRegex() : null;
session.indexResolver().resolveWithSameMapping(idx, regex, ActionListener.wrap(
session.indexResolver().resolveAsMergedMapping(idx, regex, ActionListener.wrap(
indexResult -> {
List<List<?>> rows = emptyList();
if (indexResult.isValid()) {
@ -69,8 +71,7 @@ public class ShowColumns extends Command {
}
listener.onResponse(Rows.of(output(), rows));
},
listener::onFailure
));
listener::onFailure));
}
private void fillInRows(Map<String, EsField> mapping, String prefix, List<List<?>> rows) {
@ -79,7 +80,7 @@ public class ShowColumns extends Command {
DataType dt = field.getDataType();
String name = e.getKey();
if (dt != null) {
rows.add(asList(prefix != null ? prefix + "." + name : name, dt.sqlName()));
rows.add(asList(prefix != null ? prefix + "." + name : name, dt.sqlName(), dt.name()));
if (field.getProperties().isEmpty() == false) {
String newPrefix = prefix != null ? prefix + "." + name : name;
fillInRows(field.getProperties(), newPrefix, rows);

View File

@ -172,7 +172,7 @@ public class QueryContainer {
// reference methods
//
private FieldExtraction topHitFieldRef(FieldAttribute fieldAttr) {
return new SearchHitFieldRef(aliasName(fieldAttr), fieldAttr.field().getDataType(), fieldAttr.field().hasDocValues());
return new SearchHitFieldRef(aliasName(fieldAttr), fieldAttr.field().getDataType(), fieldAttr.field().isAggregatable());
}
private Tuple<QueryContainer, FieldExtraction> nestedHitFieldRef(FieldAttribute attr) {
@ -181,10 +181,10 @@ public class QueryContainer {
String name = aliasName(attr);
Query q = rewriteToContainNestedField(query, attr.location(),
attr.nestedParent().name(), name, attr.field().hasDocValues());
attr.nestedParent().name(), name, attr.field().isAggregatable());
SearchHitFieldRef nestedFieldRef = new SearchHitFieldRef(name, attr.field().getDataType(),
attr.field().hasDocValues(), attr.parent().name());
attr.field().isAggregatable(), attr.parent().name());
nestedRefs.add(nestedFieldRef);
return new Tuple<>(new QueryContainer(q, aggs, columns, aliases, pseudoFunctions, scalarFunctions, sort, limit), nestedFieldRef);

View File

@ -127,7 +127,7 @@ public class SqlSession {
listener.onFailure(new MappingException("Cannot inspect indices in cluster/catalog [{}]", cluster));
}
indexResolver.resolveWithSameMapping(table.index(), null,
indexResolver.resolveAsMergedMapping(table.index(), null,
wrap(indexResult -> listener.onResponse(action.apply(indexResult)), listener::onFailure));
} else {
try {

View File

@ -15,14 +15,14 @@ import java.util.Objects;
*/
public class EsField {
private final DataType esDataType;
private final boolean hasDocValues;
private final boolean aggregatable;
private final Map<String, EsField> properties;
private final String name;
public EsField(String name, DataType esDataType, Map<String, EsField> properties, boolean hasDocValues) {
public EsField(String name, DataType esDataType, Map<String, EsField> properties, boolean aggregatable) {
this.name = name;
this.esDataType = esDataType;
this.hasDocValues = hasDocValues;
this.aggregatable = aggregatable;
this.properties = properties;
}
@ -41,10 +41,10 @@ public class EsField {
}
/**
* The field supports doc values
* This field can be aggregated
*/
public boolean hasDocValues() {
return hasDocValues;
public boolean isAggregatable() {
return aggregatable;
}
/**
@ -85,19 +85,27 @@ public class EsField {
return true;
}
@Override
public String toString() {
return name + "@" + esDataType.name() + "=" + properties;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
EsField field = (EsField) o;
return hasDocValues == field.hasDocValues &&
esDataType == field.esDataType &&
Objects.equals(properties, field.properties) &&
Objects.equals(name, field.name);
return aggregatable == field.aggregatable && esDataType == field.esDataType
&& Objects.equals(name, field.name)
&& Objects.equals(properties, field.properties);
}
@Override
public int hashCode() {
return Objects.hash(esDataType, hasDocValues, properties, name);
return Objects.hash(esDataType, aggregatable, properties, name);
}
}
}

View File

@ -5,12 +5,18 @@
*/
package org.elasticsearch.xpack.sql.analysis.index;
import org.elasticsearch.action.fieldcaps.FieldCapabilities;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.EsField;
import org.elasticsearch.xpack.sql.type.TypesTests;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
public class IndexResolverTests extends ESTestCase {
@ -21,40 +27,175 @@ public class IndexResolverTests extends ESTestCase {
assertEquals(oneMapping, sameMapping);
String wildcard = "*";
IndexResolution resolution = IndexResolver.merge(
Arrays.asList(IndexResolution.valid(new EsIndex("a", oneMapping)), IndexResolution.valid(new EsIndex("b", sameMapping))),
wildcard);
IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fromMappings(
new EsIndex("a", oneMapping),
new EsIndex("b", sameMapping)));
assertTrue(resolution.isValid());
EsIndex esIndex = resolution.get();
assertEquals(wildcard, esIndex.name());
assertEquals(sameMapping, esIndex.mapping());
assertEqualsMaps(oneMapping, resolution.get().mapping());
}
public void testMergeDifferentMapping() throws Exception {
Map<String, EsField> oneMapping = TypesTests.loadMapping("mapping-basic.json", true);
Map<String, EsField> sameMapping = TypesTests.loadMapping("mapping-basic.json", true);
Map<String, EsField> differentMapping = TypesTests.loadMapping("mapping-numeric.json", true);
public void testMergeCompatibleMapping() throws Exception {
Map<String, EsField> basicMapping = TypesTests.loadMapping("mapping-basic.json", true);
Map<String, EsField> numericMapping = TypesTests.loadMapping("mapping-numeric.json", true);
assertNotSame(oneMapping, sameMapping);
assertEquals(oneMapping, sameMapping);
assertNotEquals(oneMapping, differentMapping);
assertNotEquals(basicMapping, numericMapping);
String wildcard = "*";
IndexResolution resolution = IndexResolver.merge(
Arrays.asList(IndexResolution.valid(new EsIndex("a", oneMapping)),
IndexResolution.valid(new EsIndex("b", sameMapping)),
IndexResolution.valid(new EsIndex("diff", differentMapping))),
wildcard);
IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fromMappings(
new EsIndex("basic", basicMapping),
new EsIndex("numeric", numericMapping)));
assertTrue(resolution.isValid());
assertEquals(basicMapping.size() + numericMapping.size(), resolution.get().mapping().size());
}
public void testMergeIncompatibleTypes() throws Exception {
Map<String, EsField> basicMapping = TypesTests.loadMapping("mapping-basic.json", true);
Map<String, EsField> incompatible = TypesTests.loadMapping("mapping-basic-incompatible.json");
assertNotEquals(basicMapping, incompatible);
String wildcard = "*";
IndexResolution resolution = IndexResolver.mergedMapping(wildcard,
fromMappings(new EsIndex("basic", basicMapping), new EsIndex("incompatible", incompatible)));
assertFalse(resolution.isValid());
MappingException ex = expectThrows(MappingException.class, () -> resolution.get());
MappingException me = expectThrows(MappingException.class, () -> resolution.get());
assertEquals(
"[*] points to indices [a] and [diff] which have different mappings. "
+ "When using multiple indices, the mappings must be identical.",
ex.getMessage());
"[*] points to indices with incompatible mappings;"
+ " field [gender] is mapped in [2] different ways: [text] in [incompatible], [keyword] in [basic]",
me.getMessage());
}
}
/**
 * Two indices map the same field with the same type but differing capabilities
 * (doc-values disabled in one): the merged resolution must be invalid and report
 * the offending index.
 */
public void testMergeIncompatibleCapabilities() throws Exception {
    Map<String, EsField> basicMapping = TypesTests.loadMapping("mapping-basic.json", true);
    Map<String, EsField> incompatible = TypesTests.loadMapping("mapping-basic-nodocvalues.json", true);

    assertNotEquals(basicMapping, incompatible);

    String wildcard = "*";
    IndexResolution resolution = IndexResolver.mergedMapping(wildcard,
            fromMappings(new EsIndex("basic", basicMapping), new EsIndex("incompatible", incompatible)));

    assertFalse(resolution.isValid());

    // accessing an invalid resolution must throw, carrying the incompatibility details
    MappingException me = expectThrows(MappingException.class, () -> resolution.get());
    assertEquals(
            "[*] points to indices with incompatible mappings: field [emp_no] is aggregateable except in [incompatible]",
            me.getMessage());
}
/**
 * Merging a single index with dotted (multi-level object) fields must reproduce
 * the original hierarchical mapping unchanged.
 */
public void testMultiLevelObjectMappings() throws Exception {
    Map<String, EsField> dottedMapping = TypesTests.loadMapping("mapping-dotted-field.json", true);

    String wildcard = "*";
    IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fromMappings(new EsIndex("a", dottedMapping)));

    assertTrue(resolution.isValid());
    assertEqualsMaps(dottedMapping, resolution.get().mapping());
}
/**
 * Merging a single index with nested fields must reproduce the original
 * hierarchical mapping unchanged.
 */
public void testMultiLevelNestedMappings() throws Exception {
    Map<String, EsField> nestedMapping = TypesTests.loadMapping("mapping-nested.json", true);

    String wildcard = "*";
    IndexResolution resolution = IndexResolver.mergedMapping(wildcard, fromMappings(new EsIndex("a", nestedMapping)));

    assertTrue(resolution.isValid());
    assertEqualsMaps(nestedMapping, resolution.get().mapping());
}
/**
 * Simulates a field-caps response from the given indices: field name -> ES type -> capabilities.
 * For fields mapped under more than one type, records which index contributes which type
 * so that error messages can point at the offenders.
 */
private static Map<String, Map<String, FieldCapabilities>> fromMappings(EsIndex... indices) {
    Map<String, Map<String, FieldCapabilities>> merged = new HashMap<>();

    // first pass: create the field caps
    for (EsIndex index : indices) {
        for (EsField field : index.mapping().values()) {
            addFieldCaps(null, field, index.name(), merged);
        }
    }

    // second pass: update indices for fields mapped in multiple ways
    for (Entry<String, Map<String, FieldCapabilities>> entry : merged.entrySet()) {
        String fieldName = entry.getKey();
        Map<String, FieldCapabilities> caps = entry.getValue();
        if (caps.size() > 1) {
            for (EsIndex index : indices) {
                EsField field = index.mapping().get(fieldName);
                // guard against indices that do not map this (top-level) field at all
                // (previously this dereferenced a null field and threw an NPE)
                if (field == null) {
                    continue;
                }
                UpdateableFieldCapabilities fieldCaps = (UpdateableFieldCapabilities) caps.get(field.getDataType().esType);
                fieldCaps.indices.add(index.name());
            }
            //TODO: what about nonAgg/SearchIndices?
        }
    }

    return merged;
}
/**
 * Registers {@code field} (and, recursively, its sub-fields) in {@code merged} under its
 * full dotted name, creating an {@link UpdateableFieldCapabilities} per ES type and tracking
 * indices where the field is not aggregatable.
 *
 * @param parent    dotted prefix of the enclosing field, or null for top-level fields
 * @param field     field to register
 * @param indexName name of the index contributing this field
 * @param merged    accumulator: field name -> ES type -> capabilities
 */
private static void addFieldCaps(String parent, EsField field, String indexName, Map<String, Map<String, FieldCapabilities>> merged) {
    String fieldName = parent != null ? parent + "." + field.getName() : field.getName();
    // idiom: computeIfAbsent instead of the get/null-check/put dance (matches the inner map below)
    Map<String, FieldCapabilities> map = merged.computeIfAbsent(fieldName, k -> new HashMap<>());
    FieldCapabilities caps = map.computeIfAbsent(field.getDataType().esType,
            esType -> new UpdateableFieldCapabilities(fieldName, esType,
                    isSearchable(field.getDataType()),
                    isAggregatable(field.getDataType())));

    if (!field.isAggregatable()) {
        ((UpdateableFieldCapabilities) caps).nonAggregatableIndices.add(indexName);
    }

    for (EsField nested : field.getProperties().values()) {
        addFieldCaps(fieldName, nested, indexName, merged);
    }
}
// Test fixture convention: primitive SQL types are treated as searchable.
private static boolean isSearchable(DataType type) {
    return type.isPrimitive();
}
// Test fixture convention: numeric, keyword and date types are treated as aggregatable.
private static boolean isAggregatable(DataType type) {
    return type.isNumeric() || type == DataType.KEYWORD || type == DataType.DATE;
}
/**
 * Mutable {@link FieldCapabilities} for tests: the index lists can be filled in after
 * construction, and the accessors report them as arrays (null when empty, matching the
 * real field-caps behavior).
 */
private static class UpdateableFieldCapabilities extends FieldCapabilities {
    List<String> indices = new ArrayList<>();
    List<String> nonSearchableIndices = new ArrayList<>();
    List<String> nonAggregatableIndices = new ArrayList<>();

    UpdateableFieldCapabilities(String name, String type, boolean isSearchable, boolean isAggregatable) {
        super(name, type, isSearchable, isAggregatable);
    }

    @Override
    public String[] indices() {
        return asArrayOrNull(indices);
    }

    @Override
    public String[] nonSearchableIndices() {
        return asArrayOrNull(nonSearchableIndices);
    }

    @Override
    public String[] nonAggregatableIndices() {
        return asArrayOrNull(nonAggregatableIndices);
    }

    // empty list -> null, mirroring how real responses omit the arrays entirely
    private static String[] asArrayOrNull(List<String> list) {
        return list.isEmpty() ? null : list.toArray(new String[list.size()]);
    }

    @Override
    public String toString() {
        return String.format(Locale.ROOT, "%s,%s->%s", getName(), getType(), indices);
    }
}
/**
 * Asserts that every entry of {@code left} is present in {@code right} with an equal value,
 * reporting the offending key on mismatch.
 */
private static <K, V> void assertEqualsMaps(Map<K, V> left, Map<K, V> right) {
    left.forEach((key, expected) -> {
        V actual = right.get(key);
        assertEquals(String.format(Locale.ROOT, "Key [%s] has different values", key), expected, actual);
    });
}
}

View File

@ -59,10 +59,10 @@ public class TypesTests extends ESTestCase {
assertThat(mapping.size(), is(1));
EsField type = mapping.get("full_name");
assertThat(type, instanceOf(TextEsField.class));
assertThat(type.hasDocValues(), is(false));
assertThat(type.isAggregatable(), is(false));
TextEsField ttype = (TextEsField) type;
assertThat(type.getPrecision(), is(Integer.MAX_VALUE));
assertThat(ttype.hasDocValues(), is(false));
assertThat(ttype.isAggregatable(), is(false));
}
public void testKeywordField() {
@ -71,7 +71,7 @@ public class TypesTests extends ESTestCase {
assertThat(mapping.size(), is(1));
EsField field = mapping.get("full_name");
assertThat(field, instanceOf(KeywordEsField.class));
assertThat(field.hasDocValues(), is(true));
assertThat(field.isAggregatable(), is(true));
assertThat(field.getPrecision(), is(256));
}
@ -81,7 +81,7 @@ public class TypesTests extends ESTestCase {
assertThat(mapping.size(), is(1));
EsField field = mapping.get("date");
assertThat(field.getDataType(), is(DATE));
assertThat(field.hasDocValues(), is(true));
assertThat(field.isAggregatable(), is(true));
assertThat(field.getPrecision(), is(24));
DateEsField dfield = (DateEsField) field;
@ -95,7 +95,7 @@ public class TypesTests extends ESTestCase {
assertThat(mapping.size(), is(1));
EsField field = mapping.get("date");
assertThat(field.getDataType(), is(DATE));
assertThat(field.hasDocValues(), is(true));
assertThat(field.isAggregatable(), is(true));
DateEsField dfield = (DateEsField) field;
// default types
assertThat(dfield.getFormats(), hasSize(2));
@ -107,7 +107,7 @@ public class TypesTests extends ESTestCase {
assertThat(mapping.size(), is(1));
EsField field = mapping.get("date");
assertThat(field.getDataType(), is(DATE));
assertThat(field.hasDocValues(), is(true));
assertThat(field.isAggregatable(), is(true));
DateEsField dfield = (DateEsField) field;
// default types
assertThat(dfield.getFormats(), hasSize(1));
@ -120,7 +120,7 @@ public class TypesTests extends ESTestCase {
EsField field = mapping.get("session_id");
assertThat(field, instanceOf(KeywordEsField.class));
assertThat(field.getPrecision(), is(15));
assertThat(field.hasDocValues(), is(false));
assertThat(field.isAggregatable(), is(false));
}
public void testDottedField() {

View File

@ -0,0 +1,22 @@
{
"properties" : {
"emp_no" : {
"type" : "long"
},
"first_name" : {
"type" : "text"
},
"gender" : {
"type" : "text"
},
"languages" : {
"type" : "byte"
},
"last_name" : {
"type" : "text"
},
"salary" : {
"type" : "integer"
}
}
}

View File

@ -0,0 +1,23 @@
{
"properties" : {
"emp_no" : {
"type" : "integer",
"doc_values" : false
},
"first_name" : {
"type" : "text"
},
"gender" : {
"type" : "keyword"
},
"languages" : {
"type" : "byte"
},
"last_name" : {
"type" : "text"
},
"salary" : {
"type" : "integer"
}
}
}

View File

@ -7,8 +7,8 @@ rest_minimal:
privileges: [read, "indices:admin/get"]
# end::rest
# tag::cli_jdbc
cli_or_jdbc_minimal:
# tag::cli_drivers
cli_or_drivers_minimal:
cluster:
- "cluster:monitor/main"
indices:
@ -16,7 +16,7 @@ cli_or_jdbc_minimal:
privileges: [read, "indices:admin/get"]
- names: bort
privileges: [read, "indices:admin/get"]
# end::cli_jdbc
# end::cli_drivers
read_something_else:
cluster:
@ -82,6 +82,6 @@ no_get_index:
- "cluster:monitor/main"
indices:
- names: test
privileges: [read]
privileges: [monitor]
- names: bort
privileges: [read]
privileges: [monitor]

View File

@ -7,9 +7,10 @@ package org.elasticsearch.xpack.qa.sql.security;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.xpack.qa.sql.cli.ErrorsTestCase;
import org.elasticsearch.xpack.qa.sql.cli.EmbeddedCli;
import org.elasticsearch.xpack.qa.sql.cli.EmbeddedCli.SecurityConfig;
import org.elasticsearch.xpack.qa.sql.cli.ErrorsTestCase;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.file.Files;
@ -20,7 +21,6 @@ import java.util.List;
import java.util.Map;
import static org.elasticsearch.xpack.qa.sql.cli.CliIntegrationTestCase.elasticsearchAddress;
import static org.hamcrest.Matchers.both;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.startsWith;
@ -53,7 +53,7 @@ public class CliSecurityIT extends SqlSecurityTestCase {
private static class CliActions implements Actions {
@Override
public String minimalPermissionsForAllActions() {
return "cli_or_jdbc_minimal";
return "cli_or_drivers_minimal";
}
private SecurityConfig userSecurity(String user) {
@ -121,12 +121,19 @@ public class CliSecurityIT extends SqlSecurityTestCase {
}
@Override
public void expectDescribe(Map<String, String> columns, String user) throws Exception {
public void expectDescribe(Map<String, List<String>> columns, String user) throws Exception {
try (EmbeddedCli cli = new EmbeddedCli(elasticsearchAddress(), true, userSecurity(user))) {
assertThat(cli.command("DESCRIBE test"), containsString("column | type"));
assertEquals("---------------+---------------", cli.readLine());
for (Map.Entry<String, String> column : columns.entrySet()) {
assertThat(cli.readLine(), both(startsWith(column.getKey())).and(containsString("|" + column.getValue())));
String output = cli.command("DESCRIBE test");
assertThat(output, containsString("column"));
assertThat(output, containsString("type"));
assertThat(output, containsString("mapping"));
assertThat(cli.readLine(), containsString("-+---------------+---------------"));
for (Map.Entry<String, List<String>> column : columns.entrySet()) {
String line = cli.readLine();
assertThat(line, startsWith(column.getKey()));
for (String value : column.getValue()) {
assertThat(line, containsString(value));
}
}
assertEquals("", cli.readLine());
}

View File

@ -118,7 +118,7 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
private static class JdbcActions implements Actions {
@Override
public String minimalPermissionsForAllActions() {
return "cli_or_jdbc_minimal";
return "cli_or_drivers_minimal";
}
@Override
@ -158,22 +158,26 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
}
@Override
public void expectDescribe(Map<String, String> columns, String user) throws Exception {
public void expectDescribe(Map<String, List<String>> columns, String user) throws Exception {
try (Connection h2 = LocalH2.anonymousDb();
Connection es = es(userProperties(user))) {
// h2 doesn't have the same sort of DESCRIBE that we have so we emulate it
h2.createStatement().executeUpdate("CREATE TABLE mock (column VARCHAR, type VARCHAR)");
h2.createStatement().executeUpdate("CREATE TABLE mock (column VARCHAR, type VARCHAR, mapping VARCHAR)");
if (columns.size() > 0) {
StringBuilder insert = new StringBuilder();
insert.append("INSERT INTO mock (column, type) VALUES ");
insert.append("INSERT INTO mock (column, type, mapping) VALUES ");
boolean first = true;
for (Map.Entry<String, String> column : columns.entrySet()) {
for (Map.Entry<String, List<String>> column : columns.entrySet()) {
if (first) {
first = false;
} else {
insert.append(", ");
}
insert.append("('").append(column.getKey()).append("', '").append(column.getValue()).append("')");
insert.append("('").append(column.getKey()).append("'");
for (String value : column.getValue()) {
insert.append(", '").append(value).append("'");
}
insert.append(")");
}
h2.createStatement().executeUpdate(insert.toString());
}
@ -250,7 +254,7 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
// Metadata methods only available to JDBC
public void testMetaDataGetTablesWithFullAccess() throws Exception {
createUser("full_access", "cli_or_jdbc_minimal");
createUser("full_access", "cli_or_drivers_minimal");
expectActionMatchesAdmin(
con -> con.getMetaData().getTables("%", "%", "%t", null),
@ -283,7 +287,7 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
}
public void testMetaDataGetColumnsWorksAsFullAccess() throws Exception {
createUser("full_access", "cli_or_jdbc_minimal");
createUser("full_access", "cli_or_drivers_minimal");
expectActionMatchesAdmin(
con -> con.getMetaData().getColumns(null, "%", "%t", "%"),

View File

@ -93,15 +93,19 @@ public class RestSqlSecurityIT extends SqlSecurityTestCase {
}
@Override
public void expectDescribe(Map<String, String> columns, String user) throws Exception {
public void expectDescribe(Map<String, List<String>> columns, String user) throws Exception {
String mode = randomMode();
Map<String, Object> expected = new HashMap<>(3);
expected.put("columns", Arrays.asList(
columnInfo(mode, "column", "keyword", JDBCType.VARCHAR, 0),
columnInfo(mode, "type", "keyword", JDBCType.VARCHAR, 0)));
columnInfo(mode, "type", "keyword", JDBCType.VARCHAR, 0),
columnInfo(mode, "mapping", "keyword", JDBCType.VARCHAR, 0)));
List<List<String>> rows = new ArrayList<>(columns.size());
for (Map.Entry<String, String> column : columns.entrySet()) {
rows.add(Arrays.asList(column.getKey(), column.getValue()));
for (Map.Entry<String, List<String>> column : columns.entrySet()) {
List<String> cols = new ArrayList<>();
cols.add(column.getKey());
cols.addAll(column.getValue());
rows.add(cols);
}
expected.put("rows", rows);
@ -232,7 +236,7 @@ public class RestSqlSecurityIT extends SqlSecurityTestCase {
assertEquals(404, e.getResponse().getStatusLine().getStatusCode());
createAuditLogAsserter()
.expectSqlCompositeAction("test_admin", "test")
.expectSqlCompositeActionFieldCaps("test_admin", "test")
.expect(true, SQL_ACTION_NAME, "full_access", empty())
// one scroll access denied per shard
.expect("access_denied", SQL_ACTION_NAME, "full_access", "default_native", empty(), "InternalScrollSearchRequest")

View File

@ -10,6 +10,8 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.action.admin.indices.get.GetIndexAction;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesAction;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.common.Strings;
@ -40,11 +42,12 @@ import java.util.Map;
import java.util.TreeMap;
import java.util.function.Function;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.is;
public abstract class SqlSecurityTestCase extends ESRestTestCase {
/**
@ -65,7 +68,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
* to 1 and completely scrolls the results.
*/
void expectScrollMatchesAdmin(String adminSql, String user, String userSql) throws Exception;
void expectDescribe(Map<String, String> columns, String user) throws Exception;
void expectDescribe(Map<String, List<String>> columns, String user) throws Exception;
void expectShowTables(List<String> tables, String user) throws Exception;
void expectForbidden(String user, String sql) throws Exception;
void expectUnknownIndex(String user, String sql) throws Exception;
@ -196,7 +199,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
public void testQueryWorksAsAdmin() throws Exception {
actions.queryWorksAsAdmin();
createAuditLogAsserter()
.expectSqlCompositeAction("test_admin", "test")
.expectSqlCompositeActionFieldCaps("test_admin", "test")
.assertLogs();
}
@ -205,8 +208,8 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
actions.expectMatchesAdmin("SELECT * FROM test ORDER BY a", "full_access", "SELECT * FROM test ORDER BY a");
createAuditLogAsserter()
.expectSqlCompositeAction("test_admin", "test")
.expectSqlCompositeAction("full_access", "test")
.expectSqlCompositeActionFieldCaps("test_admin", "test")
.expectSqlCompositeActionFieldCaps("full_access", "test")
.assertLogs();
}
@ -215,12 +218,12 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
actions.expectScrollMatchesAdmin("SELECT * FROM test ORDER BY a", "full_access", "SELECT * FROM test ORDER BY a");
createAuditLogAsserter()
.expectSqlCompositeAction("test_admin", "test")
.expectSqlCompositeActionFieldCaps("test_admin", "test")
/* Scrolling doesn't have to access the index again, at least not through sql.
* If we asserted query and scroll logs then we would see the scroll. */
.expect(true, SQL_ACTION_NAME, "test_admin", empty())
.expect(true, SQL_ACTION_NAME, "test_admin", empty())
.expectSqlCompositeAction("full_access", "test")
.expectSqlCompositeActionFieldCaps("full_access", "test")
.expect(true, SQL_ACTION_NAME, "full_access", empty())
.expect(true, SQL_ACTION_NAME, "full_access", empty())
.assertLogs();
@ -243,7 +246,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
//This user has permission to run sql queries so they are given preliminary authorization
.expect(true, SQL_ACTION_NAME, "wrong_access", empty())
//the following get index is granted too but against the no indices placeholder, as ignore_unavailable=true
.expect(true, GetIndexAction.NAME, "wrong_access", hasItems("*", "-*"))
.expect(true, FieldCapabilitiesAction.NAME, "wrong_access", hasItems("*", "-*"))
.assertLogs();
}
@ -252,8 +255,8 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
actions.expectMatchesAdmin("SELECT a FROM test ORDER BY a", "only_a", "SELECT * FROM test ORDER BY a");
createAuditLogAsserter()
.expectSqlCompositeAction("test_admin", "test")
.expectSqlCompositeAction("only_a", "test")
.expectSqlCompositeActionFieldCaps("test_admin", "test")
.expectSqlCompositeActionFieldCaps("only_a", "test")
.assertLogs();
}
@ -262,18 +265,18 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
actions.expectScrollMatchesAdmin("SELECT a FROM test ORDER BY a", "only_a", "SELECT * FROM test ORDER BY a");
createAuditLogAsserter()
.expectSqlCompositeAction("test_admin", "test")
.expectSqlCompositeActionFieldCaps("test_admin", "test")
/* Scrolling doesn't have to access the index again, at least not through sql.
* If we asserted query and scroll logs then we would see the scroll. */
.expect(true, SQL_ACTION_NAME, "test_admin", empty())
.expect(true, SQL_ACTION_NAME, "test_admin", empty())
.expectSqlCompositeAction("only_a", "test")
.expectSqlCompositeActionFieldCaps("only_a", "test")
.expect(true, SQL_ACTION_NAME, "only_a", empty())
.expect(true, SQL_ACTION_NAME, "only_a", empty())
.assertLogs();
}
public void testQueryStringSingeFieldGrantedWrongRequested() throws Exception {
public void testQueryStringSingleFieldGrantedWrongRequested() throws Exception {
createUser("only_a", "read_test_a");
actions.expectUnknownColumn("only_a", "SELECT c FROM test", "c");
@ -284,7 +287,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
* out but it failed in SQL because it couldn't compile the
* query without the metadata for the missing field. */
createAuditLogAsserter()
.expectSqlCompositeAction("only_a", "test")
.expectSqlCompositeActionFieldCaps("only_a", "test")
.assertLogs();
}
@ -293,8 +296,8 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
actions.expectMatchesAdmin("SELECT a, b FROM test ORDER BY a", "not_c", "SELECT * FROM test ORDER BY a");
createAuditLogAsserter()
.expectSqlCompositeAction("test_admin", "test")
.expectSqlCompositeAction("not_c", "test")
.expectSqlCompositeActionFieldCaps("test_admin", "test")
.expectSqlCompositeActionFieldCaps("not_c", "test")
.assertLogs();
}
@ -303,12 +306,12 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
actions.expectScrollMatchesAdmin("SELECT a, b FROM test ORDER BY a", "not_c", "SELECT * FROM test ORDER BY a");
createAuditLogAsserter()
.expectSqlCompositeAction("test_admin", "test")
.expectSqlCompositeActionFieldCaps("test_admin", "test")
/* Scrolling doesn't have to access the index again, at least not through sql.
* If we asserted query and scroll logs then we would see the scroll. */
.expect(true, SQL_ACTION_NAME, "test_admin", empty())
.expect(true, SQL_ACTION_NAME, "test_admin", empty())
.expectSqlCompositeAction("not_c", "test")
.expectSqlCompositeActionFieldCaps("not_c", "test")
.expect(true, SQL_ACTION_NAME, "not_c", empty())
.expect(true, SQL_ACTION_NAME, "not_c", empty())
.assertLogs();
@ -325,7 +328,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
* out but it failed in SQL because it couldn't compile the
* query without the metadata for the missing field. */
createAuditLogAsserter()
.expectSqlCompositeAction("not_c", "test")
.expectSqlCompositeActionFieldCaps("not_c", "test")
.assertLogs();
}
@ -334,15 +337,15 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
actions.expectMatchesAdmin("SELECT * FROM test WHERE c != 3 ORDER BY a", "no_3s", "SELECT * FROM test ORDER BY a");
createAuditLogAsserter()
.expectSqlCompositeAction("test_admin", "test")
.expectSqlCompositeAction("no_3s", "test")
.expectSqlCompositeActionFieldCaps("test_admin", "test")
.expectSqlCompositeActionFieldCaps("no_3s", "test")
.assertLogs();
}
public void testShowTablesWorksAsAdmin() throws Exception {
actions.expectShowTables(Arrays.asList("bort", "test"), null);
createAuditLogAsserter()
.expectSqlCompositeAction("test_admin", "bort", "test")
.expectSqlCompositeActionGetIndex("test_admin", "bort", "test")
.assertLogs();
}
@ -351,8 +354,8 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
actions.expectMatchesAdmin("SHOW TABLES LIKE '%t'", "full_access", "SHOW TABLES");
createAuditLogAsserter()
.expectSqlCompositeAction("test_admin", "bort", "test")
.expectSqlCompositeAction("full_access", "bort", "test")
.expectSqlCompositeActionGetIndex("test_admin", "bort", "test")
.expectSqlCompositeActionGetIndex("full_access", "bort", "test")
.assertLogs();
}
@ -370,8 +373,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
actions.expectMatchesAdmin("SHOW TABLES LIKE 'bort'", "read_bort", "SHOW TABLES");
createAuditLogAsserter()
.expectSqlCompositeAction("test_admin", "bort")
.expectSqlCompositeAction("read_bort", "bort")
.expectSqlCompositeActionGetIndex("test_admin", "bort").expectSqlCompositeActionGetIndex("read_bort", "bort")
.assertLogs();
}
@ -388,13 +390,13 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
}
public void testDescribeWorksAsAdmin() throws Exception {
Map<String, String> expected = new TreeMap<>();
expected.put("a", "BIGINT");
expected.put("b", "BIGINT");
expected.put("c", "BIGINT");
Map<String, List<String>> expected = new TreeMap<>();
expected.put("a", asList("BIGINT", "LONG"));
expected.put("b", asList("BIGINT", "LONG"));
expected.put("c", asList("BIGINT", "LONG"));
actions.expectDescribe(expected, null);
createAuditLogAsserter()
.expectSqlCompositeAction("test_admin", "test")
.expectSqlCompositeActionFieldCaps("test_admin", "test")
.assertLogs();
}
@ -403,8 +405,8 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
actions.expectMatchesAdmin("DESCRIBE test", "full_access", "DESCRIBE test");
createAuditLogAsserter()
.expectSqlCompositeAction("test_admin", "test")
.expectSqlCompositeAction("full_access", "test")
.expectSqlCompositeActionFieldCaps("test_admin", "test")
.expectSqlCompositeActionFieldCaps("full_access", "test")
.assertLogs();
}
@ -425,28 +427,28 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
//This user has permission to run sql queries so they are given preliminary authorization
.expect(true, SQL_ACTION_NAME, "wrong_access", empty())
//the following get index is granted too but against the no indices placeholder, as ignore_unavailable=true
.expect(true, GetIndexAction.NAME, "wrong_access", hasItems("*", "-*"))
.expect(true, FieldCapabilitiesAction.NAME, "wrong_access", hasItems("*", "-*"))
.assertLogs();
}
public void testDescribeSingleFieldGranted() throws Exception {
createUser("only_a", "read_test_a");
actions.expectDescribe(singletonMap("a", "BIGINT"), "only_a");
actions.expectDescribe(singletonMap("a", asList("BIGINT", "LONG")), "only_a");
createAuditLogAsserter()
.expectSqlCompositeAction("only_a", "test")
.expectSqlCompositeActionFieldCaps("only_a", "test")
.assertLogs();
}
public void testDescribeSingleFieldExcepted() throws Exception {
createUser("not_c", "read_test_a_and_b");
Map<String, String> expected = new TreeMap<>();
expected.put("a", "BIGINT");
expected.put("b", "BIGINT");
Map<String, List<String>> expected = new TreeMap<>();
expected.put("a", asList("BIGINT", "LONG"));
expected.put("b", asList("BIGINT", "LONG"));
actions.expectDescribe(expected, "not_c");
createAuditLogAsserter()
.expectSqlCompositeAction("not_c", "test")
.expectSqlCompositeActionFieldCaps("not_c", "test")
.assertLogs();
}
@ -455,8 +457,8 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
actions.expectMatchesAdmin("DESCRIBE test", "no_3s", "DESCRIBE test");
createAuditLogAsserter()
.expectSqlCompositeAction("test_admin", "test")
.expectSqlCompositeAction("no_3s", "test")
.expectSqlCompositeActionFieldCaps("test_admin", "test")
.expectSqlCompositeActionFieldCaps("no_3s", "test")
.assertLogs();
}
@ -497,12 +499,18 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
protected class AuditLogAsserter {
protected final List<Function<Map<String, Object>, Boolean>> logCheckers = new ArrayList<>();
public AuditLogAsserter expectSqlCompositeAction(String user, String... indices) {
public AuditLogAsserter expectSqlCompositeActionGetIndex(String user, String... indices) {
expect(true, SQL_ACTION_NAME, user, empty());
expect(true, GetIndexAction.NAME, user, hasItems(indices));
return this;
}
public AuditLogAsserter expectSqlCompositeActionFieldCaps(String user, String... indices) {
expect(true, SQL_ACTION_NAME, user, empty());
expect(true, FieldCapabilitiesAction.NAME, user, hasItems(indices));
return this;
}
public AuditLogAsserter expect(boolean granted, String action, String principal,
Matcher<? extends Iterable<? extends String>> indicesMatcher) {
String request;
@ -513,6 +521,9 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
case GetIndexAction.NAME:
request = GetIndexRequest.class.getSimpleName();
break;
case FieldCapabilitiesAction.NAME:
request = FieldCapabilitiesRequest.class.getSimpleName();
break;
default:
throw new IllegalArgumentException("Unknown action [" + action + "]");
}
@ -523,7 +534,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
public AuditLogAsserter expect(String eventAction, String action, String principal, String realm,
Matcher<? extends Iterable<? extends String>> indicesMatcher, String request) {
logCheckers.add(m ->
logCheckers.add(m ->
eventAction.equals(m.get("event.action"))
&& action.equals(m.get("action"))
&& principal.equals(m.get("user.name"))
@ -564,7 +575,9 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
continue;
}
assertThat(log.containsKey("action"), is(true));
if (false == (SQL_ACTION_NAME.equals(log.get("action")) || GetIndexAction.NAME.equals(log.get("action")))) {
if (false == (SQL_ACTION_NAME.equals(log.get("action"))
|| GetIndexAction.NAME.equals(log.get("action"))
|| FieldCapabilitiesAction.NAME.equals(log.get("action")))) {
// TODO we may want to extend this and the assertions to SearchAction.NAME as well
continue;
}

View File

@ -5,13 +5,13 @@
*/
package org.elasticsearch.xpack.qa.sql.cli;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.xpack.sql.cli.Cli;
import org.elasticsearch.xpack.sql.cli.CliTerminal;
import org.elasticsearch.xpack.sql.cli.JLineTerminal;
@ -49,7 +49,7 @@ import static org.junit.Assert.fail;
* and doesn't run super frequently.
*/
public class EmbeddedCli implements Closeable {
private static final Logger logger = Loggers.getLogger(EmbeddedCli.class);
private static final Logger logger = LogManager.getLogger(EmbeddedCli.class);
private final Thread exec;
private final Cli cli;
@ -151,7 +151,9 @@ public class EmbeddedCli implements Closeable {
}
// Throw out the logo
while (false == readLine().contains("SQL"));
while (false == readLine().contains("SQL")) {
;
}
assertConnectionTest();
} catch (IOException e) {
try {

View File

@ -5,9 +5,10 @@
*/
package org.elasticsearch.xpack.qa.sql.cli;
import java.io.IOException;
import org.elasticsearch.client.Request;
import java.io.IOException;
import static org.hamcrest.Matchers.startsWith;
/**
@ -43,7 +44,8 @@ public abstract class ErrorsTestCase extends CliIntegrationTestCase implements o
client().performRequest(request);
assertFoundOneProblem(command("SELECT * FROM test"));
assertEquals("line 1:15: [test] doesn't have any types so it is incompatible with sql" + END, readLine());
//assertEquals("line 1:15: [test] doesn't have any types so it is incompatible with sql" + END, readLine());
assertEquals("line 1:15: Unknown index [test]" + END, readLine());
}
@Override

View File

@ -43,7 +43,7 @@ public class DataLoader {
protected static void loadEmpDatasetIntoEs(RestClient client) throws Exception {
loadEmpDatasetIntoEs(client, "test_emp", "employees");
loadEmpDatasetIntoEs(client, "test_emp_copy", "employees");
loadEmpDatasetWithExtraIntoEs(client, "test_emp_copy", "employees");
makeAlias(client, "test_alias", "test_emp", "test_emp_copy");
makeAlias(client, "test_alias_emp", "test_emp", "test_emp_copy");
}
@ -63,6 +63,14 @@ public class DataLoader {
}
protected static void loadEmpDatasetIntoEs(RestClient client, String index, String fileName) throws Exception {
loadEmpDatasetIntoEs(client, index, fileName, false);
}
protected static void loadEmpDatasetWithExtraIntoEs(RestClient client, String index, String fileName) throws Exception {
loadEmpDatasetIntoEs(client, index, fileName, true);
}
private static void loadEmpDatasetIntoEs(RestClient client, String index, String fileName, boolean extraFields) throws Exception {
Request request = new Request("PUT", "/" + index);
XContentBuilder createIndex = JsonXContent.contentBuilder().startObject();
createIndex.startObject("settings");
@ -76,10 +84,26 @@ public class DataLoader {
{
createIndex.startObject("properties");
{
createIndex.startObject("emp_no").field("type", "integer").endObject();
createIndex.startObject("emp_no").field("type", "integer");
if (extraFields) {
createIndex.field("copy_to", "extra_no");
}
createIndex.endObject();
if (extraFields) {
createIndex.startObject("extra_no").field("type", "integer").endObject();
}
createString("first_name", createIndex);
createString("last_name", createIndex);
createIndex.startObject("gender").field("type", "keyword").endObject();
createIndex.startObject("gender").field("type", "keyword");
if (extraFields) {
createIndex.field("copy_to", "extra_gender");
}
createIndex.endObject();
if (extraFields) {
createIndex.startObject("extra_gender").field("type", "keyword").endObject();
}
createIndex.startObject("birth_date").field("type", "date").endObject();
createIndex.startObject("hire_date").field("type", "date").endObject();
createIndex.startObject("salary").field("type", "integer").endObject();

View File

@ -5,9 +5,10 @@
*/
package org.elasticsearch.xpack.qa.sql.jdbc;
import org.elasticsearch.client.Request;
import java.sql.Connection;
import java.sql.SQLException;
import org.elasticsearch.client.Request;
import static org.hamcrest.Matchers.startsWith;
@ -40,7 +41,9 @@ public class ErrorsTestCase extends JdbcIntegrationTestCase implements org.elast
try (Connection c = esJdbc()) {
SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT * FROM test").executeQuery());
assertEquals("Found 1 problem(s)\nline 1:15: [test] doesn't have any types so it is incompatible with sql", e.getMessage());
// see https://github.com/elastic/elasticsearch/issues/34719
//assertEquals("Found 1 problem(s)\nline 1:15: [test] doesn't have any types so it is incompatible with sql", e.getMessage());
assertEquals("Found 1 problem(s)\nline 1:15: Unknown index [test]", e.getMessage());
}
}

View File

@ -219,7 +219,9 @@ public abstract class RestSqlTestCase extends ESRestTestCase implements ErrorsTe
client().performRequest(request);
String mode = randomFrom("jdbc", "plain");
expectBadRequest(() -> runSql(mode, "SELECT * FROM test"),
containsString("1:15: [test] doesn't have any types so it is incompatible with sql"));
// see https://github.com/elastic/elasticsearch/issues/34719
//containsString("1:15: [test] doesn't have any types so it is incompatible with sql"));
containsString("1:15: Unknown index [test]"));
}
@Override

View File

@ -26,47 +26,51 @@ emp_no:i | first_name:s
describeAlias
DESCRIBE test_alias;
column:s | type:s
birth_date | TIMESTAMP
dep | STRUCT
dep.dep_id | VARCHAR
dep.dep_name | VARCHAR
dep.dep_name.keyword | VARCHAR
dep.from_date | TIMESTAMP
dep.to_date | TIMESTAMP
emp_no | INTEGER
first_name | VARCHAR
first_name.keyword | VARCHAR
gender | VARCHAR
hire_date | TIMESTAMP
languages | TINYINT
last_name | VARCHAR
last_name.keyword | VARCHAR
salary | INTEGER
column | type | mapping
--------------------+---------------+---------------
birth_date |TIMESTAMP |DATE
dep |STRUCT |NESTED
dep.dep_id |VARCHAR |KEYWORD
dep.dep_name |VARCHAR |TEXT
dep.dep_name.keyword|VARCHAR |KEYWORD
dep.from_date |TIMESTAMP |DATE
dep.to_date |TIMESTAMP |DATE
emp_no |INTEGER |INTEGER
extra_gender |VARCHAR |KEYWORD
extra_no |INTEGER |INTEGER
first_name |VARCHAR |TEXT
first_name.keyword |VARCHAR |KEYWORD
gender |VARCHAR |KEYWORD
hire_date |TIMESTAMP |DATE
languages |TINYINT |BYTE
last_name |VARCHAR |TEXT
last_name.keyword |VARCHAR |KEYWORD
salary |INTEGER |INTEGER
;
describePattern
DESCRIBE "test_*";
column:s | type:s
birth_date | TIMESTAMP
dep | STRUCT
dep.dep_id | VARCHAR
dep.dep_name | VARCHAR
dep.dep_name.keyword | VARCHAR
dep.from_date | TIMESTAMP
dep.to_date | TIMESTAMP
emp_no | INTEGER
first_name | VARCHAR
first_name.keyword | VARCHAR
gender | VARCHAR
hire_date | TIMESTAMP
languages | TINYINT
last_name | VARCHAR
last_name.keyword | VARCHAR
salary | INTEGER
column | type | mapping
--------------------+---------------+---------------
birth_date |TIMESTAMP |DATE
dep |STRUCT |NESTED
dep.dep_id |VARCHAR |KEYWORD
dep.dep_name |VARCHAR |TEXT
dep.dep_name.keyword|VARCHAR |KEYWORD
dep.from_date |TIMESTAMP |DATE
dep.to_date |TIMESTAMP |DATE
emp_no |INTEGER |INTEGER
extra_gender |VARCHAR |KEYWORD
extra_no |INTEGER |INTEGER
first_name |VARCHAR |TEXT
first_name.keyword |VARCHAR |KEYWORD
gender |VARCHAR |KEYWORD
hire_date |TIMESTAMP |DATE
languages |TINYINT |BYTE
last_name |VARCHAR |TEXT
last_name.keyword |VARCHAR |KEYWORD
salary |INTEGER |INTEGER
;
showAlias

View File

@ -200,89 +200,98 @@ test_alias_emp |ALIAS
describeSimpleLike
DESCRIBE LIKE 'test_emp';
column:s | type:s
birth_date | TIMESTAMP
dep | STRUCT
dep.dep_id | VARCHAR
dep.dep_name | VARCHAR
dep.dep_name.keyword | VARCHAR
dep.from_date | TIMESTAMP
dep.to_date | TIMESTAMP
emp_no | INTEGER
first_name | VARCHAR
first_name.keyword | VARCHAR
gender | VARCHAR
hire_date | TIMESTAMP
languages | TINYINT
last_name | VARCHAR
last_name.keyword | VARCHAR
salary | INTEGER
column | type | mapping
--------------------+---------------+---------------
birth_date |TIMESTAMP |DATE
dep |STRUCT |NESTED
dep.dep_id |VARCHAR |KEYWORD
dep.dep_name |VARCHAR |TEXT
dep.dep_name.keyword|VARCHAR |KEYWORD
dep.from_date |TIMESTAMP |DATE
dep.to_date |TIMESTAMP |DATE
emp_no |INTEGER |INTEGER
extra_gender |VARCHAR |KEYWORD
extra_no |INTEGER |INTEGER
first_name |VARCHAR |TEXT
first_name.keyword |VARCHAR |KEYWORD
gender |VARCHAR |KEYWORD
hire_date |TIMESTAMP |DATE
languages |TINYINT |BYTE
last_name |VARCHAR |TEXT
last_name.keyword |VARCHAR |KEYWORD
salary |INTEGER |INTEGER
;
describeMultiLike
DESCRIBE LIKE 'test_emp%';
column:s | type:s
birth_date | TIMESTAMP
dep | STRUCT
dep.dep_id | VARCHAR
dep.dep_name | VARCHAR
dep.dep_name.keyword | VARCHAR
dep.from_date | TIMESTAMP
dep.to_date | TIMESTAMP
emp_no | INTEGER
first_name | VARCHAR
first_name.keyword | VARCHAR
gender | VARCHAR
hire_date | TIMESTAMP
languages | TINYINT
last_name | VARCHAR
last_name.keyword | VARCHAR
salary | INTEGER
column | type | mapping
--------------------+---------------+---------------
birth_date |TIMESTAMP |DATE
dep |STRUCT |NESTED
dep.dep_id |VARCHAR |KEYWORD
dep.dep_name |VARCHAR |TEXT
dep.dep_name.keyword|VARCHAR |KEYWORD
dep.from_date |TIMESTAMP |DATE
dep.to_date |TIMESTAMP |DATE
emp_no |INTEGER |INTEGER
extra_gender |VARCHAR |KEYWORD
extra_no |INTEGER |INTEGER
first_name |VARCHAR |TEXT
first_name.keyword |VARCHAR |KEYWORD
gender |VARCHAR |KEYWORD
hire_date |TIMESTAMP |DATE
languages |TINYINT |BYTE
last_name |VARCHAR |TEXT
last_name.keyword |VARCHAR |KEYWORD
salary |INTEGER |INTEGER
;
describeSimpleIdentifier
DESCRIBE "test_emp";
column:s | type:s
birth_date | TIMESTAMP
dep | STRUCT
dep.dep_id | VARCHAR
dep.dep_name | VARCHAR
dep.dep_name.keyword | VARCHAR
dep.from_date | TIMESTAMP
dep.to_date | TIMESTAMP
emp_no | INTEGER
first_name | VARCHAR
first_name.keyword | VARCHAR
gender | VARCHAR
hire_date | TIMESTAMP
languages | TINYINT
last_name | VARCHAR
last_name.keyword | VARCHAR
salary | INTEGER
column | type | mapping
--------------------+---------------+---------------
birth_date |TIMESTAMP |DATE
dep |STRUCT |NESTED
dep.dep_id |VARCHAR |KEYWORD
dep.dep_name |VARCHAR |TEXT
dep.dep_name.keyword|VARCHAR |KEYWORD
dep.from_date |TIMESTAMP |DATE
dep.to_date |TIMESTAMP |DATE
emp_no |INTEGER |INTEGER
first_name |VARCHAR |TEXT
first_name.keyword |VARCHAR |KEYWORD
gender |VARCHAR |KEYWORD
hire_date |TIMESTAMP |DATE
languages |TINYINT |BYTE
last_name |VARCHAR |TEXT
last_name.keyword |VARCHAR |KEYWORD
salary |INTEGER |INTEGER
;
describeIncludeExcludeIdentifier
DESCRIBE "test_emp*,-test_emp_*";
// NB: need to pursue how the resolution is done
// should aliases be included or excluded?
describeIncludeExcludeIdentifier-Ignore
DESCRIBE "test_*,-test_alias*";
column:s | type:s
birth_date | TIMESTAMP
dep | STRUCT
dep.dep_id | VARCHAR
dep.dep_name | VARCHAR
dep.dep_name.keyword | VARCHAR
dep.from_date | TIMESTAMP
dep.to_date | TIMESTAMP
emp_no | INTEGER
first_name | VARCHAR
first_name.keyword | VARCHAR
gender | VARCHAR
hire_date | TIMESTAMP
languages | TINYINT
last_name | VARCHAR
last_name.keyword | VARCHAR
salary | INTEGER
column | type | mapping
--------------------+---------------+---------------
birth_date |TIMESTAMP |DATE
dep |STRUCT |NESTED
dep.dep_id |VARCHAR |KEYWORD
dep.dep_name |VARCHAR |TEXT
dep.dep_name.keyword|VARCHAR |KEYWORD
dep.from_date |TIMESTAMP |DATE
dep.to_date |TIMESTAMP |DATE
emp_no |INTEGER |INTEGER
first_name |VARCHAR |TEXT
first_name.keyword |VARCHAR |KEYWORD
gender |VARCHAR |KEYWORD
hire_date |TIMESTAMP |DATE
languages |TINYINT |BYTE
last_name |VARCHAR |TEXT
last_name.keyword |VARCHAR |KEYWORD
salary |INTEGER |INTEGER
;

View File

@ -12,24 +12,24 @@ describeTable
// tag::describeTable
DESCRIBE emp;
column | type
--------------------+---------------
birth_date |TIMESTAMP
dep |STRUCT
dep.dep_id |VARCHAR
dep.dep_name |VARCHAR
dep.dep_name.keyword|VARCHAR
dep.from_date |TIMESTAMP
dep.to_date |TIMESTAMP
emp_no |INTEGER
first_name |VARCHAR
first_name.keyword |VARCHAR
gender |VARCHAR
hire_date |TIMESTAMP
languages |TINYINT
last_name |VARCHAR
last_name.keyword |VARCHAR
salary |INTEGER
column | type | mapping
--------------------+---------------+---------------
birth_date |TIMESTAMP |DATE
dep |STRUCT |NESTED
dep.dep_id |VARCHAR |KEYWORD
dep.dep_name |VARCHAR |TEXT
dep.dep_name.keyword|VARCHAR |KEYWORD
dep.from_date |TIMESTAMP |DATE
dep.to_date |TIMESTAMP |DATE
emp_no |INTEGER |INTEGER
first_name |VARCHAR |TEXT
first_name.keyword |VARCHAR |KEYWORD
gender |VARCHAR |KEYWORD
hire_date |TIMESTAMP |DATE
languages |TINYINT |BYTE
last_name |VARCHAR |TEXT
last_name.keyword |VARCHAR |KEYWORD
salary |INTEGER |INTEGER
// end::describeTable
;
@ -51,24 +51,24 @@ showColumns
// tag::showColumns
SHOW COLUMNS IN emp;
column | type
--------------------+---------------
birth_date |TIMESTAMP
dep |STRUCT
dep.dep_id |VARCHAR
dep.dep_name |VARCHAR
dep.dep_name.keyword|VARCHAR
dep.from_date |TIMESTAMP
dep.to_date |TIMESTAMP
emp_no |INTEGER
first_name |VARCHAR
first_name.keyword |VARCHAR
gender |VARCHAR
hire_date |TIMESTAMP
languages |TINYINT
last_name |VARCHAR
last_name.keyword |VARCHAR
salary |INTEGER
column | type | mapping
--------------------+---------------+---------------
birth_date |TIMESTAMP |DATE
dep |STRUCT |NESTED
dep.dep_id |VARCHAR |KEYWORD
dep.dep_name |VARCHAR |TEXT
dep.dep_name.keyword|VARCHAR |KEYWORD
dep.from_date |TIMESTAMP |DATE
dep.to_date |TIMESTAMP |DATE
emp_no |INTEGER |INTEGER
first_name |VARCHAR |TEXT
first_name.keyword |VARCHAR |KEYWORD
gender |VARCHAR |KEYWORD
hire_date |TIMESTAMP |DATE
languages |TINYINT |BYTE
last_name |VARCHAR |TEXT
last_name.keyword |VARCHAR |KEYWORD
salary |INTEGER |INTEGER
// end::showColumns
;

View File

@ -6,24 +6,24 @@
describeParent
DESCRIBE test_emp;
column | type
birth_date | TIMESTAMP
dep | STRUCT
dep.dep_id | VARCHAR
dep.dep_name | VARCHAR
dep.dep_name.keyword | VARCHAR
dep.from_date | TIMESTAMP
dep.to_date | TIMESTAMP
emp_no | INTEGER
first_name | VARCHAR
first_name.keyword | VARCHAR
gender | VARCHAR
hire_date | TIMESTAMP
languages | TINYINT
last_name | VARCHAR
last_name.keyword | VARCHAR
salary | INTEGER
column | type | mapping
--------------------+---------------+---------------
birth_date |TIMESTAMP |DATE
dep |STRUCT |NESTED
dep.dep_id |VARCHAR |KEYWORD
dep.dep_name |VARCHAR |TEXT
dep.dep_name.keyword|VARCHAR |KEYWORD
dep.from_date |TIMESTAMP |DATE
dep.to_date |TIMESTAMP |DATE
emp_no |INTEGER |INTEGER
first_name |VARCHAR |TEXT
first_name.keyword |VARCHAR |KEYWORD
gender |VARCHAR |KEYWORD
hire_date |TIMESTAMP |DATE
languages |TINYINT |BYTE
last_name |VARCHAR |TEXT
last_name.keyword |VARCHAR |KEYWORD
salary |INTEGER |INTEGER
;
// disable until we figure out how to use field names with . in their name