[Backport] Validate `query` field when creating roles (#46275) (#47094)

In the current implementation, the role query is validated only at runtime, when the query is executed.

This commit adds validation of the role query when creating a role. Template queries are not validated, because the runtime information needed to evaluate them (e.g. the authenticated user's information) is not available at creation time. This mirrors how stored scripts are handled: they are stored without being parsed or evaluated to check that they are valid.

For validation, the query (if it is not a template) is parsed to build the QueryBuilder and checked to verify that the query type is allowed in a DLS role query; a short sketch of the resulting flow follows.
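
A minimal sketch of that creation-time check, mirroring the TransportPutRoleAction and TransportCreateApiKeyAction changes below (the helper method and its parameter names are illustrative and not part of the commit; the validator signature and the exception handling are taken from the diff):

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator;

// Illustrative helper: validate the DLS `query` of each index privilege before the role
// descriptor is handed to the roles store, so bad queries fail at creation time.
static boolean validateRoleQueries(RoleDescriptor descriptor, NamedXContentRegistry xContentRegistry,
                                   ActionListener<?> listener) {
    try {
        // Non-template queries are parsed into a QueryBuilder and rejected if the query type
        // is not allowed in a DLS role query; template queries are skipped here.
        DLSRoleQueryValidator.validateQueryField(descriptor.getIndicesPrivileges(), xContentRegistry);
        return true;
    } catch (ElasticsearchException | IllegalArgumentException e) {
        // surfaces errors such as "failed to parse field 'query' ... of role descriptor"
        listener.onFailure(e);
        return false;
    }
}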

Closes #34252
Yogesh Gaikwad 2019-09-26 17:57:36 +10:00 committed by GitHub
parent 97d977f381
commit 9a64b7a888
25 changed files with 627 additions and 235 deletions


@ -190,7 +190,7 @@ public class SecurityIT extends ESRestHighLevelClientTestCase {
.name(roleName)
.clusterPrivileges(randomSubsetOf(randomInt(3), Role.ClusterPrivilegeName.ALL_ARRAY))
.indicesPrivileges(
randomArray(3, IndicesPrivileges[]::new, () -> IndicesPrivilegesTests.createNewRandom(randomAlphaOfLength(3))))
randomArray(3, IndicesPrivileges[]::new, () -> IndicesPrivilegesTests.createNewRandom("{\"match_all\": {}}")))
.applicationResourcePrivileges(randomArray(3, ApplicationResourcePrivileges[]::new,
() -> ApplicationResourcePrivilegesTests.createNewRandom(randomAlphaOfLength(3).toLowerCase(Locale.ROOT))))
.runAsPrivilege(randomArray(3, String[]::new, () -> randomAlphaOfLength(3)));


@ -12,29 +12,18 @@ import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.join.ToChildBlockJoinQuery;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.BoostingQueryBuilder;
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
import org.elasticsearch.index.query.GeoShapeQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.Rewriteable;
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.index.search.NestedHelper;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.xpack.core.security.authz.support.SecurityQueryTemplateEvaluator;
import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator;
import org.elasticsearch.xpack.core.security.user.User;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.function.Function;
@ -127,11 +116,9 @@ public final class DocumentPermissions {
BooleanQuery.Builder filter) throws IOException {
for (BytesReference bytesReference : queries) {
QueryShardContext queryShardContext = queryShardContextProvider.apply(shardId);
String templateResult = SecurityQueryTemplateEvaluator.evaluateTemplate(bytesReference.utf8ToString(), scriptService, user);
try (XContentParser parser = XContentFactory.xContent(templateResult).createParser(queryShardContext.getXContentRegistry(),
LoggingDeprecationHandler.INSTANCE, templateResult)) {
QueryBuilder queryBuilder = queryShardContext.parseInnerQueryBuilder(parser);
verifyRoleQuery(queryBuilder);
QueryBuilder queryBuilder = DLSRoleQueryValidator.evaluateAndVerifyRoleQuery(bytesReference, scriptService,
queryShardContext.getXContentRegistry(), user);
if (queryBuilder != null) {
failIfQueryUsesClient(queryBuilder, queryShardContext);
Query roleQuery = queryShardContext.toQuery(queryBuilder).query();
filter.add(roleQuery, SHOULD);
@ -153,50 +140,6 @@ public final class DocumentPermissions {
filter.setMinimumNumberShouldMatch(1);
}
/**
* Checks whether the role query contains queries we know can't be used as DLS role query.
*/
static void verifyRoleQuery(QueryBuilder queryBuilder) throws IOException {
if (queryBuilder instanceof TermsQueryBuilder) {
TermsQueryBuilder termsQueryBuilder = (TermsQueryBuilder) queryBuilder;
if (termsQueryBuilder.termsLookup() != null) {
throw new IllegalArgumentException("terms query with terms lookup isn't supported as part of a role query");
}
} else if (queryBuilder instanceof GeoShapeQueryBuilder) {
GeoShapeQueryBuilder geoShapeQueryBuilder = (GeoShapeQueryBuilder) queryBuilder;
if (geoShapeQueryBuilder.shape() == null) {
throw new IllegalArgumentException("geoshape query referring to indexed shapes isn't support as part of a role query");
}
} else if (queryBuilder.getName().equals("percolate")) {
// actually only if percolate query is referring to an existing document then this is problematic,
// a normal percolate query does work. However we can't check that here as this query builder is inside
// another module. So we don't allow the entire percolate query. I don't think users would ever use
// a percolate query as role query, so this restriction shouldn't prohibit anyone from using dls.
throw new IllegalArgumentException("percolate query isn't support as part of a role query");
} else if (queryBuilder.getName().equals("has_child")) {
throw new IllegalArgumentException("has_child query isn't support as part of a role query");
} else if (queryBuilder.getName().equals("has_parent")) {
throw new IllegalArgumentException("has_parent query isn't support as part of a role query");
} else if (queryBuilder instanceof BoolQueryBuilder) {
BoolQueryBuilder boolQueryBuilder = (BoolQueryBuilder) queryBuilder;
List<QueryBuilder> clauses = new ArrayList<>();
clauses.addAll(boolQueryBuilder.filter());
clauses.addAll(boolQueryBuilder.must());
clauses.addAll(boolQueryBuilder.mustNot());
clauses.addAll(boolQueryBuilder.should());
for (QueryBuilder clause : clauses) {
verifyRoleQuery(clause);
}
} else if (queryBuilder instanceof ConstantScoreQueryBuilder) {
verifyRoleQuery(((ConstantScoreQueryBuilder) queryBuilder).innerQuery());
} else if (queryBuilder instanceof FunctionScoreQueryBuilder) {
verifyRoleQuery(((FunctionScoreQueryBuilder) queryBuilder).query());
} else if (queryBuilder instanceof BoostingQueryBuilder) {
verifyRoleQuery(((BoostingQueryBuilder) queryBuilder).negativeQuery());
verifyRoleQuery(((BoostingQueryBuilder) queryBuilder).positiveQuery());
}
}
/**
* Fall back validation that verifies that queries during rewrite don't use
* the client to make remote calls. In the case of DLS this can cause a dead


@ -0,0 +1,182 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.security.authz.support;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.BoostingQueryBuilder;
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
import org.elasticsearch.index.query.GeoShapeQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.core.security.user.User;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* This class helps in evaluating the query field if it is a template,
* validating the query and checking if the query type is allowed to be used in a DLS role query.
*/
public final class DLSRoleQueryValidator {
private DLSRoleQueryValidator() {
}
/**
* Validates the query field in the {@link RoleDescriptor.IndicesPrivileges} only if it is not a template query.<br>
* It parses the query and builds the {@link QueryBuilder}, also checks if the query type is supported in DLS role query.
*
* @param indicesPrivileges {@link RoleDescriptor.IndicesPrivileges}
* @param xContentRegistry {@link NamedXContentRegistry} for finding named queries
*/
public static void validateQueryField(RoleDescriptor.IndicesPrivileges[] indicesPrivileges,
NamedXContentRegistry xContentRegistry) {
if (indicesPrivileges != null) {
for (int i = 0; i < indicesPrivileges.length; i++) {
BytesReference query = indicesPrivileges[i].getQuery();
try {
if (query != null) {
if (isTemplateQuery(query, xContentRegistry)) {
// skip template query, this requires runtime information like 'User' information.
continue;
}
evaluateAndVerifyRoleQuery(query.utf8ToString(), xContentRegistry);
}
} catch (ParsingException | IllegalArgumentException | IOException e) {
throw new ElasticsearchParseException("failed to parse field 'query' for indices [" +
Strings.arrayToCommaDelimitedString(indicesPrivileges[i].getIndices()) +
"] at index privilege [" + i + "] of role descriptor", e);
}
}
}
}
private static boolean isTemplateQuery(BytesReference query, NamedXContentRegistry xContentRegistry) throws IOException {
try (XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry,
LoggingDeprecationHandler.INSTANCE, query.utf8ToString())) {
XContentParser.Token token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new XContentParseException(parser.getTokenLocation(), "expected [" + XContentParser.Token.START_OBJECT + "] but " +
"found [" + token + "] instead");
}
token = parser.nextToken();
if (token != XContentParser.Token.FIELD_NAME) {
throw new XContentParseException(parser.getTokenLocation(), "expected [" + XContentParser.Token.FIELD_NAME + "] with " +
"value a query name or 'template' but found [" + token + "] instead");
}
String fieldName = parser.currentName();
if ("template".equals(fieldName)) {
return true;
}
}
return false;
}
/**
* Evaluates the query if it is a template and then validates the query by parsing
* and building the {@link QueryBuilder}. It also checks if the query type is
* supported in DLS role query.
*
* @param query {@link BytesReference} query field from the role
* @param scriptService {@link ScriptService} used for evaluation of a template query
* @param xContentRegistry {@link NamedXContentRegistry} for finding named queries
* @param user {@link User} used when evaluating a template query
* @return {@link QueryBuilder} if the query is valid and allowed; {@code null} if the
* {@link RoleDescriptor.IndicesPrivileges} does not have a query field.
*/
@Nullable
public static QueryBuilder evaluateAndVerifyRoleQuery(BytesReference query, ScriptService scriptService,
NamedXContentRegistry xContentRegistry, User user) {
if (query != null) {
String templateResult = SecurityQueryTemplateEvaluator.evaluateTemplate(query.utf8ToString(), scriptService,
user);
try {
return evaluateAndVerifyRoleQuery(templateResult, xContentRegistry);
} catch (ElasticsearchParseException | ParsingException | XContentParseException | IOException e) {
throw new ElasticsearchParseException("failed to parse field 'query' from the role descriptor", e);
}
}
return null;
}
@Nullable
private static QueryBuilder evaluateAndVerifyRoleQuery(String query, NamedXContentRegistry xContentRegistry) throws IOException {
if (query != null) {
try (XContentParser parser = XContentFactory.xContent(query).createParser(xContentRegistry,
LoggingDeprecationHandler.INSTANCE, query)) {
QueryBuilder queryBuilder = AbstractQueryBuilder.parseInnerQueryBuilder(parser);
verifyRoleQuery(queryBuilder);
return queryBuilder;
}
}
return null;
}
/**
* Checks whether the role query contains queries we know can't be used as DLS role query.
*
* @param queryBuilder {@link QueryBuilder} for given query
*/
// pkg protected for testing
static void verifyRoleQuery(QueryBuilder queryBuilder) {
if (queryBuilder instanceof TermsQueryBuilder) {
TermsQueryBuilder termsQueryBuilder = (TermsQueryBuilder) queryBuilder;
if (termsQueryBuilder.termsLookup() != null) {
throw new IllegalArgumentException("terms query with terms lookup isn't supported as part of a role query");
}
} else if (queryBuilder instanceof GeoShapeQueryBuilder) {
GeoShapeQueryBuilder geoShapeQueryBuilder = (GeoShapeQueryBuilder) queryBuilder;
if (geoShapeQueryBuilder.shape() == null) {
throw new IllegalArgumentException("geoshape query referring to indexed shapes isn't supported as part of a role query");
}
} else if (queryBuilder.getName().equals("percolate")) {
// actually only if percolate query is referring to an existing document then this is problematic,
// a normal percolate query does work. However we can't check that here as this query builder is inside
// another module. So we don't allow the entire percolate query. I don't think users would ever use
// a percolate query as role query, so this restriction shouldn't prohibit anyone from using dls.
throw new IllegalArgumentException("percolate query isn't supported as part of a role query");
} else if (queryBuilder.getName().equals("has_child")) {
throw new IllegalArgumentException("has_child query isn't supported as part of a role query");
} else if (queryBuilder.getName().equals("has_parent")) {
throw new IllegalArgumentException("has_parent query isn't supported as part of a role query");
} else if (queryBuilder instanceof BoolQueryBuilder) {
BoolQueryBuilder boolQueryBuilder = (BoolQueryBuilder) queryBuilder;
List<QueryBuilder> clauses = new ArrayList<>();
clauses.addAll(boolQueryBuilder.filter());
clauses.addAll(boolQueryBuilder.must());
clauses.addAll(boolQueryBuilder.mustNot());
clauses.addAll(boolQueryBuilder.should());
for (QueryBuilder clause : clauses) {
verifyRoleQuery(clause);
}
} else if (queryBuilder instanceof ConstantScoreQueryBuilder) {
verifyRoleQuery(((ConstantScoreQueryBuilder) queryBuilder).innerQuery());
} else if (queryBuilder instanceof FunctionScoreQueryBuilder) {
verifyRoleQuery(((FunctionScoreQueryBuilder) queryBuilder).query());
} else if (queryBuilder instanceof BoostingQueryBuilder) {
verifyRoleQuery(((BoostingQueryBuilder) queryBuilder).negativeQuery());
verifyRoleQuery(((BoostingQueryBuilder) queryBuilder).positiveQuery());
}
}
}
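
A brief usage note (not part of the committed file): the two public entry points above correspond to the call sites changed elsewhere in this commit. The variable names below are placeholders; the method signatures match the class as added here.

// At role creation (TransportPutRoleAction, TransportCreateApiKeyAction, FileRolesStore):
// plain queries are parsed and their type is verified, template queries are skipped.
DLSRoleQueryValidator.validateQueryField(roleDescriptor.getIndicesPrivileges(), xContentRegistry);

// At search time (DocumentPermissions): the template, if any, is first evaluated against the
// authenticated user, then the resulting query is parsed and verified in the same way.
QueryBuilder queryBuilder = DLSRoleQueryValidator.evaluateAndVerifyRoleQuery(
    queryBytes, scriptService, xContentRegistry, user);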


@ -44,10 +44,8 @@ public final class SecurityQueryTemplateEvaluator {
* @return resultant query string after compiling and executing the script.
* If the source does not contain template then it will return the query
* source without any modifications.
* @throws IOException thrown when there is any error parsing the query
* string.
*/
public static String evaluateTemplate(final String querySource, final ScriptService scriptService, final User user) throws IOException {
public static String evaluateTemplate(final String querySource, final ScriptService scriptService, final User user) {
// EMPTY is safe here because we never use namedObject
try (XContentParser parser = XContentFactory.xContent(querySource).createParser(NamedXContentRegistry.EMPTY,
LoggingDeprecationHandler.INSTANCE, querySource)) {
@ -76,6 +74,8 @@ public final class SecurityQueryTemplateEvaluator {
} else {
return querySource;
}
} catch (IOException ioe) {
throw new ElasticsearchParseException("failed to parse query", ioe);
}
}


@ -28,12 +28,10 @@ import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.license.XPackLicenseState;
@ -55,9 +53,7 @@ import static java.util.Collections.singleton;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
@ -76,9 +72,6 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT
final Authentication authentication = mock(Authentication.class);
when(authentication.getUser()).thenReturn(mock(User.class));
threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication);
IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true, new
FieldPermissions(),
DocumentPermissions.filteredBy(singleton(new BytesArray("{\"match_all\" : {}}"))));
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(shardId.getIndex(), Settings.EMPTY);
Client client = mock(Client.class);
when(client.settings()).thenReturn(Settings.EMPTY);
@ -90,14 +83,6 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT
DocumentSubsetBitsetCache bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY);
XPackLicenseState licenseState = mock(XPackLicenseState.class);
when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true);
SecurityIndexReaderWrapper wrapper = new SecurityIndexReaderWrapper(s -> queryShardContext,
bitsetCache, threadContext, licenseState, scriptService) {
@Override
protected IndicesAccessControl getIndicesAccessControl() {
return new IndicesAccessControl(true, singletonMap("_index", indexAccessControl));
}
};
Directory directory = newDirectory();
IndexWriter iw = new IndexWriter(
@ -144,17 +129,32 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT
DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory), shardId);
for (int i = 0; i < numValues; i++) {
String termQuery = "{\"term\": {\"field\": \""+ values[i] + "\"} }";
IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true, new
FieldPermissions(),
DocumentPermissions.filteredBy(singleton(new BytesArray(termQuery))));
SecurityIndexReaderWrapper wrapper = new SecurityIndexReaderWrapper(s -> queryShardContext,
bitsetCache, threadContext, licenseState, scriptService) {
@Override
protected IndicesAccessControl getIndicesAccessControl() {
return new IndicesAccessControl(true, singletonMap("_index", indexAccessControl));
}
};
ParsedQuery parsedQuery = new ParsedQuery(new TermQuery(new Term("field", values[i])));
doReturn(new TermQueryBuilder("field", values[i])).when(queryShardContext).parseInnerQueryBuilder(any(XContentParser.class));
when(queryShardContext.toQuery(new TermsQueryBuilder("field", values[i]))).thenReturn(parsedQuery);
DirectoryReader wrappedDirectoryReader = wrapper.apply(directoryReader);
IndexSearcher indexSearcher = new ContextIndexSearcher(wrappedDirectoryReader,
IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
int expectedHitCount = valuesHitCount[i];
logger.info("Going to verify hit count with query [{}] with expected total hits [{}]", parsedQuery.query(), expectedHitCount);
TotalHitCountCollector countCollector = new TotalHitCountCollector();
indexSearcher.search(new MatchAllDocsQuery(), countCollector);
assertThat(countCollector.getTotalHits(), equalTo(expectedHitCount));
assertThat(wrappedDirectoryReader.numDocs(), equalTo(expectedHitCount));
}


@ -6,23 +6,14 @@
package org.elasticsearch.xpack.core.security.authz.permission;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.BoostingQueryBuilder;
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
import org.elasticsearch.index.query.GeoShapeQueryBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.indices.TermsLookup;
import org.elasticsearch.join.query.HasChildQueryBuilder;
import org.elasticsearch.join.query.HasParentQueryBuilder;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@ -68,44 +59,6 @@ public class DocumentPermissionsTests extends ESTestCase {
assertThat(ae.getMessage(), containsString("nested scoping for document permissions is not permitted"));
}
public void testVerifyRoleQuery() throws Exception {
QueryBuilder queryBuilder1 = new TermsQueryBuilder("field", "val1", "val2");
DocumentPermissions.verifyRoleQuery(queryBuilder1);
QueryBuilder queryBuilder2 = new TermsQueryBuilder("field", new TermsLookup("_index", "_type", "_id", "_path"));
Exception e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder2));
assertThat(e.getMessage(), equalTo("terms query with terms lookup isn't supported as part of a role query"));
QueryBuilder queryBuilder3 = new GeoShapeQueryBuilder("field", "_id", "_type");
e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder3));
assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query"));
QueryBuilder queryBuilder4 = new HasChildQueryBuilder("_type", new MatchAllQueryBuilder(), ScoreMode.None);
e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder4));
assertThat(e.getMessage(), equalTo("has_child query isn't support as part of a role query"));
QueryBuilder queryBuilder5 = new HasParentQueryBuilder("_type", new MatchAllQueryBuilder(), false);
e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder5));
assertThat(e.getMessage(), equalTo("has_parent query isn't support as part of a role query"));
QueryBuilder queryBuilder6 = new BoolQueryBuilder().must(new GeoShapeQueryBuilder("field", "_id", "_type"));
e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder6));
assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query"));
QueryBuilder queryBuilder7 = new ConstantScoreQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type"));
e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder7));
assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query"));
QueryBuilder queryBuilder8 = new FunctionScoreQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type"));
e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder8));
assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query"));
QueryBuilder queryBuilder9 = new BoostingQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type"),
new MatchAllQueryBuilder());
e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder9));
assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query"));
}
public void testFailIfQueryUsesClient() throws Exception {
Client client = mock(Client.class);
when(client.settings()).thenReturn(Settings.EMPTY);


@ -0,0 +1,63 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.security.authz.support;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.BoostingQueryBuilder;
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
import org.elasticsearch.index.query.GeoShapeQueryBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.indices.TermsLookup;
import org.elasticsearch.join.query.HasChildQueryBuilder;
import org.elasticsearch.join.query.HasParentQueryBuilder;
import org.elasticsearch.test.ESTestCase;
import static org.hamcrest.Matchers.equalTo;
public class DLSRoleQueryValidatorTests extends ESTestCase {
public void testVerifyRoleQuery() throws Exception {
QueryBuilder queryBuilder1 = new TermsQueryBuilder("field", "val1", "val2");
DLSRoleQueryValidator.verifyRoleQuery(queryBuilder1);
QueryBuilder queryBuilder2 = new TermsQueryBuilder("field", new TermsLookup("_index", "_id", "_path"));
Exception e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder2));
assertThat(e.getMessage(), equalTo("terms query with terms lookup isn't supported as part of a role query"));
QueryBuilder queryBuilder3 = new GeoShapeQueryBuilder("field", "_id", "_type");
e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder3));
assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query"));
QueryBuilder queryBuilder4 = new HasChildQueryBuilder("_type", new MatchAllQueryBuilder(), ScoreMode.None);
e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder4));
assertThat(e.getMessage(), equalTo("has_child query isn't supported as part of a role query"));
QueryBuilder queryBuilder5 = new HasParentQueryBuilder("_type", new MatchAllQueryBuilder(), false);
e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder5));
assertThat(e.getMessage(), equalTo("has_parent query isn't supported as part of a role query"));
QueryBuilder queryBuilder6 = new BoolQueryBuilder().must(new GeoShapeQueryBuilder("field", "_id", "_type"));
e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder6));
assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query"));
QueryBuilder queryBuilder7 = new ConstantScoreQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type"));
e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder7));
assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query"));
QueryBuilder queryBuilder8 = new FunctionScoreQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type"));
e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder8));
assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query"));
QueryBuilder queryBuilder9 = new BoostingQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type"),
new MatchAllQueryBuilder());
e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder9));
assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query"));
}
}


@ -388,7 +388,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
NamedXContentRegistry xContentRegistry, Environment environment,
NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) {
try {
return createComponents(client, threadPool, clusterService, resourceWatcherService, scriptService);
return createComponents(client, threadPool, clusterService, resourceWatcherService, scriptService, xContentRegistry);
} catch (final Exception e) {
throw new IllegalStateException("security initialization failed", e);
}
@ -396,7 +396,8 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
// pkg private for testing - tests want to pass in their set of extensions hence we are not using the extension service directly
Collection<Object> createComponents(Client client, ThreadPool threadPool, ClusterService clusterService,
ResourceWatcherService resourceWatcherService, ScriptService scriptService) throws Exception {
ResourceWatcherService resourceWatcherService, ScriptService scriptService,
NamedXContentRegistry xContentRegistry) throws Exception {
if (enabled == false) {
return Collections.emptyList();
}
@ -451,7 +452,8 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
dlsBitsetCache.set(new DocumentSubsetBitsetCache(settings));
final FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(settings);
final FileRolesStore fileRolesStore = new FileRolesStore(settings, env, resourceWatcherService, getLicenseState());
final FileRolesStore fileRolesStore = new FileRolesStore(settings, env, resourceWatcherService, getLicenseState(),
xContentRegistry);
final NativeRolesStore nativeRolesStore = new NativeRolesStore(settings, client, getLicenseState(), securityIndex.get());
final ReservedRolesStore reservedRolesStore = new ReservedRolesStore();
List<BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>>> rolesProviders = new ArrayList<>();


@ -6,11 +6,13 @@
package org.elasticsearch.xpack.security.action;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.SecurityContext;
@ -18,6 +20,8 @@ import org.elasticsearch.xpack.core.security.action.CreateApiKeyAction;
import org.elasticsearch.xpack.core.security.action.CreateApiKeyRequest;
import org.elasticsearch.xpack.core.security.action.CreateApiKeyResponse;
import org.elasticsearch.xpack.core.security.authc.Authentication;
import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator;
import org.elasticsearch.xpack.security.authc.ApiKeyService;
import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore;
@ -32,14 +36,16 @@ public final class TransportCreateApiKeyAction extends HandledTransportAction<Cr
private final ApiKeyService apiKeyService;
private final SecurityContext securityContext;
private final CompositeRolesStore rolesStore;
private final NamedXContentRegistry xContentRegistry;
@Inject
public TransportCreateApiKeyAction(TransportService transportService, ActionFilters actionFilters, ApiKeyService apiKeyService,
SecurityContext context, CompositeRolesStore rolesStore) {
SecurityContext context, CompositeRolesStore rolesStore, NamedXContentRegistry xContentRegistry) {
super(CreateApiKeyAction.NAME, transportService, actionFilters, (Writeable.Reader<CreateApiKeyRequest>) CreateApiKeyRequest::new);
this.apiKeyService = apiKeyService;
this.securityContext = context;
this.rolesStore = rolesStore;
this.xContentRegistry = xContentRegistry;
}
@Override
@ -49,7 +55,17 @@ public final class TransportCreateApiKeyAction extends HandledTransportAction<Cr
listener.onFailure(new IllegalStateException("authentication is required"));
} else {
rolesStore.getRoleDescriptors(new HashSet<>(Arrays.asList(authentication.getUser().roles())),
ActionListener.wrap(roleDescriptors -> apiKeyService.createApiKey(authentication, request, roleDescriptors, listener),
ActionListener.wrap(roleDescriptors -> {
for (RoleDescriptor rd : roleDescriptors) {
try {
DLSRoleQueryValidator.validateQueryField(rd.getIndicesPrivileges(), xContentRegistry);
} catch (ElasticsearchException | IllegalArgumentException e) {
listener.onFailure(e);
return;
}
}
apiKeyService.createApiKey(authentication, request, roleDescriptors, listener);
},
listener::onFailure));
}
}


@ -5,26 +5,32 @@
*/
package org.elasticsearch.xpack.security.action.role;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.action.role.PutRoleAction;
import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest;
import org.elasticsearch.xpack.core.security.action.role.PutRoleResponse;
import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore;
import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator;
import org.elasticsearch.xpack.security.authz.store.NativeRolesStore;
public class TransportPutRoleAction extends HandledTransportAction<PutRoleRequest, PutRoleResponse> {
private final NativeRolesStore rolesStore;
private final NamedXContentRegistry xContentRegistry;
@Inject
public TransportPutRoleAction(ActionFilters actionFilters, NativeRolesStore rolesStore, TransportService transportService) {
public TransportPutRoleAction(ActionFilters actionFilters, NativeRolesStore rolesStore, TransportService transportService,
NamedXContentRegistry xContentRegistry) {
super(PutRoleAction.NAME, transportService, actionFilters, PutRoleRequest::new);
this.rolesStore = rolesStore;
this.xContentRegistry = xContentRegistry;
}
@Override
@ -35,6 +41,13 @@ public class TransportPutRoleAction extends HandledTransportAction<PutRoleReques
return;
}
try {
DLSRoleQueryValidator.validateQueryField(request.roleDescriptor().getIndicesPrivileges(), xContentRegistry);
} catch (ElasticsearchException | IllegalArgumentException e) {
listener.onFailure(e);
return;
}
rolesStore.putRole(request, request.roleDescriptor(), new ActionListener<Boolean>() {
@Override
public void onResponse(Boolean created) {


@ -8,10 +8,13 @@ package org.elasticsearch.xpack.security.action.user;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.SecurityContext;
import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction;
import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest;
import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse;
@ -36,15 +39,20 @@ public class TransportHasPrivilegesAction extends HandledTransportAction<HasPriv
private final ThreadPool threadPool;
private final AuthorizationService authorizationService;
private final NativePrivilegeStore privilegeStore;
private final SecurityContext securityContext;
private final NamedXContentRegistry xContentRegistry;
@Inject
public TransportHasPrivilegesAction(ThreadPool threadPool, TransportService transportService,
ActionFilters actionFilters, AuthorizationService authorizationService,
NativePrivilegeStore privilegeStore) {
NativePrivilegeStore privilegeStore, SecurityContext context,
NamedXContentRegistry xContentRegistry) {
super(HasPrivilegesAction.NAME, transportService, actionFilters, HasPrivilegesRequest::new);
this.threadPool = threadPool;
this.authorizationService = authorizationService;
this.privilegeStore = privilegeStore;
this.xContentRegistry = xContentRegistry;
this.securityContext = context;
}
@Override
@ -58,6 +66,18 @@ public class TransportHasPrivilegesAction extends HandledTransportAction<HasPriv
return;
}
final RoleDescriptor.IndicesPrivileges[] indicesPrivileges = request.indexPrivileges();
if (indicesPrivileges != null) {
for (int i = 0; i < indicesPrivileges.length; i++) {
BytesReference query = indicesPrivileges[i].getQuery();
if (query != null) {
listener.onFailure(
new IllegalArgumentException("users may only check the index privileges without any DLS role query"));
return;
}
}
}
resolveApplicationPrivileges(request, ActionListener.wrap(applicationPrivilegeDescriptors ->
authorizationService.checkPrivileges(authentication, request, applicationPrivilegeDescriptors, listener),
listener::onFailure));


@ -323,7 +323,7 @@ public class ESNativeRealmMigrateTool extends LoggingAwareMultiCommand {
}
terminal.println("importing roles from [" + rolesFile + "]...");
Logger logger = getTerminalLogger(terminal);
Map<String, RoleDescriptor> roles = FileRolesStore.parseRoleDescriptors(rolesFile, logger, true, Settings.EMPTY);
Map<String, RoleDescriptor> roles = FileRolesStore.parseRoleDescriptors(rolesFile, logger, true, Settings.EMPTY, null);
Set<String> existingRoles;
try {
existingRoles = getRolesThatExist(terminal, env.settings(), env, options);


@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.Nullable;
@ -29,6 +30,7 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges;
import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore;
import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult;
import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator;
import org.elasticsearch.xpack.core.security.support.NoOpLogger;
import org.elasticsearch.xpack.core.security.support.Validation;
@ -61,27 +63,30 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<Ro
private final Settings settings;
private final Path file;
private final XPackLicenseState licenseState;
private final NamedXContentRegistry xContentRegistry;
private final List<Consumer<Set<String>>> listeners = new ArrayList<>();
private volatile Map<String, RoleDescriptor> permissions;
public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, XPackLicenseState licenseState)
public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, XPackLicenseState licenseState,
NamedXContentRegistry xContentRegistry)
throws IOException {
this(settings, env, watcherService, null, licenseState);
this(settings, env, watcherService, null, licenseState, xContentRegistry);
}
FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, Consumer<Set<String>> listener,
XPackLicenseState licenseState) throws IOException {
XPackLicenseState licenseState, NamedXContentRegistry xContentRegistry) throws IOException {
this.settings = settings;
this.file = resolveFile(env);
if (listener != null) {
listeners.add(listener);
}
this.licenseState = licenseState;
this.xContentRegistry = xContentRegistry;
FileWatcher watcher = new FileWatcher(file.getParent());
watcher.addListener(new FileListener());
watcherService.add(watcher, ResourceWatcherService.Frequency.HIGH);
permissions = parseFile(file, logger, settings, licenseState);
permissions = parseFile(file, logger, settings, licenseState, xContentRegistry);
}
@ -150,15 +155,17 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<Ro
}
public static Set<String> parseFileForRoleNames(Path path, Logger logger) {
return parseRoleDescriptors(path, logger, false, Settings.EMPTY).keySet();
// EMPTY is safe here because we never use namedObject as we are just parsing role names
return parseRoleDescriptors(path, logger, false, Settings.EMPTY, NamedXContentRegistry.EMPTY).keySet();
}
public static Map<String, RoleDescriptor> parseFile(Path path, Logger logger, Settings settings, XPackLicenseState licenseState) {
return parseFile(path, logger, true, settings, licenseState);
public static Map<String, RoleDescriptor> parseFile(Path path, Logger logger, Settings settings, XPackLicenseState licenseState,
NamedXContentRegistry xContentRegistry) {
return parseFile(path, logger, true, settings, licenseState, xContentRegistry);
}
public static Map<String, RoleDescriptor> parseFile(Path path, Logger logger, boolean resolvePermission,
Settings settings, XPackLicenseState licenseState) {
public static Map<String, RoleDescriptor> parseFile(Path path, Logger logger, boolean resolvePermission, Settings settings,
XPackLicenseState licenseState, NamedXContentRegistry xContentRegistry) {
if (logger == null) {
logger = NoOpLogger.INSTANCE;
}
@ -170,7 +177,7 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<Ro
List<String> roleSegments = roleSegments(path);
final boolean flsDlsLicensed = licenseState.isDocumentAndFieldLevelSecurityAllowed();
for (String segment : roleSegments) {
RoleDescriptor descriptor = parseRoleDescriptor(segment, path, logger, resolvePermission, settings);
RoleDescriptor descriptor = parseRoleDescriptor(segment, path, logger, resolvePermission, settings, xContentRegistry);
if (descriptor != null) {
if (ReservedRolesStore.isReserved(descriptor.getName())) {
logger.warn("role [{}] is reserved. the relevant role definition in the mapping file will be ignored",
@ -202,7 +209,8 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<Ro
return unmodifiableMap(roles);
}
public static Map<String, RoleDescriptor> parseRoleDescriptors(Path path, Logger logger, boolean resolvePermission, Settings settings) {
public static Map<String, RoleDescriptor> parseRoleDescriptors(Path path, Logger logger, boolean resolvePermission, Settings settings,
NamedXContentRegistry xContentRegistry) {
if (logger == null) {
logger = NoOpLogger.INSTANCE;
}
@ -213,7 +221,7 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<Ro
try {
List<String> roleSegments = roleSegments(path);
for (String segment : roleSegments) {
RoleDescriptor rd = parseRoleDescriptor(segment, path, logger, resolvePermission, settings);
RoleDescriptor rd = parseRoleDescriptor(segment, path, logger, resolvePermission, settings, xContentRegistry);
if (rd != null) {
roles.put(rd.getName(), rd);
}
@ -231,12 +239,12 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<Ro
}
@Nullable
static RoleDescriptor parseRoleDescriptor(String segment, Path path, Logger logger, boolean resolvePermissions, Settings settings) {
static RoleDescriptor parseRoleDescriptor(String segment, Path path, Logger logger, boolean resolvePermissions, Settings settings,
NamedXContentRegistry xContentRegistry) {
String roleName = null;
try {
// EMPTY is safe here because we never use namedObject
XContentParser parser = YamlXContent.yamlXContent
.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, segment);
.createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, segment);
XContentParser.Token token = parser.nextToken();
if (token == XContentParser.Token.START_OBJECT) {
token = parser.nextToken();
@ -258,7 +266,7 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<Ro
// we pass true as last parameter because we do not want to reject files if field permissions
// are given in 2.x syntax
RoleDescriptor descriptor = RoleDescriptor.parse(roleName, parser, true);
return checkDescriptor(descriptor, path, logger, settings);
return checkDescriptor(descriptor, path, logger, settings, xContentRegistry);
} else {
logger.error("invalid role definition [{}] in roles file [{}]. skipping role...", roleName, path.toAbsolutePath());
return null;
@ -295,16 +303,25 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<Ro
}
@Nullable
private static RoleDescriptor checkDescriptor(RoleDescriptor descriptor, Path path, Logger logger, Settings settings) {
private static RoleDescriptor checkDescriptor(RoleDescriptor descriptor, Path path, Logger logger, Settings settings,
NamedXContentRegistry xContentRegistry) {
String roleName = descriptor.getName();
// first check if FLS/DLS is enabled on the role...
for (RoleDescriptor.IndicesPrivileges privilege : descriptor.getIndicesPrivileges()) {
if ((privilege.getQuery() != null || privilege.getGrantedFields() != null || privilege.getDeniedFields() != null)
&& XPackSettings.DLS_FLS_ENABLED.get(settings) == false) {
if (descriptor.isUsingDocumentOrFieldLevelSecurity()) {
if (XPackSettings.DLS_FLS_ENABLED.get(settings) == false) {
logger.error("invalid role definition [{}] in roles file [{}]. document and field level security is not " +
"enabled. set [{}] to [true] in the configuration file. skipping role...", roleName, path
.toAbsolutePath(), XPackSettings.DLS_FLS_ENABLED.getKey());
return null;
} else if (xContentRegistry != null) {
try {
DLSRoleQueryValidator.validateQueryField(descriptor.getIndicesPrivileges(), xContentRegistry);
} catch (ElasticsearchException | IllegalArgumentException e) {
logger.error((Supplier<?>) () -> new ParameterizedMessage(
"invalid role definition [{}] in roles file [{}]. failed to validate query field. skipping role...", roleName,
path.toAbsolutePath()), e);
return null;
}
}
}
return descriptor;
@ -350,7 +367,7 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<Ro
if (file.equals(FileRolesStore.this.file)) {
final Map<String, RoleDescriptor> previousPermissions = permissions;
try {
permissions = parseFile(file, logger, settings, licenseState);
permissions = parseFile(file, logger, settings, licenseState, xContentRegistry);
logger.info("updated roles (roles file [{}] {})", file.toAbsolutePath(),
Files.exists(file) ? "changed" : "removed");
} catch (Exception e) {


@ -128,7 +128,8 @@ public class SecurityTests extends ESTestCase {
Client client = mock(Client.class);
when(client.threadPool()).thenReturn(threadPool);
when(client.settings()).thenReturn(settings);
return security.createComponents(client, threadPool, clusterService, mock(ResourceWatcherService.class), mock(ScriptService.class));
return security.createComponents(client, threadPool, clusterService, mock(ResourceWatcherService.class), mock(ScriptService.class),
xContentRegistry());
}
private static <T> T findComponent(Class<T> type, Collection<Object> components) {


@ -5,10 +5,21 @@
*/
package org.elasticsearch.xpack.security.action.role;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.join.query.HasChildQueryBuilder;
import org.elasticsearch.join.query.HasParentQueryBuilder;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.Transport;
@ -22,6 +33,7 @@ import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.concurrent.atomic.AtomicReference;
@ -41,12 +53,26 @@ import static org.mockito.Mockito.verifyZeroInteractions;
public class TransportPutRoleActionTests extends ESTestCase {
@Override
protected NamedXContentRegistry xContentRegistry() {
return new NamedXContentRegistry(Arrays.asList(
new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(MatchAllQueryBuilder.NAME),
(p, c) -> MatchAllQueryBuilder.fromXContent(p)),
new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(HasChildQueryBuilder.NAME),
(p, c) -> HasChildQueryBuilder.fromXContent(p)),
new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(HasParentQueryBuilder.NAME),
(p, c) -> HasParentQueryBuilder.fromXContent(p)),
new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(TermQueryBuilder.NAME),
(p, c) -> TermQueryBuilder.fromXContent(p))));
}
public void testReservedRole() {
final String roleName = randomFrom(new ArrayList<>(ReservedRolesStore.names()));
NativeRolesStore rolesStore = mock(NativeRolesStore.class);
TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null,
TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService);
TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService,
xContentRegistry());
PutRoleRequest request = new PutRoleRequest();
request.name(roleName);
@ -76,7 +102,8 @@ public class TransportPutRoleActionTests extends ESTestCase {
NativeRolesStore rolesStore = mock(NativeRolesStore.class);
TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null,
TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService);
TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService,
xContentRegistry());
final boolean created = randomBoolean();
PutRoleRequest request = new PutRoleRequest();
@ -119,7 +146,8 @@ public class TransportPutRoleActionTests extends ESTestCase {
NativeRolesStore rolesStore = mock(NativeRolesStore.class);
TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null,
TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService);
TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService,
xContentRegistry());
PutRoleRequest request = new PutRoleRequest();
request.name(roleName);
@ -154,4 +182,78 @@ public class TransportPutRoleActionTests extends ESTestCase {
assertThat(throwableRef.get(), is(sameInstance(e)));
verify(rolesStore, times(1)).putRole(eq(request), any(RoleDescriptor.class), any(ActionListener.class));
}
public void testCreationOfRoleWithMalformedQueryJsonFails() {
NativeRolesStore rolesStore = mock(NativeRolesStore.class);
TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null,
TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService,
xContentRegistry());
PutRoleRequest request = new PutRoleRequest();
request.name("test");
String[] malformedQueryJson = new String[]{"{ \"match_all\": { \"unknown_field\": \"\" } }",
"{ malformed JSON }",
"{ \"unknown\": {\"\"} }",
"{}"};
BytesReference query = new BytesArray(randomFrom(malformedQueryJson));
request.addIndex(new String[]{"idx1"}, new String[]{"read"}, null, null, query, randomBoolean());
final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
final AtomicReference<PutRoleResponse> responseRef = new AtomicReference<>();
action.doExecute(mock(Task.class), request, new ActionListener<PutRoleResponse>() {
@Override
public void onResponse(PutRoleResponse response) {
responseRef.set(response);
}
@Override
public void onFailure(Exception e) {
throwableRef.set(e);
}
});
assertThat(responseRef.get(), is(nullValue()));
assertThat(throwableRef.get(), is(notNullValue()));
Throwable t = throwableRef.get();
assertThat(t, instanceOf(ElasticsearchParseException.class));
assertThat(t.getMessage(), containsString("failed to parse field 'query' for indices [" +
Strings.arrayToCommaDelimitedString(new String[]{"idx1"}) +
"] at index privilege [0] of role descriptor"));
}
public void testCreationOfRoleWithUnsupportedQueryFails() throws Exception {
NativeRolesStore rolesStore = mock(NativeRolesStore.class);
TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null,
TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService,
xContentRegistry());
PutRoleRequest request = new PutRoleRequest();
request.name("test");
String hasChildQuery = "{ \"has_child\": { \"type\": \"child\", \"query\": { \"match_all\": {} } } }";
String hasParentQuery = "{ \"has_parent\": { \"parent_type\": \"parent\", \"query\": { \"match_all\": {} } } }";
BytesReference query = new BytesArray(randomFrom(hasChildQuery, hasParentQuery));
request.addIndex(new String[]{"idx1"}, new String[]{"read"}, null, null, query, randomBoolean());
final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
final AtomicReference<PutRoleResponse> responseRef = new AtomicReference<>();
action.doExecute(mock(Task.class), request, new ActionListener<PutRoleResponse>() {
@Override
public void onResponse(PutRoleResponse response) {
responseRef.set(response);
}
@Override
public void onFailure(Exception e) {
throwableRef.set(e);
}
});
assertThat(responseRef.get(), is(nullValue()));
assertThat(throwableRef.get(), is(notNullValue()));
Throwable t = throwableRef.get();
assertThat(t, instanceOf(ElasticsearchParseException.class));
assertThat(t.getMessage(), containsString("failed to parse field 'query' for indices [" +
Strings.arrayToCommaDelimitedString(new String[]{"idx1"}) +
"] at index privilege [0] of role descriptor"));
}
}


@ -0,0 +1,78 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.action.user;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.SecurityContext;
import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest;
import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse;
import org.elasticsearch.xpack.core.security.authc.Authentication;
import org.elasticsearch.xpack.core.security.authc.AuthenticationField;
import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.core.security.user.User;
import org.elasticsearch.xpack.security.authz.AuthorizationService;
import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore;
import org.junit.After;
import org.junit.Before;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.notNullValue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class TransportHasPrivilegesActionTests extends ESTestCase {
private ThreadPool threadPool;
@Before
public void createThreadPool() {
threadPool = new TestThreadPool("has privileges action tests");
}
@After
public void stopThreadPool() {
terminate(threadPool);
}
public void testHasPrivilegesRequestDoesNotAllowDLSRoleQueryBasedIndicesPrivileges() {
final ThreadContext threadContext = threadPool.getThreadContext();
final SecurityContext context = mock(SecurityContext.class);
final User user = new User("user-1", "superuser");
final Authentication authentication = new Authentication(user,
new Authentication.RealmRef("native", "default_native", "node1"), null);
when(context.getAuthentication()).thenReturn(authentication);
threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication);
final TransportHasPrivilegesAction transportHasPrivilegesAction = new TransportHasPrivilegesAction(threadPool,
mock(TransportService.class), mock(ActionFilters.class), mock(AuthorizationService.class), mock(NativePrivilegeStore.class),
context, xContentRegistry());
final HasPrivilegesRequest request = new HasPrivilegesRequest();
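// build index privileges that each carry a (randomly generated) DLS role query; the has-privileges check is expected to reject any request containing such queries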
final RoleDescriptor.IndicesPrivileges[] indicesPrivileges = new RoleDescriptor.IndicesPrivileges[randomIntBetween(1, 5)];
for (int i = 0; i < indicesPrivileges.length; i++) {
indicesPrivileges[i] = RoleDescriptor.IndicesPrivileges.builder()
.privileges(randomFrom("read", "write"))
.indices(randomAlphaOfLengthBetween(2, 8))
.query(new BytesArray(randomAlphaOfLength(5)))
.build();
}
request.indexPrivileges(indicesPrivileges);
request.username("user-1");
final PlainActionFuture<HasPrivilegesResponse> listener = new PlainActionFuture<>();
transportHasPrivilegesAction.doExecute(mock(Task.class), request, listener);
final IllegalArgumentException ile = expectThrows(IllegalArgumentException.class, () -> listener.actionGet());
assertThat(ile, notNullValue());
assertThat(ile.getMessage(), containsString("users may only check the index privileges without any DLS role query"));
}
}


@ -131,7 +131,7 @@ public class ESNativeMigrateToolTests extends NativeRealmIntegTestCase {
.cluster("all", "none")
.runAs("root", "nobody")
.addIndices(new String[] { "index" }, new String[] { "read" }, new String[] { "body", "title" }, null,
new BytesArray("{\"query\": {\"match_all\": {}}}"), randomBoolean())
new BytesArray("{\"match_all\": {}}"), randomBoolean())
.get();
addedRoles.add(rname);
}


@ -198,7 +198,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
.cluster("all", "none")
.runAs("root", "nobody")
.addIndices(new String[]{"index"}, new String[]{"read"}, new String[]{"body", "title"}, null,
new BytesArray("{\"query\": {\"match_all\": {}}}"), randomBoolean())
new BytesArray("{\"match_all\": {}}"), randomBoolean())
.metadata(metadata)
.get();
logger.error("--> waiting for .security index");
@ -215,13 +215,13 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
.cluster("all", "none")
.runAs("root", "nobody")
.addIndices(new String[]{"index"}, new String[]{"read"}, new String[]{"body", "title"}, null,
new BytesArray("{\"query\": {\"match_all\": {}}}"), randomBoolean())
new BytesArray("{\"match_all\": {}}"), randomBoolean())
.get();
c.preparePutRole("test_role3")
.cluster("all", "none")
.runAs("root", "nobody")
.addIndices(new String[]{"index"}, new String[]{"read"}, new String[]{"body", "title"}, null,
new BytesArray("{\"query\": {\"match_all\": {}}}"), randomBoolean())
new BytesArray("{\"match_all\": {}}"), randomBoolean())
.get();
logger.info("--> retrieving all roles");


@ -61,7 +61,7 @@ public class RoleDescriptorTests extends ESTestCase {
.indices("i1", "i2")
.privileges("read")
.grantedFields("body", "title")
.query("{\"query\": {\"match_all\": {}}}")
.query("{\"match_all\": {}}")
.build()
};
final RoleDescriptor.ApplicationResourcePrivileges[] applicationPrivileges = {
@ -82,7 +82,7 @@ public class RoleDescriptorTests extends ESTestCase {
assertThat(descriptor.toString(), is("Role[name=test, cluster=[all,none]" +
", global=[{APPLICATION:manage:applications=app01,app02}]" +
", indicesPrivileges=[IndicesPrivileges[indices=[i1,i2], allowRestrictedIndices=[false], privileges=[read]" +
", field_security=[grant=[body,title], except=null], query={\"query\": {\"match_all\": {}}}],]" +
", field_security=[grant=[body,title], except=null], query={\"match_all\": {}}],]" +
", applicationPrivileges=[ApplicationResourcePrivileges[application=my_app, privileges=[read,write], resources=[*]],]" +
", runAs=[sudo], metadata=[{}]]"));
}
@ -94,7 +94,7 @@ public class RoleDescriptorTests extends ESTestCase {
.privileges("read")
.grantedFields("body", "title")
.allowRestrictedIndices(randomBoolean())
.query("{\"query\": {\"match_all\": {}}}")
.query("{\"match_all\": {}}")
.build()
};
final RoleDescriptor.ApplicationResourcePrivileges[] applicationPrivileges = {
@ -136,7 +136,7 @@ public class RoleDescriptorTests extends ESTestCase {
"\"p2\"]}, {\"names\": \"idx2\", \"allow_restricted_indices\": true, \"privileges\": [\"p3\"], \"field_security\": " +
"{\"grant\": [\"f1\", \"f2\"]}}, {\"names\": " +
"\"idx2\", \"allow_restricted_indices\": false," +
"\"privileges\": [\"p3\"], \"field_security\": {\"grant\": [\"f1\", \"f2\"]}, \"query\": \"{\\\"match_all\\\": {}}\"}]}";
"\"privileges\": [\"p3\"], \"field_security\": {\"grant\": [\"f1\", \"f2\"]}, \"query\": {\"match_all\": {}} }]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON);
assertEquals("test", rd.getName());
assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges());
@ -261,6 +261,18 @@ public class RoleDescriptorTests extends ESTestCase {
assertNull(rd.getIndicesPrivileges()[0].getQuery());
}
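// an explicit "query": null in the descriptor should parse successfully and leave the role query unset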
public void testParseNullQuery() throws Exception {
String json = "{\"cluster\":[\"a\", \"b\"], \"run_as\": [\"m\", \"n\"], \"index\": [{\"names\": [\"idx1\",\"idx2\"], " +
"\"privileges\": [\"p1\", \"p2\"], \"query\": null}]}";
RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(json), false, XContentType.JSON);
assertEquals("test", rd.getName());
assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges());
assertEquals(1, rd.getIndicesPrivileges().length);
assertArrayEquals(new String[] { "idx1", "idx2" }, rd.getIndicesPrivileges()[0].getIndices());
assertArrayEquals(new String[] { "m", "n" }, rd.getRunAs());
assertNull(rd.getIndicesPrivileges()[0].getQuery());
}
public void testParseEmptyQueryUsingDeprecatedIndicesField() throws Exception {
String json = "{\"cluster\":[\"a\", \"b\"], \"run_as\": [\"m\", \"n\"], \"indices\": [{\"names\": [\"idx1\",\"idx2\"], " +
"\"privileges\": [\"p1\", \"p2\"], \"query\": \"\"}]}";
@ -283,4 +295,5 @@ public class RoleDescriptorTests extends ESTestCase {
assertEquals(1, parsed.getTransientMetadata().size());
assertEquals(true, parsed.getTransientMetadata().get("enabled"));
}
}


@ -9,9 +9,13 @@ import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.automaton.MinimizationOperations;
import org.apache.lucene.util.automaton.Operations;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
@ -45,6 +49,7 @@ import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static java.util.Collections.singletonList;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
@ -61,11 +66,17 @@ import static org.mockito.Mockito.when;
public class FileRolesStoreTests extends ESTestCase {
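// role queries are now parsed when roles are loaded, so the store needs a NamedXContentRegistry; registering only the match_all parser should be enough for the queries used in these test fixtures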
@Override
protected NamedXContentRegistry xContentRegistry() {
return new NamedXContentRegistry(singletonList(new NamedXContentRegistry.Entry(QueryBuilder.class,
new ParseField(MatchAllQueryBuilder.NAME), (p, c) -> MatchAllQueryBuilder.fromXContent(p))));
}
public void testParseFile() throws Exception {
Path path = getDataPath("roles.yml");
Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.builder()
.put(XPackSettings.DLS_FLS_ENABLED.getKey(), true)
.build(), new XPackLicenseState(Settings.EMPTY));
.build(), new XPackLicenseState(Settings.EMPTY), xContentRegistry());
assertThat(roles, notNullValue());
assertThat(roles.size(), is(9));
@ -235,6 +246,8 @@ public class FileRolesStoreTests extends ESTestCase {
assertTrue(group.getFieldPermissions().grantsAccessTo("boo"));
assertTrue(group.getFieldPermissions().hasFieldLevelSecurity());
assertThat(group.getQuery(), notNullValue());
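// the newly added role with an invalid query must not be loaded even though DLS is enabled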
assertThat(roles.get("role_query_invalid"), nullValue());
}
public void testParseFileWithFLSAndDLSDisabled() throws Exception {
@ -244,14 +257,15 @@ public class FileRolesStoreTests extends ESTestCase {
events.clear();
Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.builder()
.put(XPackSettings.DLS_FLS_ENABLED.getKey(), false)
.build(), new XPackLicenseState(Settings.EMPTY));
.build(), new XPackLicenseState(Settings.EMPTY), xContentRegistry());
assertThat(roles, notNullValue());
assertThat(roles.size(), is(6));
assertThat(roles.get("role_fields"), nullValue());
assertThat(roles.get("role_query"), nullValue());
assertThat(roles.get("role_query_fields"), nullValue());
assertThat(roles.get("role_query_invalid"), nullValue());
assertThat(events, hasSize(3));
assertThat(events, hasSize(4));
assertThat(
events.get(0),
startsWith("invalid role definition [role_fields] in roles file [" + path.toAbsolutePath() +
@ -262,6 +276,9 @@ public class FileRolesStoreTests extends ESTestCase {
assertThat(events.get(2),
startsWith("invalid role definition [role_query_fields] in roles file [" + path.toAbsolutePath() +
"]. document and field level security is not enabled."));
assertThat(events.get(3),
startsWith("invalid role definition [role_query_invalid] in roles file [" + path.toAbsolutePath() +
"]. document and field level security is not enabled."));
}
public void testParseFileWithFLSAndDLSUnlicensed() throws Exception {
@ -271,7 +288,7 @@ public class FileRolesStoreTests extends ESTestCase {
events.clear();
XPackLicenseState licenseState = mock(XPackLicenseState.class);
when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(false);
Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, licenseState);
Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, licenseState, xContentRegistry());
assertThat(roles, notNullValue());
assertThat(roles.size(), is(9));
assertNotNull(roles.get("role_fields"));
@ -295,7 +312,8 @@ public class FileRolesStoreTests extends ESTestCase {
public void testDefaultRolesFile() throws Exception {
// TODO we should add the config dir to the resources so we don't copy this stuff around...
Path path = getDataPath("default_roles.yml");
Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY));
Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY),
xContentRegistry());
assertThat(roles, notNullValue());
assertThat(roles.size(), is(0));
}
@ -325,7 +343,7 @@ public class FileRolesStoreTests extends ESTestCase {
FileRolesStore store = new FileRolesStore(settings, env, watcherService, roleSet -> {
modifiedRoles.addAll(roleSet);
latch.countDown();
}, new XPackLicenseState(Settings.EMPTY));
}, new XPackLicenseState(Settings.EMPTY), xContentRegistry());
Set<RoleDescriptor> descriptors = store.roleDescriptors(Collections.singleton("role1"));
assertThat(descriptors, notNullValue());
@ -368,7 +386,7 @@ public class FileRolesStoreTests extends ESTestCase {
store = new FileRolesStore(settings, env, watcherService, roleSet -> {
truncatedFileRolesModified.addAll(roleSet);
truncateLatch.countDown();
}, new XPackLicenseState(Settings.EMPTY));
}, new XPackLicenseState(Settings.EMPTY), xContentRegistry());
final Set<String> allRolesPreTruncate = store.getAllRoleNames();
try (BufferedWriter writer = Files.newBufferedWriter(tmp, StandardCharsets.UTF_8, StandardOpenOption.TRUNCATE_EXISTING)) {
@ -391,7 +409,7 @@ public class FileRolesStoreTests extends ESTestCase {
store = new FileRolesStore(settings, env, watcherService, roleSet -> {
modifiedFileRolesModified.addAll(roleSet);
modifyLatch.countDown();
}, new XPackLicenseState(Settings.EMPTY));
}, new XPackLicenseState(Settings.EMPTY), xContentRegistry());
try (BufferedWriter writer = Files.newBufferedWriter(tmp, StandardCharsets.UTF_8, StandardOpenOption.TRUNCATE_EXISTING)) {
writer.append("role5:").append(System.lineSeparator());
@ -416,7 +434,8 @@ public class FileRolesStoreTests extends ESTestCase {
public void testThatEmptyFileDoesNotResultInLoop() throws Exception {
Path file = createTempFile();
Files.write(file, Collections.singletonList("#"), StandardCharsets.UTF_8);
Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(file, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY));
Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(file, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY),
xContentRegistry());
assertThat(roles.keySet(), is(empty()));
}
@ -425,7 +444,8 @@ public class FileRolesStoreTests extends ESTestCase {
Logger logger = CapturingLogger.newCapturingLogger(Level.ERROR, null);
List<String> entries = CapturingLogger.output(logger.getName(), Level.ERROR);
entries.clear();
Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY));
Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY),
xContentRegistry());
assertThat(roles.size(), is(1));
assertThat(roles, hasKey("valid_role"));
RoleDescriptor descriptor = roles.get("valid_role");
@ -467,7 +487,8 @@ public class FileRolesStoreTests extends ESTestCase {
List<String> events = CapturingLogger.output(logger.getName(), Level.ERROR);
events.clear();
Path path = getDataPath("reserved_roles.yml");
Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY));
Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY),
xContentRegistry());
assertThat(roles, notNullValue());
assertThat(roles.size(), is(1));
@ -498,7 +519,8 @@ public class FileRolesStoreTests extends ESTestCase {
.put(XPackSettings.DLS_FLS_ENABLED.getKey(), flsDlsEnabled)
.build();
Environment env = TestEnvironment.newEnvironment(settings);
FileRolesStore store = new FileRolesStore(settings, env, mock(ResourceWatcherService.class), new XPackLicenseState(Settings.EMPTY));
FileRolesStore store = new FileRolesStore(settings, env, mock(ResourceWatcherService.class), new XPackLicenseState(Settings.EMPTY),
xContentRegistry());
Map<String, Object> usageStats = store.usageStats();
@ -512,9 +534,10 @@ public class FileRolesStoreTests extends ESTestCase {
Path path = getDataPath("roles2xformat.yml");
byte[] bytes = Files.readAllBytes(path);
String roleString = new String(bytes, Charset.defaultCharset());
RoleDescriptor role = FileRolesStore.parseRoleDescriptor(roleString, path, logger, true, Settings.EMPTY);
RoleDescriptor role = FileRolesStore.parseRoleDescriptor(roleString, path, logger, true, Settings.EMPTY, xContentRegistry());
RoleDescriptor.IndicesPrivileges indicesPrivileges = role.getIndicesPrivileges()[0];
assertThat(indicesPrivileges.getGrantedFields(), arrayContaining("foo", "boo"));
assertNull(indicesPrivileges.getDeniedFields());
}
}


@ -65,8 +65,16 @@ role_query_fields:
privileges:
- READ
query:
match_all:
match_all: {}
field_security:
grant:
- foo
- boo
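# role_query_invalid deliberately uses an unrecognised query type; the FileRolesStore tests expect this role to be rejected when the file is parsed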
role_query_invalid:
indices:
- names:
- 'query_idx'
privileges:
- READ
query: '{ "unknown": {} }'


@ -5,7 +5,7 @@ role1:
privileges:
- READ
query:
match_all:
match_all: {}
fields:
- foo
- boo


@ -7,7 +7,11 @@ setup:
cluster.health:
wait_for_status: yellow
---
"Test use prohibited query inside role query":
- do:
catch: /terms query with terms lookup isn't supported as part of a role query/
security.put_role:
name: "role"
body: >
@ -24,49 +28,3 @@ setup:
]
}
- do:
security.put_user:
username: "joe"
body: >
{
"password": "x-pack-test-password",
"roles" : [ "role" ]
}
---
teardown:
- do:
security.delete_user:
username: "joe"
ignore: 404
- do:
security.delete_role:
name: "role"
ignore: 404
---
"Test use prohibited query inside role query":
- do:
headers:
Authorization: "Basic am9lOngtcGFjay10ZXN0LXBhc3N3b3Jk"
index:
index: index
type: type
id: 1
body: >
{
"foo": "bar"
}
- do:
catch: /terms query with terms lookup isn't supported as part of a role query/
headers:
Authorization: "Basic am9lOngtcGFjay10ZXN0LXBhc3N3b3Jk"
search:
rest_total_hits_as_int: true
index: index
body: { "query" : { "match_all" : {} } }