In the current implementation, the role query is validated only at runtime, when the query is executed. This commit adds validation of the role query at role creation time. Template queries are still not validated at this point because the runtime information they need (e.g. the authenticated user's details) is not available; this mirrors how we store scripts without evaluating or parsing them to check whether they are valid. For validation, the query is evaluated (when it is not a template), parsed to build the QueryBuilder, and checked to verify that the query type is allowed as a DLS role query. A sketch of the resulting behaviour follows the commit metadata below. Closes #34252
This commit is contained in:
parent 97d977f381
commit 9a64b7a888
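Before the diff itself, here is a rough sketch of the create-time check this commit introduces. It is illustrative only: apart from DLSRoleQueryValidator and RoleDescriptor, which appear in the changes below, the builder calls and registry wiring are assumptions rather than verbatim API from this commit.

// Illustrative sketch only; builder calls and registry wiring are assumed, not taken verbatim from this commit.
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator;

final class RoleQueryValidationSketch {

    // xContentRegistry must contain the standard query parsers (the node-level registry the transport actions receive).
    static void validateOnRoleCreate(NamedXContentRegistry xContentRegistry) {
        // A concrete DLS query is parsed into a QueryBuilder and checked against the disallowed
        // query types when the role is stored, instead of failing later at search time.
        RoleDescriptor.IndicesPrivileges concrete = RoleDescriptor.IndicesPrivileges.builder()
            .indices("logs-*")
            .privileges("read")
            .query("{\"match_all\": {}}")
            .build();

        // A templated query is skipped here: evaluating it needs runtime context such as the
        // authenticated user, so it is still validated only when the query is executed.
        RoleDescriptor.IndicesPrivileges templated = RoleDescriptor.IndicesPrivileges.builder()
            .indices("logs-*")
            .privileges("read")
            .query("{\"template\":{\"source\":{\"term\":{\"owner\":\"{{_user.username}}\"}}}}")
            .build();

        // Throws ElasticsearchParseException (wrapping the parse or type error) for a malformed or
        // disallowed concrete query; returns normally otherwise.
        DLSRoleQueryValidator.validateQueryField(
            new RoleDescriptor.IndicesPrivileges[]{concrete, templated}, xContentRegistry);
    }

    private RoleQueryValidationSketch() {
    }
}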
@@ -190,7 +190,7 @@ public class SecurityIT extends ESRestHighLevelClientTestCase {
             .name(roleName)
             .clusterPrivileges(randomSubsetOf(randomInt(3), Role.ClusterPrivilegeName.ALL_ARRAY))
             .indicesPrivileges(
-                randomArray(3, IndicesPrivileges[]::new, () -> IndicesPrivilegesTests.createNewRandom(randomAlphaOfLength(3))))
+                randomArray(3, IndicesPrivileges[]::new, () -> IndicesPrivilegesTests.createNewRandom("{\"match_all\": {}}")))
             .applicationResourcePrivileges(randomArray(3, ApplicationResourcePrivileges[]::new,
                 () -> ApplicationResourcePrivilegesTests.createNewRandom(randomAlphaOfLength(3).toLowerCase(Locale.ROOT))))
             .runAsPrivilege(randomArray(3, String[]::new, () -> randomAlphaOfLength(3)));
@@ -12,29 +12,18 @@ import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.search.join.ToChildBlockJoinQuery;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.lucene.search.Queries;
-import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.query.BoolQueryBuilder;
-import org.elasticsearch.index.query.BoostingQueryBuilder;
-import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
-import org.elasticsearch.index.query.GeoShapeQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryRewriteContext;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.query.Rewriteable;
-import org.elasticsearch.index.query.TermsQueryBuilder;
-import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
 import org.elasticsearch.index.search.NestedHelper;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.script.ScriptService;
-import org.elasticsearch.xpack.core.security.authz.support.SecurityQueryTemplateEvaluator;
+import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator;
 import org.elasticsearch.xpack.core.security.user.User;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Collections;
-import java.util.List;
 import java.util.Set;
 import java.util.function.Function;
 
@@ -127,11 +116,9 @@ public final class DocumentPermissions {
                                    BooleanQuery.Builder filter) throws IOException {
         for (BytesReference bytesReference : queries) {
             QueryShardContext queryShardContext = queryShardContextProvider.apply(shardId);
-            String templateResult = SecurityQueryTemplateEvaluator.evaluateTemplate(bytesReference.utf8ToString(), scriptService, user);
-            try (XContentParser parser = XContentFactory.xContent(templateResult).createParser(queryShardContext.getXContentRegistry(),
-                    LoggingDeprecationHandler.INSTANCE, templateResult)) {
-                QueryBuilder queryBuilder = queryShardContext.parseInnerQueryBuilder(parser);
-                verifyRoleQuery(queryBuilder);
+            QueryBuilder queryBuilder = DLSRoleQueryValidator.evaluateAndVerifyRoleQuery(bytesReference, scriptService,
+                queryShardContext.getXContentRegistry(), user);
+            if (queryBuilder != null) {
                 failIfQueryUsesClient(queryBuilder, queryShardContext);
                 Query roleQuery = queryShardContext.toQuery(queryBuilder).query();
                 filter.add(roleQuery, SHOULD);
@@ -153,50 +140,6 @@ public final class DocumentPermissions {
         filter.setMinimumNumberShouldMatch(1);
     }
 
-    /**
-     * Checks whether the role query contains queries we know can't be used as DLS role query.
-     */
-    static void verifyRoleQuery(QueryBuilder queryBuilder) throws IOException {
-        if (queryBuilder instanceof TermsQueryBuilder) {
-            TermsQueryBuilder termsQueryBuilder = (TermsQueryBuilder) queryBuilder;
-            if (termsQueryBuilder.termsLookup() != null) {
-                throw new IllegalArgumentException("terms query with terms lookup isn't supported as part of a role query");
-            }
-        } else if (queryBuilder instanceof GeoShapeQueryBuilder) {
-            GeoShapeQueryBuilder geoShapeQueryBuilder = (GeoShapeQueryBuilder) queryBuilder;
-            if (geoShapeQueryBuilder.shape() == null) {
-                throw new IllegalArgumentException("geoshape query referring to indexed shapes isn't support as part of a role query");
-            }
-        } else if (queryBuilder.getName().equals("percolate")) {
-            // actually only if percolate query is referring to an existing document then this is problematic,
-            // a normal percolate query does work. However we can't check that here as this query builder is inside
-            // another module. So we don't allow the entire percolate query. I don't think users would ever use
-            // a percolate query as role query, so this restriction shouldn't prohibit anyone from using dls.
-            throw new IllegalArgumentException("percolate query isn't support as part of a role query");
-        } else if (queryBuilder.getName().equals("has_child")) {
-            throw new IllegalArgumentException("has_child query isn't support as part of a role query");
-        } else if (queryBuilder.getName().equals("has_parent")) {
-            throw new IllegalArgumentException("has_parent query isn't support as part of a role query");
-        } else if (queryBuilder instanceof BoolQueryBuilder) {
-            BoolQueryBuilder boolQueryBuilder = (BoolQueryBuilder) queryBuilder;
-            List<QueryBuilder> clauses = new ArrayList<>();
-            clauses.addAll(boolQueryBuilder.filter());
-            clauses.addAll(boolQueryBuilder.must());
-            clauses.addAll(boolQueryBuilder.mustNot());
-            clauses.addAll(boolQueryBuilder.should());
-            for (QueryBuilder clause : clauses) {
-                verifyRoleQuery(clause);
-            }
-        } else if (queryBuilder instanceof ConstantScoreQueryBuilder) {
-            verifyRoleQuery(((ConstantScoreQueryBuilder) queryBuilder).innerQuery());
-        } else if (queryBuilder instanceof FunctionScoreQueryBuilder) {
-            verifyRoleQuery(((FunctionScoreQueryBuilder) queryBuilder).query());
-        } else if (queryBuilder instanceof BoostingQueryBuilder) {
-            verifyRoleQuery(((BoostingQueryBuilder) queryBuilder).negativeQuery());
-            verifyRoleQuery(((BoostingQueryBuilder) queryBuilder).positiveQuery());
-        }
-    }
-
     /**
      * Fall back validation that verifies that queries during rewrite don't use
      * the client to make remote calls. In the case of DLS this can cause a dead
@@ -0,0 +1,182 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.security.authz.support;
+
+import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParseException;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.query.AbstractQueryBuilder;
+import org.elasticsearch.index.query.BoolQueryBuilder;
+import org.elasticsearch.index.query.BoostingQueryBuilder;
+import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
+import org.elasticsearch.index.query.GeoShapeQueryBuilder;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.TermsQueryBuilder;
+import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
+import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
+import org.elasticsearch.xpack.core.security.user.User;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * This class helps in evaluating the query field if it is template,
+ * validating the query and checking if the query type is allowed to be used in DLS role query.
+ */
+public final class DLSRoleQueryValidator {
+
+    private DLSRoleQueryValidator() {
+    }
+
+    /**
+     * Validates the query field in the {@link RoleDescriptor.IndicesPrivileges} only if it is not a template query.<br>
+     * It parses the query and builds the {@link QueryBuilder}, also checks if the query type is supported in DLS role query.
+     *
+     * @param indicesPrivileges {@link RoleDescriptor.IndicesPrivileges}
+     * @param xContentRegistry {@link NamedXContentRegistry} for finding named queries
+     */
+    public static void validateQueryField(RoleDescriptor.IndicesPrivileges[] indicesPrivileges,
+                                          NamedXContentRegistry xContentRegistry) {
+        if (indicesPrivileges != null) {
+            for (int i = 0; i < indicesPrivileges.length; i++) {
+                BytesReference query = indicesPrivileges[i].getQuery();
+                try {
+                    if (query != null) {
+                        if (isTemplateQuery(query, xContentRegistry)) {
+                            // skip template query, this requires runtime information like 'User' information.
+                            continue;
+                        }
+
+                        evaluateAndVerifyRoleQuery(query.utf8ToString(), xContentRegistry);
+                    }
+                } catch (ParsingException | IllegalArgumentException | IOException e) {
+                    throw new ElasticsearchParseException("failed to parse field 'query' for indices [" +
+                        Strings.arrayToCommaDelimitedString(indicesPrivileges[i].getIndices()) +
+                        "] at index privilege [" + i + "] of role descriptor", e);
+                }
+            }
+        }
+    }
+
+    private static boolean isTemplateQuery(BytesReference query, NamedXContentRegistry xContentRegistry) throws IOException {
+        try (XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry,
+            LoggingDeprecationHandler.INSTANCE, query.utf8ToString())) {
+            XContentParser.Token token = parser.nextToken();
+            if (token != XContentParser.Token.START_OBJECT) {
+                throw new XContentParseException(parser.getTokenLocation(), "expected [" + XContentParser.Token.START_OBJECT + "] but " +
+                    "found [" + token + "] instead");
+            }
+            token = parser.nextToken();
+            if (token != XContentParser.Token.FIELD_NAME) {
+                throw new XContentParseException(parser.getTokenLocation(), "expected [" + XContentParser.Token.FIELD_NAME + "] with " +
+                    "value a query name or 'template' but found [" + token + "] instead");
+            }
+            String fieldName = parser.currentName();
+            if ("template".equals(fieldName)) {
+                return true;
+            }
+        }
+
+        return false;
+    }
+
+    /**
+     * Evaluates the query if it is a template and then validates the query by parsing
+     * and building the {@link QueryBuilder}. It also checks if the query type is
+     * supported in DLS role query.
+     *
+     * @param query {@link BytesReference} query field from the role
+     * @param scriptService {@link ScriptService} used for evaluation of a template query
+     * @param xContentRegistry {@link NamedXContentRegistry} for finding named queries
+     * @param user {@link User} used when evaluation a template query
+     * @return {@link QueryBuilder} if the query is valid and allowed, in case {@link RoleDescriptor.IndicesPrivileges}
+     * * does not have a query field then it returns {@code null}.
+     */
+    @Nullable
+    public static QueryBuilder evaluateAndVerifyRoleQuery(BytesReference query, ScriptService scriptService,
+                                                          NamedXContentRegistry xContentRegistry, User user) {
+        if (query != null) {
+            String templateResult = SecurityQueryTemplateEvaluator.evaluateTemplate(query.utf8ToString(), scriptService,
+                user);
+            try {
+                return evaluateAndVerifyRoleQuery(templateResult, xContentRegistry);
+            } catch (ElasticsearchParseException | ParsingException | XContentParseException | IOException e) {
+                throw new ElasticsearchParseException("failed to parse field 'query' from the role descriptor", e);
+            }
+        }
+        return null;
+    }
+
+    @Nullable
+    private static QueryBuilder evaluateAndVerifyRoleQuery(String query, NamedXContentRegistry xContentRegistry) throws IOException {
+        if (query != null) {
+            try (XContentParser parser = XContentFactory.xContent(query).createParser(xContentRegistry,
+                LoggingDeprecationHandler.INSTANCE, query)) {
+                QueryBuilder queryBuilder = AbstractQueryBuilder.parseInnerQueryBuilder(parser);
+                verifyRoleQuery(queryBuilder);
+                return queryBuilder;
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Checks whether the role query contains queries we know can't be used as DLS role query.
+     *
+     * @param queryBuilder {@link QueryBuilder} for given query
+     */
+    // pkg protected for testing
+    static void verifyRoleQuery(QueryBuilder queryBuilder) {
+        if (queryBuilder instanceof TermsQueryBuilder) {
+            TermsQueryBuilder termsQueryBuilder = (TermsQueryBuilder) queryBuilder;
+            if (termsQueryBuilder.termsLookup() != null) {
+                throw new IllegalArgumentException("terms query with terms lookup isn't supported as part of a role query");
+            }
+        } else if (queryBuilder instanceof GeoShapeQueryBuilder) {
+            GeoShapeQueryBuilder geoShapeQueryBuilder = (GeoShapeQueryBuilder) queryBuilder;
+            if (geoShapeQueryBuilder.shape() == null) {
+                throw new IllegalArgumentException("geoshape query referring to indexed shapes isn't supported as part of a role query");
+            }
+        } else if (queryBuilder.getName().equals("percolate")) {
+            // actually only if percolate query is referring to an existing document then this is problematic,
+            // a normal percolate query does work. However we can't check that here as this query builder is inside
+            // another module. So we don't allow the entire percolate query. I don't think users would ever use
+            // a percolate query as role query, so this restriction shouldn't prohibit anyone from using dls.
+            throw new IllegalArgumentException("percolate query isn't supported as part of a role query");
+        } else if (queryBuilder.getName().equals("has_child")) {
+            throw new IllegalArgumentException("has_child query isn't supported as part of a role query");
+        } else if (queryBuilder.getName().equals("has_parent")) {
+            throw new IllegalArgumentException("has_parent query isn't supported as part of a role query");
+        } else if (queryBuilder instanceof BoolQueryBuilder) {
+            BoolQueryBuilder boolQueryBuilder = (BoolQueryBuilder) queryBuilder;
+            List<QueryBuilder> clauses = new ArrayList<>();
+            clauses.addAll(boolQueryBuilder.filter());
+            clauses.addAll(boolQueryBuilder.must());
+            clauses.addAll(boolQueryBuilder.mustNot());
+            clauses.addAll(boolQueryBuilder.should());
+            for (QueryBuilder clause : clauses) {
+                verifyRoleQuery(clause);
+            }
+        } else if (queryBuilder instanceof ConstantScoreQueryBuilder) {
+            verifyRoleQuery(((ConstantScoreQueryBuilder) queryBuilder).innerQuery());
+        } else if (queryBuilder instanceof FunctionScoreQueryBuilder) {
+            verifyRoleQuery(((FunctionScoreQueryBuilder) queryBuilder).query());
+        } else if (queryBuilder instanceof BoostingQueryBuilder) {
+            verifyRoleQuery(((BoostingQueryBuilder) queryBuilder).negativeQuery());
+            verifyRoleQuery(((BoostingQueryBuilder) queryBuilder).positiveQuery());
+        }
+    }
+}
@@ -44,10 +44,8 @@ public final class SecurityQueryTemplateEvaluator {
      * @return resultant query string after compiling and executing the script.
      * If the source does not contain template then it will return the query
      * source without any modifications.
-     * @throws IOException thrown when there is any error parsing the query
-     * string.
      */
-    public static String evaluateTemplate(final String querySource, final ScriptService scriptService, final User user) throws IOException {
+    public static String evaluateTemplate(final String querySource, final ScriptService scriptService, final User user) {
         // EMPTY is safe here because we never use namedObject
         try (XContentParser parser = XContentFactory.xContent(querySource).createParser(NamedXContentRegistry.EMPTY,
             LoggingDeprecationHandler.INSTANCE, querySource)) {
@@ -76,6 +74,8 @@ public final class SecurityQueryTemplateEvaluator {
             } else {
                 return querySource;
             }
+        } catch (IOException ioe) {
+            throw new ElasticsearchParseException("failed to parse query", ioe);
         }
     }
 
@@ -28,12 +28,10 @@ import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.query.ParsedQuery;
 import org.elasticsearch.index.query.QueryShardContext;
-import org.elasticsearch.index.query.TermQueryBuilder;
 import org.elasticsearch.index.query.TermsQueryBuilder;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.license.XPackLicenseState;
@@ -55,9 +53,7 @@ import static java.util.Collections.singleton;
 import static java.util.Collections.singletonMap;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.equalTo;
-import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.anyString;
-import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.when;
@@ -76,9 +72,6 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT
         final Authentication authentication = mock(Authentication.class);
         when(authentication.getUser()).thenReturn(mock(User.class));
         threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication);
-        IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true, new
-            FieldPermissions(),
-            DocumentPermissions.filteredBy(singleton(new BytesArray("{\"match_all\" : {}}"))));
         IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(shardId.getIndex(), Settings.EMPTY);
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
@@ -90,14 +83,6 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT
         DocumentSubsetBitsetCache bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY);
         XPackLicenseState licenseState = mock(XPackLicenseState.class);
         when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true);
-        SecurityIndexReaderWrapper wrapper = new SecurityIndexReaderWrapper(s -> queryShardContext,
-            bitsetCache, threadContext, licenseState, scriptService) {
-
-            @Override
-            protected IndicesAccessControl getIndicesAccessControl() {
-                return new IndicesAccessControl(true, singletonMap("_index", indexAccessControl));
-            }
-        };
 
         Directory directory = newDirectory();
         IndexWriter iw = new IndexWriter(
@@ -144,17 +129,32 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT
 
         DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory), shardId);
         for (int i = 0; i < numValues; i++) {
+            String termQuery = "{\"term\": {\"field\": \""+ values[i] + "\"} }";
+            IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true, new
+                FieldPermissions(),
+                DocumentPermissions.filteredBy(singleton(new BytesArray(termQuery))));
+            SecurityIndexReaderWrapper wrapper = new SecurityIndexReaderWrapper(s -> queryShardContext,
+                bitsetCache, threadContext, licenseState, scriptService) {
+
+                @Override
+                protected IndicesAccessControl getIndicesAccessControl() {
+                    return new IndicesAccessControl(true, singletonMap("_index", indexAccessControl));
+                }
+            };
+
             ParsedQuery parsedQuery = new ParsedQuery(new TermQuery(new Term("field", values[i])));
-            doReturn(new TermQueryBuilder("field", values[i])).when(queryShardContext).parseInnerQueryBuilder(any(XContentParser.class));
             when(queryShardContext.toQuery(new TermsQueryBuilder("field", values[i]))).thenReturn(parsedQuery);
 
             DirectoryReader wrappedDirectoryReader = wrapper.apply(directoryReader);
             IndexSearcher indexSearcher = new ContextIndexSearcher(wrappedDirectoryReader,
                 IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
 
             int expectedHitCount = valuesHitCount[i];
             logger.info("Going to verify hit count with query [{}] with expected total hits [{}]", parsedQuery.query(), expectedHitCount);
 
             TotalHitCountCollector countCollector = new TotalHitCountCollector();
             indexSearcher.search(new MatchAllDocsQuery(), countCollector);
 
             assertThat(countCollector.getTotalHits(), equalTo(expectedHitCount));
             assertThat(wrappedDirectoryReader.numDocs(), equalTo(expectedHitCount));
         }
@@ -6,23 +6,14 @@
 
 package org.elasticsearch.xpack.core.security.authz.permission;
 
-import org.apache.lucene.search.join.ScoreMode;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.query.BoolQueryBuilder;
-import org.elasticsearch.index.query.BoostingQueryBuilder;
-import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
-import org.elasticsearch.index.query.GeoShapeQueryBuilder;
-import org.elasticsearch.index.query.MatchAllQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryRewriteContext;
 import org.elasticsearch.index.query.TermsQueryBuilder;
-import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
 import org.elasticsearch.indices.TermsLookup;
-import org.elasticsearch.join.query.HasChildQueryBuilder;
-import org.elasticsearch.join.query.HasParentQueryBuilder;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
@@ -68,44 +59,6 @@ public class DocumentPermissionsTests extends ESTestCase {
         assertThat(ae.getMessage(), containsString("nested scoping for document permissions is not permitted"));
     }
 
-    public void testVerifyRoleQuery() throws Exception {
-        QueryBuilder queryBuilder1 = new TermsQueryBuilder("field", "val1", "val2");
-        DocumentPermissions.verifyRoleQuery(queryBuilder1);
-
-        QueryBuilder queryBuilder2 = new TermsQueryBuilder("field", new TermsLookup("_index", "_type", "_id", "_path"));
-        Exception e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder2));
-        assertThat(e.getMessage(), equalTo("terms query with terms lookup isn't supported as part of a role query"));
-
-        QueryBuilder queryBuilder3 = new GeoShapeQueryBuilder("field", "_id", "_type");
-        e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder3));
-        assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query"));
-
-        QueryBuilder queryBuilder4 = new HasChildQueryBuilder("_type", new MatchAllQueryBuilder(), ScoreMode.None);
-        e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder4));
-        assertThat(e.getMessage(), equalTo("has_child query isn't support as part of a role query"));
-
-        QueryBuilder queryBuilder5 = new HasParentQueryBuilder("_type", new MatchAllQueryBuilder(), false);
-        e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder5));
-        assertThat(e.getMessage(), equalTo("has_parent query isn't support as part of a role query"));
-
-        QueryBuilder queryBuilder6 = new BoolQueryBuilder().must(new GeoShapeQueryBuilder("field", "_id", "_type"));
-        e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder6));
-        assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query"));
-
-        QueryBuilder queryBuilder7 = new ConstantScoreQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type"));
-        e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder7));
-        assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query"));
-
-        QueryBuilder queryBuilder8 = new FunctionScoreQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type"));
-        e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder8));
-        assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query"));
-
-        QueryBuilder queryBuilder9 = new BoostingQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type"),
-            new MatchAllQueryBuilder());
-        e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder9));
-        assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query"));
-    }
-
     public void testFailIfQueryUsesClient() throws Exception {
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
@@ -0,0 +1,63 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.security.authz.support;
+
+import org.apache.lucene.search.join.ScoreMode;
+import org.elasticsearch.index.query.BoolQueryBuilder;
+import org.elasticsearch.index.query.BoostingQueryBuilder;
+import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
+import org.elasticsearch.index.query.GeoShapeQueryBuilder;
+import org.elasticsearch.index.query.MatchAllQueryBuilder;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.TermsQueryBuilder;
+import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
+import org.elasticsearch.indices.TermsLookup;
+import org.elasticsearch.join.query.HasChildQueryBuilder;
+import org.elasticsearch.join.query.HasParentQueryBuilder;
+import org.elasticsearch.test.ESTestCase;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class DLSRoleQueryValidatorTests extends ESTestCase {
+
+    public void testVerifyRoleQuery() throws Exception {
+        QueryBuilder queryBuilder1 = new TermsQueryBuilder("field", "val1", "val2");
+        DLSRoleQueryValidator.verifyRoleQuery(queryBuilder1);
+
+        QueryBuilder queryBuilder2 = new TermsQueryBuilder("field", new TermsLookup("_index", "_id", "_path"));
+        Exception e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder2));
+        assertThat(e.getMessage(), equalTo("terms query with terms lookup isn't supported as part of a role query"));
+
+        QueryBuilder queryBuilder3 = new GeoShapeQueryBuilder("field", "_id", "_type");
+        e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder3));
+        assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query"));
+
+        QueryBuilder queryBuilder4 = new HasChildQueryBuilder("_type", new MatchAllQueryBuilder(), ScoreMode.None);
+        e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder4));
+        assertThat(e.getMessage(), equalTo("has_child query isn't supported as part of a role query"));
+
+        QueryBuilder queryBuilder5 = new HasParentQueryBuilder("_type", new MatchAllQueryBuilder(), false);
+        e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder5));
+        assertThat(e.getMessage(), equalTo("has_parent query isn't supported as part of a role query"));
+
+        QueryBuilder queryBuilder6 = new BoolQueryBuilder().must(new GeoShapeQueryBuilder("field", "_id", "_type"));
+        e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder6));
+        assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query"));
+
+        QueryBuilder queryBuilder7 = new ConstantScoreQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type"));
+        e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder7));
+        assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query"));
+
+        QueryBuilder queryBuilder8 = new FunctionScoreQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type"));
+        e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder8));
+        assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query"));
+
+        QueryBuilder queryBuilder9 = new BoostingQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type"),
+            new MatchAllQueryBuilder());
+        e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder9));
+        assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query"));
+    }
+}
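For a quick sense of the accept/reject behaviour the test above exercises, here is a sketch only; it has to sit in the validator's package because verifyRoleQuery is package-private, and the field names and lookup coordinates are made up for illustration.

package org.elasticsearch.xpack.core.security.authz.support;

import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.indices.TermsLookup;

final class VerifyRoleQuerySketch {

    static void demo() {
        // A self-contained query is accepted as a DLS role query.
        QueryBuilder inline = new TermsQueryBuilder("group", "engineering");
        DLSRoleQueryValidator.verifyRoleQuery(inline);

        // A terms-lookup query would have to fetch another document at search time, so it is
        // rejected with "terms query with terms lookup isn't supported as part of a role query".
        QueryBuilder lookup = new TermsQueryBuilder("group", new TermsLookup("groups", "doc-1", "allowed.groups"));
        try {
            DLSRoleQueryValidator.verifyRoleQuery(lookup);
        } catch (IllegalArgumentException expected) {
            // rejected as expected
        }
    }

    private VerifyRoleQuerySketch() {
    }
}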
@@ -388,7 +388,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
                                                NamedXContentRegistry xContentRegistry, Environment environment,
                                                NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) {
         try {
-            return createComponents(client, threadPool, clusterService, resourceWatcherService, scriptService);
+            return createComponents(client, threadPool, clusterService, resourceWatcherService, scriptService, xContentRegistry);
         } catch (final Exception e) {
             throw new IllegalStateException("security initialization failed", e);
         }
@@ -396,7 +396,8 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
 
     // pkg private for testing - tests want to pass in their set of extensions hence we are not using the extension service directly
     Collection<Object> createComponents(Client client, ThreadPool threadPool, ClusterService clusterService,
-                                        ResourceWatcherService resourceWatcherService, ScriptService scriptService) throws Exception {
+                                        ResourceWatcherService resourceWatcherService, ScriptService scriptService,
+                                        NamedXContentRegistry xContentRegistry) throws Exception {
         if (enabled == false) {
             return Collections.emptyList();
         }
@@ -451,7 +452,8 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
 
         dlsBitsetCache.set(new DocumentSubsetBitsetCache(settings));
         final FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(settings);
-        final FileRolesStore fileRolesStore = new FileRolesStore(settings, env, resourceWatcherService, getLicenseState());
+        final FileRolesStore fileRolesStore = new FileRolesStore(settings, env, resourceWatcherService, getLicenseState(),
+            xContentRegistry);
         final NativeRolesStore nativeRolesStore = new NativeRolesStore(settings, client, getLicenseState(), securityIndex.get());
         final ReservedRolesStore reservedRolesStore = new ReservedRolesStore();
         List<BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>>> rolesProviders = new ArrayList<>();
@@ -6,11 +6,13 @@
 
 package org.elasticsearch.xpack.security.action;
 
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.security.SecurityContext;
@@ -18,6 +20,8 @@ import org.elasticsearch.xpack.core.security.action.CreateApiKeyAction;
 import org.elasticsearch.xpack.core.security.action.CreateApiKeyRequest;
 import org.elasticsearch.xpack.core.security.action.CreateApiKeyResponse;
 import org.elasticsearch.xpack.core.security.authc.Authentication;
+import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
+import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator;
 import org.elasticsearch.xpack.security.authc.ApiKeyService;
 import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore;
 
@@ -32,14 +36,16 @@ public final class TransportCreateApiKeyAction extends HandledTransportAction<Cr
     private final ApiKeyService apiKeyService;
     private final SecurityContext securityContext;
     private final CompositeRolesStore rolesStore;
+    private final NamedXContentRegistry xContentRegistry;
 
     @Inject
     public TransportCreateApiKeyAction(TransportService transportService, ActionFilters actionFilters, ApiKeyService apiKeyService,
-                                       SecurityContext context, CompositeRolesStore rolesStore) {
+                                       SecurityContext context, CompositeRolesStore rolesStore, NamedXContentRegistry xContentRegistry) {
         super(CreateApiKeyAction.NAME, transportService, actionFilters, (Writeable.Reader<CreateApiKeyRequest>) CreateApiKeyRequest::new);
         this.apiKeyService = apiKeyService;
         this.securityContext = context;
         this.rolesStore = rolesStore;
+        this.xContentRegistry = xContentRegistry;
     }
 
     @Override
@@ -49,7 +55,17 @@ public final class TransportCreateApiKeyAction extends HandledTransportAction<Cr
             listener.onFailure(new IllegalStateException("authentication is required"));
         } else {
             rolesStore.getRoleDescriptors(new HashSet<>(Arrays.asList(authentication.getUser().roles())),
-                ActionListener.wrap(roleDescriptors -> apiKeyService.createApiKey(authentication, request, roleDescriptors, listener),
+                ActionListener.wrap(roleDescriptors -> {
+                        for (RoleDescriptor rd : roleDescriptors) {
+                            try {
+                                DLSRoleQueryValidator.validateQueryField(rd.getIndicesPrivileges(), xContentRegistry);
+                            } catch (ElasticsearchException | IllegalArgumentException e) {
+                                listener.onFailure(e);
+                                return;
+                            }
+                        }
+                        apiKeyService.createApiKey(authentication, request, roleDescriptors, listener);
+                    },
                     listener::onFailure));
         }
     }
@@ -5,26 +5,32 @@
  */
 package org.elasticsearch.xpack.security.action.role;
 
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.security.action.role.PutRoleAction;
 import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest;
 import org.elasticsearch.xpack.core.security.action.role.PutRoleResponse;
 import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore;
+import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator;
 import org.elasticsearch.xpack.security.authz.store.NativeRolesStore;
 
 public class TransportPutRoleAction extends HandledTransportAction<PutRoleRequest, PutRoleResponse> {
 
     private final NativeRolesStore rolesStore;
+    private final NamedXContentRegistry xContentRegistry;
 
     @Inject
-    public TransportPutRoleAction(ActionFilters actionFilters, NativeRolesStore rolesStore, TransportService transportService) {
+    public TransportPutRoleAction(ActionFilters actionFilters, NativeRolesStore rolesStore, TransportService transportService,
+                                  NamedXContentRegistry xContentRegistry) {
         super(PutRoleAction.NAME, transportService, actionFilters, PutRoleRequest::new);
         this.rolesStore = rolesStore;
+        this.xContentRegistry = xContentRegistry;
     }
 
     @Override
@@ -35,6 +41,13 @@ public class TransportPutRoleAction extends HandledTransportAction<PutRoleReques
             return;
         }
 
+        try {
+            DLSRoleQueryValidator.validateQueryField(request.roleDescriptor().getIndicesPrivileges(), xContentRegistry);
+        } catch (ElasticsearchException | IllegalArgumentException e) {
+            listener.onFailure(e);
+            return;
+        }
+
         rolesStore.putRole(request, request.roleDescriptor(), new ActionListener<Boolean>() {
             @Override
             public void onResponse(Boolean created) {
@@ -8,10 +8,13 @@ package org.elasticsearch.xpack.security.action.user;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.core.security.SecurityContext;
 import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction;
 import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest;
 import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse;
@@ -36,15 +39,20 @@ public class TransportHasPrivilegesAction extends HandledTransportAction<HasPriv
     private final ThreadPool threadPool;
     private final AuthorizationService authorizationService;
     private final NativePrivilegeStore privilegeStore;
+    private final SecurityContext securityContext;
+    private final NamedXContentRegistry xContentRegistry;
 
     @Inject
     public TransportHasPrivilegesAction(ThreadPool threadPool, TransportService transportService,
                                         ActionFilters actionFilters, AuthorizationService authorizationService,
-                                        NativePrivilegeStore privilegeStore) {
+                                        NativePrivilegeStore privilegeStore, SecurityContext context,
+                                        NamedXContentRegistry xContentRegistry) {
         super(HasPrivilegesAction.NAME, transportService, actionFilters, HasPrivilegesRequest::new);
         this.threadPool = threadPool;
         this.authorizationService = authorizationService;
         this.privilegeStore = privilegeStore;
+        this.xContentRegistry = xContentRegistry;
+        this.securityContext = context;
     }
 
     @Override
@@ -58,6 +66,18 @@ public class TransportHasPrivilegesAction extends HandledTransportAction<HasPriv
             return;
         }
 
+        final RoleDescriptor.IndicesPrivileges[] indicesPrivileges = request.indexPrivileges();
+        if (indicesPrivileges != null) {
+            for (int i = 0; i < indicesPrivileges.length; i++) {
+                BytesReference query = indicesPrivileges[i].getQuery();
+                if (query != null) {
+                    listener.onFailure(
+                        new IllegalArgumentException("users may only check the index privileges without any DLS role query"));
+                    return;
+                }
+            }
+        }
+
         resolveApplicationPrivileges(request, ActionListener.wrap(applicationPrivilegeDescriptors ->
             authorizationService.checkPrivileges(authentication, request, applicationPrivilegeDescriptors, listener),
             listener::onFailure));
@@ -323,7 +323,7 @@ public class ESNativeRealmMigrateTool extends LoggingAwareMultiCommand {
         }
         terminal.println("importing roles from [" + rolesFile + "]...");
         Logger logger = getTerminalLogger(terminal);
-        Map<String, RoleDescriptor> roles = FileRolesStore.parseRoleDescriptors(rolesFile, logger, true, Settings.EMPTY);
+        Map<String, RoleDescriptor> roles = FileRolesStore.parseRoleDescriptors(rolesFile, logger, true, Settings.EMPTY, null);
         Set<String> existingRoles;
         try {
             existingRoles = getRolesThatExist(terminal, env.settings(), env, options);
@@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.logging.log4j.util.Supplier;
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.common.Nullable;
@@ -29,6 +30,7 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
 import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges;
 import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore;
 import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult;
+import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator;
 import org.elasticsearch.xpack.core.security.support.NoOpLogger;
 import org.elasticsearch.xpack.core.security.support.Validation;

@@ -61,27 +63,30 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<Ro
     private final Settings settings;
     private final Path file;
     private final XPackLicenseState licenseState;
+    private final NamedXContentRegistry xContentRegistry;
     private final List<Consumer<Set<String>>> listeners = new ArrayList<>();

     private volatile Map<String, RoleDescriptor> permissions;

-    public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, XPackLicenseState licenseState)
+    public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, XPackLicenseState licenseState,
+                          NamedXContentRegistry xContentRegistry)
             throws IOException {
-        this(settings, env, watcherService, null, licenseState);
+        this(settings, env, watcherService, null, licenseState, xContentRegistry);
     }

     FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, Consumer<Set<String>> listener,
-                   XPackLicenseState licenseState) throws IOException {
+                   XPackLicenseState licenseState, NamedXContentRegistry xContentRegistry) throws IOException {
         this.settings = settings;
         this.file = resolveFile(env);
         if (listener != null) {
             listeners.add(listener);
         }
         this.licenseState = licenseState;
+        this.xContentRegistry = xContentRegistry;
         FileWatcher watcher = new FileWatcher(file.getParent());
         watcher.addListener(new FileListener());
         watcherService.add(watcher, ResourceWatcherService.Frequency.HIGH);
-        permissions = parseFile(file, logger, settings, licenseState);
+        permissions = parseFile(file, logger, settings, licenseState, xContentRegistry);
     }

@@ -150,15 +155,17 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<Ro
     }

     public static Set<String> parseFileForRoleNames(Path path, Logger logger) {
-        return parseRoleDescriptors(path, logger, false, Settings.EMPTY).keySet();
+        // EMPTY is safe here because we never use namedObject as we are just parsing role names
+        return parseRoleDescriptors(path, logger, false, Settings.EMPTY, NamedXContentRegistry.EMPTY).keySet();
     }

-    public static Map<String, RoleDescriptor> parseFile(Path path, Logger logger, Settings settings, XPackLicenseState licenseState) {
-        return parseFile(path, logger, true, settings, licenseState);
+    public static Map<String, RoleDescriptor> parseFile(Path path, Logger logger, Settings settings, XPackLicenseState licenseState,
+                                                        NamedXContentRegistry xContentRegistry) {
+        return parseFile(path, logger, true, settings, licenseState, xContentRegistry);
     }

-    public static Map<String, RoleDescriptor> parseFile(Path path, Logger logger, boolean resolvePermission,
-                                                        Settings settings, XPackLicenseState licenseState) {
+    public static Map<String, RoleDescriptor> parseFile(Path path, Logger logger, boolean resolvePermission, Settings settings,
+                                                        XPackLicenseState licenseState, NamedXContentRegistry xContentRegistry) {
         if (logger == null) {
             logger = NoOpLogger.INSTANCE;
         }
@@ -170,7 +177,7 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<Ro
         List<String> roleSegments = roleSegments(path);
         final boolean flsDlsLicensed = licenseState.isDocumentAndFieldLevelSecurityAllowed();
         for (String segment : roleSegments) {
-            RoleDescriptor descriptor = parseRoleDescriptor(segment, path, logger, resolvePermission, settings);
+            RoleDescriptor descriptor = parseRoleDescriptor(segment, path, logger, resolvePermission, settings, xContentRegistry);
             if (descriptor != null) {
                 if (ReservedRolesStore.isReserved(descriptor.getName())) {
                     logger.warn("role [{}] is reserved. the relevant role definition in the mapping file will be ignored",
@@ -202,7 +209,8 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<Ro
         return unmodifiableMap(roles);
     }

-    public static Map<String, RoleDescriptor> parseRoleDescriptors(Path path, Logger logger, boolean resolvePermission, Settings settings) {
+    public static Map<String, RoleDescriptor> parseRoleDescriptors(Path path, Logger logger, boolean resolvePermission, Settings settings,
+                                                                   NamedXContentRegistry xContentRegistry) {
         if (logger == null) {
             logger = NoOpLogger.INSTANCE;
         }
@@ -213,7 +221,7 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<Ro
         try {
             List<String> roleSegments = roleSegments(path);
             for (String segment : roleSegments) {
-                RoleDescriptor rd = parseRoleDescriptor(segment, path, logger, resolvePermission, settings);
+                RoleDescriptor rd = parseRoleDescriptor(segment, path, logger, resolvePermission, settings, xContentRegistry);
                 if (rd != null) {
                     roles.put(rd.getName(), rd);
                 }
@@ -231,12 +239,12 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<Ro
     }

     @Nullable
-    static RoleDescriptor parseRoleDescriptor(String segment, Path path, Logger logger, boolean resolvePermissions, Settings settings) {
+    static RoleDescriptor parseRoleDescriptor(String segment, Path path, Logger logger, boolean resolvePermissions, Settings settings,
+                                              NamedXContentRegistry xContentRegistry) {
         String roleName = null;
         try {
-            // EMPTY is safe here because we never use namedObject
             XContentParser parser = YamlXContent.yamlXContent
-                .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, segment);
+                .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, segment);
             XContentParser.Token token = parser.nextToken();
             if (token == XContentParser.Token.START_OBJECT) {
                 token = parser.nextToken();
@@ -258,7 +266,7 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<Ro
                     // we pass true as last parameter because we do not want to reject files if field permissions
                     // are given in 2.x syntax
                     RoleDescriptor descriptor = RoleDescriptor.parse(roleName, parser, true);
-                    return checkDescriptor(descriptor, path, logger, settings);
+                    return checkDescriptor(descriptor, path, logger, settings, xContentRegistry);
                 } else {
                     logger.error("invalid role definition [{}] in roles file [{}]. skipping role...", roleName, path.toAbsolutePath());
                     return null;
@@ -295,16 +303,25 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<Ro
     }

     @Nullable
-    private static RoleDescriptor checkDescriptor(RoleDescriptor descriptor, Path path, Logger logger, Settings settings) {
+    private static RoleDescriptor checkDescriptor(RoleDescriptor descriptor, Path path, Logger logger, Settings settings,
+                                                  NamedXContentRegistry xContentRegistry) {
         String roleName = descriptor.getName();
         // first check if FLS/DLS is enabled on the role...
-        for (RoleDescriptor.IndicesPrivileges privilege : descriptor.getIndicesPrivileges()) {
-            if ((privilege.getQuery() != null || privilege.getGrantedFields() != null || privilege.getDeniedFields() != null)
-                && XPackSettings.DLS_FLS_ENABLED.get(settings) == false) {
+        if (descriptor.isUsingDocumentOrFieldLevelSecurity()) {
+            if (XPackSettings.DLS_FLS_ENABLED.get(settings) == false) {
                 logger.error("invalid role definition [{}] in roles file [{}]. document and field level security is not " +
                     "enabled. set [{}] to [true] in the configuration file. skipping role...", roleName, path
                     .toAbsolutePath(), XPackSettings.DLS_FLS_ENABLED.getKey());
                 return null;
+            } else if (xContentRegistry != null) {
+                try {
+                    DLSRoleQueryValidator.validateQueryField(descriptor.getIndicesPrivileges(), xContentRegistry);
+                } catch (ElasticsearchException | IllegalArgumentException e) {
+                    logger.error((Supplier<?>) () -> new ParameterizedMessage(
+                        "invalid role definition [{}] in roles file [{}]. failed to validate query field. skipping role...", roleName,
+                        path.toAbsolutePath()), e);
+                    return null;
+                }
             }
         }
         return descriptor;
@@ -350,7 +367,7 @@ public class FileRolesStore implements BiConsumer<Set<String>, ActionListener<Ro
         if (file.equals(FileRolesStore.this.file)) {
             final Map<String, RoleDescriptor> previousPermissions = permissions;
             try {
-                permissions = parseFile(file, logger, settings, licenseState);
+                permissions = parseFile(file, logger, settings, licenseState, xContentRegistry);
                 logger.info("updated roles (roles file [{}] {})", file.toAbsolutePath(),
                     Files.exists(file) ? "changed" : "removed");
             } catch (Exception e) {
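As a reference point, a minimal sketch of how the file-store validation above is driven (a sketch only; the class and method names mirror this change, while the registry contents and surrounding wiring are assumptions): a NamedXContentRegistry that knows the permitted query parsers is handed to the store, and DLSRoleQueryValidator rejects malformed or prohibited role queries while the role is being loaded.

    // registry with just match_all registered, as the tests in this change do
    NamedXContentRegistry registry = new NamedXContentRegistry(Collections.singletonList(
        new NamedXContentRegistry.Entry(QueryBuilder.class,
            new ParseField(MatchAllQueryBuilder.NAME), (p, c) -> MatchAllQueryBuilder.fromXContent(p))));
    try {
        // throws ElasticsearchException or IllegalArgumentException for an invalid DLS query
        DLSRoleQueryValidator.validateQueryField(descriptor.getIndicesPrivileges(), registry);
    } catch (ElasticsearchException | IllegalArgumentException e) {
        // the offending role is skipped by the file store rather than failing the whole roles file
    }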
@@ -128,7 +128,8 @@ public class SecurityTests extends ESTestCase {
         Client client = mock(Client.class);
         when(client.threadPool()).thenReturn(threadPool);
         when(client.settings()).thenReturn(settings);
-        return security.createComponents(client, threadPool, clusterService, mock(ResourceWatcherService.class), mock(ScriptService.class));
+        return security.createComponents(client, threadPool, clusterService, mock(ResourceWatcherService.class), mock(ScriptService.class),
+            xContentRegistry());
     }

     private static <T> T findComponent(Class<T> type, Collection<Object> components) {
@@ -32,4 +32,4 @@ public class PutRoleBuilderTests extends ESTestCase {
                 "[role1], use [\"field_security\": {\"grant\":[...],\"except\":[...]}] instead"));
         }
     }
 }
@@ -5,10 +5,21 @@
  */
 package org.elasticsearch.xpack.security.action.role;

+import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.ElasticsearchSecurityException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.index.query.MatchAllQueryBuilder;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.TermQueryBuilder;
+import org.elasticsearch.join.query.HasChildQueryBuilder;
+import org.elasticsearch.join.query.HasParentQueryBuilder;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.transport.Transport;
@@ -22,6 +33,7 @@ import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;

 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.concurrent.atomic.AtomicReference;

@@ -41,12 +53,26 @@ import static org.mockito.Mockito.verifyZeroInteractions;

 public class TransportPutRoleActionTests extends ESTestCase {

+    @Override
+    protected NamedXContentRegistry xContentRegistry() {
+        return new NamedXContentRegistry(Arrays.asList(
+            new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(MatchAllQueryBuilder.NAME),
+                (p, c) -> MatchAllQueryBuilder.fromXContent(p)),
+            new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(HasChildQueryBuilder.NAME),
+                (p, c) -> HasChildQueryBuilder.fromXContent(p)),
+            new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(HasParentQueryBuilder.NAME),
+                (p, c) -> HasParentQueryBuilder.fromXContent(p)),
+            new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(TermQueryBuilder.NAME),
+                (p, c) -> TermQueryBuilder.fromXContent(p))));
+    }
+
     public void testReservedRole() {
         final String roleName = randomFrom(new ArrayList<>(ReservedRolesStore.names()));
         NativeRolesStore rolesStore = mock(NativeRolesStore.class);
         TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null,
             TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
-        TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService);
+        TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService,
+            xContentRegistry());

         PutRoleRequest request = new PutRoleRequest();
         request.name(roleName);
@@ -76,7 +102,8 @@ public class TransportPutRoleActionTests extends ESTestCase {
         NativeRolesStore rolesStore = mock(NativeRolesStore.class);
         TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null,
             TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
-        TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService);
+        TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService,
+            xContentRegistry());

         final boolean created = randomBoolean();
         PutRoleRequest request = new PutRoleRequest();
@@ -119,7 +146,8 @@ public class TransportPutRoleActionTests extends ESTestCase {
         NativeRolesStore rolesStore = mock(NativeRolesStore.class);
         TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null,
             TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
-        TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService);
+        TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService,
+            xContentRegistry());

         PutRoleRequest request = new PutRoleRequest();
         request.name(roleName);
@@ -154,4 +182,78 @@ public class TransportPutRoleActionTests extends ESTestCase {
         assertThat(throwableRef.get(), is(sameInstance(e)));
         verify(rolesStore, times(1)).putRole(eq(request), any(RoleDescriptor.class), any(ActionListener.class));
     }
+
+    public void testCreationOfRoleWithMalformedQueryJsonFails() {
+        NativeRolesStore rolesStore = mock(NativeRolesStore.class);
+        TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null,
+            TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
+        TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService,
+            xContentRegistry());
+        PutRoleRequest request = new PutRoleRequest();
+        request.name("test");
+        String[] malformedQueryJson = new String[]{"{ \"match_all\": { \"unknown_field\": \"\" } }",
+            "{ malformed JSON }",
+            "{ \"unknown\": {\"\"} }",
+            "{}"};
+        BytesReference query = new BytesArray(randomFrom(malformedQueryJson));
+        request.addIndex(new String[]{"idx1"}, new String[]{"read"}, null, null, query, randomBoolean());
+
+        final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
+        final AtomicReference<PutRoleResponse> responseRef = new AtomicReference<>();
+        action.doExecute(mock(Task.class), request, new ActionListener<PutRoleResponse>() {
+            @Override
+            public void onResponse(PutRoleResponse response) {
+                responseRef.set(response);
+            }
+
+            @Override
+            public void onFailure(Exception e) {
+                throwableRef.set(e);
+            }
+        });
+
+        assertThat(responseRef.get(), is(nullValue()));
+        assertThat(throwableRef.get(), is(notNullValue()));
+        Throwable t = throwableRef.get();
+        assertThat(t, instanceOf(ElasticsearchParseException.class));
+        assertThat(t.getMessage(), containsString("failed to parse field 'query' for indices [" +
+            Strings.arrayToCommaDelimitedString(new String[]{"idx1"}) +
+            "] at index privilege [0] of role descriptor"));
+    }
+
+    public void testCreationOfRoleWithUnsupportedQueryFails() throws Exception {
+        NativeRolesStore rolesStore = mock(NativeRolesStore.class);
+        TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null,
+            TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
+        TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService,
+            xContentRegistry());
+        PutRoleRequest request = new PutRoleRequest();
+        request.name("test");
+        String hasChildQuery = "{ \"has_child\": { \"type\": \"child\", \"query\": { \"match_all\": {} } } }";
+        String hasParentQuery = "{ \"has_parent\": { \"parent_type\": \"parent\", \"query\": { \"match_all\": {} } } }";
+        BytesReference query = new BytesArray(randomFrom(hasChildQuery, hasParentQuery));
+        request.addIndex(new String[]{"idx1"}, new String[]{"read"}, null, null, query, randomBoolean());
+
+        final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
+        final AtomicReference<PutRoleResponse> responseRef = new AtomicReference<>();
+        action.doExecute(mock(Task.class), request, new ActionListener<PutRoleResponse>() {
+            @Override
+            public void onResponse(PutRoleResponse response) {
+                responseRef.set(response);
+            }
+
+            @Override
+            public void onFailure(Exception e) {
+                throwableRef.set(e);
+            }
+        });
+
+        assertThat(responseRef.get(), is(nullValue()));
+        assertThat(throwableRef.get(), is(notNullValue()));
+        Throwable t = throwableRef.get();
+        assertThat(t, instanceOf(ElasticsearchParseException.class));
+        assertThat(t.getMessage(), containsString("failed to parse field 'query' for indices [" +
+            Strings.arrayToCommaDelimitedString(new String[]{"idx1"}) +
+            "] at index privilege [0] of role descriptor"));
+    }
 }
@@ -116,4 +116,4 @@ public class HasPrivilegesRequestBuilderTests extends ESTestCase {
         );
         assertThat(parseException.getMessage(), containsString("[cluster,index,applications] are missing"));
     }
 }
@@ -0,0 +1,78 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.action.user;
+
+import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.PlainActionFuture;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.TestThreadPool;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.core.security.SecurityContext;
+import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest;
+import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse;
+import org.elasticsearch.xpack.core.security.authc.Authentication;
+import org.elasticsearch.xpack.core.security.authc.AuthenticationField;
+import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
+import org.elasticsearch.xpack.core.security.user.User;
+import org.elasticsearch.xpack.security.authz.AuthorizationService;
+import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore;
+import org.junit.After;
+import org.junit.Before;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class TransportHasPrivilegesActionTests extends ESTestCase {
+    private ThreadPool threadPool;
+
+    @Before
+    public void createThreadPool() {
+        threadPool = new TestThreadPool("has privileges action tests");
+    }
+
+    @After
+    public void stopThreadPool() {
+        terminate(threadPool);
+    }
+
+    public void testHasPrivilegesRequestDoesNotAllowDLSRoleQueryBasedIndicesPrivileges() {
+        final ThreadContext threadContext = threadPool.getThreadContext();
+        final SecurityContext context = mock(SecurityContext.class);
+        final User user = new User("user-1", "superuser");
+        final Authentication authentication = new Authentication(user,
+            new Authentication.RealmRef("native", "default_native", "node1"), null);
+        when(context.getAuthentication()).thenReturn(authentication);
+        threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication);
+        final TransportHasPrivilegesAction transportHasPrivilegesAction = new TransportHasPrivilegesAction(threadPool,
+            mock(TransportService.class), mock(ActionFilters.class), mock(AuthorizationService.class), mock(NativePrivilegeStore.class),
+            context, xContentRegistry());
+
+        final HasPrivilegesRequest request = new HasPrivilegesRequest();
+        final RoleDescriptor.IndicesPrivileges[] indicesPrivileges = new RoleDescriptor.IndicesPrivileges[randomIntBetween(1, 5)];
+        for (int i = 0; i < indicesPrivileges.length; i++) {
+            indicesPrivileges[i] = RoleDescriptor.IndicesPrivileges.builder()
+                .privileges(randomFrom("read", "write"))
+                .indices(randomAlphaOfLengthBetween(2, 8))
+                .query(new BytesArray(randomAlphaOfLength(5)))
+                .build();
+        }
+        request.indexPrivileges(indicesPrivileges);
+        request.username("user-1");
+
+        final PlainActionFuture<HasPrivilegesResponse> listener = new PlainActionFuture<>();
+        transportHasPrivilegesAction.doExecute(mock(Task.class), request, listener);
+
+        final IllegalArgumentException ile = expectThrows(IllegalArgumentException.class, () -> listener.actionGet());
+        assertThat(ile, notNullValue());
+        assertThat(ile.getMessage(), containsString("users may only check the index privileges without any DLS role query"));
+    }
+}
@@ -131,7 +131,7 @@ public class ESNativeMigrateToolTests extends NativeRealmIntegTestCase {
                     .cluster("all", "none")
                     .runAs("root", "nobody")
                     .addIndices(new String[] { "index" }, new String[] { "read" }, new String[] { "body", "title" }, null,
-                        new BytesArray("{\"query\": {\"match_all\": {}}}"), randomBoolean())
+                        new BytesArray("{\"match_all\": {}}"), randomBoolean())
                     .get();
             addedRoles.add(rname);
         }
@@ -198,7 +198,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
                 .cluster("all", "none")
                 .runAs("root", "nobody")
                 .addIndices(new String[]{"index"}, new String[]{"read"}, new String[]{"body", "title"}, null,
-                    new BytesArray("{\"query\": {\"match_all\": {}}}"), randomBoolean())
+                    new BytesArray("{\"match_all\": {}}"), randomBoolean())
                 .metadata(metadata)
                 .get();
         logger.error("--> waiting for .security index");
@@ -215,13 +215,13 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
                 .cluster("all", "none")
                 .runAs("root", "nobody")
                 .addIndices(new String[]{"index"}, new String[]{"read"}, new String[]{"body", "title"}, null,
-                    new BytesArray("{\"query\": {\"match_all\": {}}}"), randomBoolean())
+                    new BytesArray("{\"match_all\": {}}"), randomBoolean())
                 .get();
         c.preparePutRole("test_role3")
                 .cluster("all", "none")
                 .runAs("root", "nobody")
                 .addIndices(new String[]{"index"}, new String[]{"read"}, new String[]{"body", "title"}, null,
-                    new BytesArray("{\"query\": {\"match_all\": {}}}"), randomBoolean())
+                    new BytesArray("{\"match_all\": {}}"), randomBoolean())
                 .get();

         logger.info("--> retrieving all roles");
@@ -61,7 +61,7 @@ public class RoleDescriptorTests extends ESTestCase {
                 .indices("i1", "i2")
                 .privileges("read")
                 .grantedFields("body", "title")
-                .query("{\"query\": {\"match_all\": {}}}")
+                .query("{\"match_all\": {}}")
                 .build()
         };
         final RoleDescriptor.ApplicationResourcePrivileges[] applicationPrivileges = {
@@ -82,7 +82,7 @@ public class RoleDescriptorTests extends ESTestCase {
         assertThat(descriptor.toString(), is("Role[name=test, cluster=[all,none]" +
             ", global=[{APPLICATION:manage:applications=app01,app02}]" +
             ", indicesPrivileges=[IndicesPrivileges[indices=[i1,i2], allowRestrictedIndices=[false], privileges=[read]" +
-            ", field_security=[grant=[body,title], except=null], query={\"query\": {\"match_all\": {}}}],]" +
+            ", field_security=[grant=[body,title], except=null], query={\"match_all\": {}}],]" +
             ", applicationPrivileges=[ApplicationResourcePrivileges[application=my_app, privileges=[read,write], resources=[*]],]" +
             ", runAs=[sudo], metadata=[{}]]"));
     }
@@ -94,7 +94,7 @@ public class RoleDescriptorTests extends ESTestCase {
                 .privileges("read")
                 .grantedFields("body", "title")
                 .allowRestrictedIndices(randomBoolean())
-                .query("{\"query\": {\"match_all\": {}}}")
+                .query("{\"match_all\": {}}")
                 .build()
         };
         final RoleDescriptor.ApplicationResourcePrivileges[] applicationPrivileges = {
@@ -136,7 +136,7 @@ public class RoleDescriptorTests extends ESTestCase {
             "\"p2\"]}, {\"names\": \"idx2\", \"allow_restricted_indices\": true, \"privileges\": [\"p3\"], \"field_security\": " +
             "{\"grant\": [\"f1\", \"f2\"]}}, {\"names\": " +
             "\"idx2\", \"allow_restricted_indices\": false," +
-            "\"privileges\": [\"p3\"], \"field_security\": {\"grant\": [\"f1\", \"f2\"]}, \"query\": \"{\\\"match_all\\\": {}}\"}]}";
+            "\"privileges\": [\"p3\"], \"field_security\": {\"grant\": [\"f1\", \"f2\"]}, \"query\": {\"match_all\": {}} }]}";
         rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON);
         assertEquals("test", rd.getName());
         assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges());
@@ -261,6 +261,18 @@ public class RoleDescriptorTests extends ESTestCase {
         assertNull(rd.getIndicesPrivileges()[0].getQuery());
     }

+    public void testParseNullQuery() throws Exception {
+        String json = "{\"cluster\":[\"a\", \"b\"], \"run_as\": [\"m\", \"n\"], \"index\": [{\"names\": [\"idx1\",\"idx2\"], " +
+            "\"privileges\": [\"p1\", \"p2\"], \"query\": null}]}";
+        RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(json), false, XContentType.JSON);
+        assertEquals("test", rd.getName());
+        assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges());
+        assertEquals(1, rd.getIndicesPrivileges().length);
+        assertArrayEquals(new String[] { "idx1", "idx2" }, rd.getIndicesPrivileges()[0].getIndices());
+        assertArrayEquals(new String[] { "m", "n" }, rd.getRunAs());
+        assertNull(rd.getIndicesPrivileges()[0].getQuery());
+    }
+
     public void testParseEmptyQueryUsingDeprecatedIndicesField() throws Exception {
         String json = "{\"cluster\":[\"a\", \"b\"], \"run_as\": [\"m\", \"n\"], \"indices\": [{\"names\": [\"idx1\",\"idx2\"], " +
             "\"privileges\": [\"p1\", \"p2\"], \"query\": \"\"}]}";
@@ -283,4 +295,5 @@ public class RoleDescriptorTests extends ESTestCase {
         assertEquals(1, parsed.getTransientMetadata().size());
         assertEquals(true, parsed.getTransientMetadata().get("enabled"));
     }
+
 }
@@ -9,9 +9,13 @@ import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.Logger;
 import org.apache.lucene.util.automaton.MinimizationOperations;
 import org.apache.lucene.util.automaton.Operations;
+import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.env.TestEnvironment;
+import org.elasticsearch.index.query.MatchAllQueryBuilder;
+import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.TestThreadPool;
@@ -45,6 +49,7 @@ import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;

+import static java.util.Collections.singletonList;
 import static org.hamcrest.Matchers.arrayContaining;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.containsString;
@@ -61,11 +66,17 @@ import static org.mockito.Mockito.when;

 public class FileRolesStoreTests extends ESTestCase {

+    @Override
+    protected NamedXContentRegistry xContentRegistry() {
+        return new NamedXContentRegistry(singletonList(new NamedXContentRegistry.Entry(QueryBuilder.class,
+            new ParseField(MatchAllQueryBuilder.NAME), (p, c) -> MatchAllQueryBuilder.fromXContent(p))));
+    }
+
     public void testParseFile() throws Exception {
         Path path = getDataPath("roles.yml");
         Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.builder()
                 .put(XPackSettings.DLS_FLS_ENABLED.getKey(), true)
-                .build(), new XPackLicenseState(Settings.EMPTY));
+                .build(), new XPackLicenseState(Settings.EMPTY), xContentRegistry());
         assertThat(roles, notNullValue());
         assertThat(roles.size(), is(9));

@@ -235,6 +246,8 @@ public class FileRolesStoreTests extends ESTestCase {
         assertTrue(group.getFieldPermissions().grantsAccessTo("boo"));
         assertTrue(group.getFieldPermissions().hasFieldLevelSecurity());
         assertThat(group.getQuery(), notNullValue());
+
+        assertThat(roles.get("role_query_invalid"), nullValue());
     }

     public void testParseFileWithFLSAndDLSDisabled() throws Exception {
@@ -244,14 +257,15 @@ public class FileRolesStoreTests extends ESTestCase {
         events.clear();
         Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.builder()
                 .put(XPackSettings.DLS_FLS_ENABLED.getKey(), false)
-                .build(), new XPackLicenseState(Settings.EMPTY));
+                .build(), new XPackLicenseState(Settings.EMPTY), xContentRegistry());
         assertThat(roles, notNullValue());
         assertThat(roles.size(), is(6));
         assertThat(roles.get("role_fields"), nullValue());
         assertThat(roles.get("role_query"), nullValue());
         assertThat(roles.get("role_query_fields"), nullValue());
+        assertThat(roles.get("role_query_invalid"), nullValue());

-        assertThat(events, hasSize(3));
+        assertThat(events, hasSize(4));
         assertThat(
             events.get(0),
             startsWith("invalid role definition [role_fields] in roles file [" + path.toAbsolutePath() +
@@ -262,6 +276,9 @@ public class FileRolesStoreTests extends ESTestCase {
         assertThat(events.get(2),
             startsWith("invalid role definition [role_query_fields] in roles file [" + path.toAbsolutePath() +
                 "]. document and field level security is not enabled."));
+        assertThat(events.get(3),
+            startsWith("invalid role definition [role_query_invalid] in roles file [" + path.toAbsolutePath() +
+                "]. document and field level security is not enabled."));
     }

     public void testParseFileWithFLSAndDLSUnlicensed() throws Exception {
@@ -271,7 +288,7 @@ public class FileRolesStoreTests extends ESTestCase {
         events.clear();
         XPackLicenseState licenseState = mock(XPackLicenseState.class);
         when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(false);
-        Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, licenseState);
+        Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, licenseState, xContentRegistry());
         assertThat(roles, notNullValue());
         assertThat(roles.size(), is(9));
         assertNotNull(roles.get("role_fields"));
@@ -295,7 +312,8 @@ public class FileRolesStoreTests extends ESTestCase {
     public void testDefaultRolesFile() throws Exception {
         // TODO we should add the config dir to the resources so we don't copy this stuff around...
         Path path = getDataPath("default_roles.yml");
-        Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY));
+        Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY),
+            xContentRegistry());
         assertThat(roles, notNullValue());
         assertThat(roles.size(), is(0));
     }
@@ -325,7 +343,7 @@ public class FileRolesStoreTests extends ESTestCase {
         FileRolesStore store = new FileRolesStore(settings, env, watcherService, roleSet -> {
             modifiedRoles.addAll(roleSet);
             latch.countDown();
-        }, new XPackLicenseState(Settings.EMPTY));
+        }, new XPackLicenseState(Settings.EMPTY), xContentRegistry());

         Set<RoleDescriptor> descriptors = store.roleDescriptors(Collections.singleton("role1"));
         assertThat(descriptors, notNullValue());
@@ -368,7 +386,7 @@ public class FileRolesStoreTests extends ESTestCase {
         store = new FileRolesStore(settings, env, watcherService, roleSet -> {
             truncatedFileRolesModified.addAll(roleSet);
             truncateLatch.countDown();
-        }, new XPackLicenseState(Settings.EMPTY));
+        }, new XPackLicenseState(Settings.EMPTY), xContentRegistry());

         final Set<String> allRolesPreTruncate = store.getAllRoleNames();
         try (BufferedWriter writer = Files.newBufferedWriter(tmp, StandardCharsets.UTF_8, StandardOpenOption.TRUNCATE_EXISTING)) {
@@ -391,7 +409,7 @@ public class FileRolesStoreTests extends ESTestCase {
         store = new FileRolesStore(settings, env, watcherService, roleSet -> {
             modifiedFileRolesModified.addAll(roleSet);
             modifyLatch.countDown();
-        }, new XPackLicenseState(Settings.EMPTY));
+        }, new XPackLicenseState(Settings.EMPTY), xContentRegistry());

         try (BufferedWriter writer = Files.newBufferedWriter(tmp, StandardCharsets.UTF_8, StandardOpenOption.TRUNCATE_EXISTING)) {
             writer.append("role5:").append(System.lineSeparator());
@@ -416,7 +434,8 @@ public class FileRolesStoreTests extends ESTestCase {
     public void testThatEmptyFileDoesNotResultInLoop() throws Exception {
         Path file = createTempFile();
         Files.write(file, Collections.singletonList("#"), StandardCharsets.UTF_8);
-        Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(file, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY));
+        Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(file, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY),
+            xContentRegistry());
         assertThat(roles.keySet(), is(empty()));
     }

@@ -425,7 +444,8 @@ public class FileRolesStoreTests extends ESTestCase {
         Logger logger = CapturingLogger.newCapturingLogger(Level.ERROR, null);
         List<String> entries = CapturingLogger.output(logger.getName(), Level.ERROR);
         entries.clear();
-        Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY));
+        Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY),
+            xContentRegistry());
         assertThat(roles.size(), is(1));
         assertThat(roles, hasKey("valid_role"));
         RoleDescriptor descriptor = roles.get("valid_role");
@@ -467,7 +487,8 @@ public class FileRolesStoreTests extends ESTestCase {
         List<String> events = CapturingLogger.output(logger.getName(), Level.ERROR);
         events.clear();
         Path path = getDataPath("reserved_roles.yml");
-        Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY));
+        Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY),
+            xContentRegistry());
         assertThat(roles, notNullValue());
         assertThat(roles.size(), is(1));

@@ -498,7 +519,8 @@ public class FileRolesStoreTests extends ESTestCase {
                 .put(XPackSettings.DLS_FLS_ENABLED.getKey(), flsDlsEnabled)
                 .build();
         Environment env = TestEnvironment.newEnvironment(settings);
-        FileRolesStore store = new FileRolesStore(settings, env, mock(ResourceWatcherService.class), new XPackLicenseState(Settings.EMPTY));
+        FileRolesStore store = new FileRolesStore(settings, env, mock(ResourceWatcherService.class), new XPackLicenseState(Settings.EMPTY),
+            xContentRegistry());

         Map<String, Object> usageStats = store.usageStats();

@@ -512,9 +534,10 @@ public class FileRolesStoreTests extends ESTestCase {
         Path path = getDataPath("roles2xformat.yml");
         byte[] bytes = Files.readAllBytes(path);
         String roleString = new String(bytes, Charset.defaultCharset());
-        RoleDescriptor role = FileRolesStore.parseRoleDescriptor(roleString, path, logger, true, Settings.EMPTY);
+        RoleDescriptor role = FileRolesStore.parseRoleDescriptor(roleString, path, logger, true, Settings.EMPTY, xContentRegistry());
         RoleDescriptor.IndicesPrivileges indicesPrivileges = role.getIndicesPrivileges()[0];
         assertThat(indicesPrivileges.getGrantedFields(), arrayContaining("foo", "boo"));
         assertNull(indicesPrivileges.getDeniedFields());
     }

 }
@@ -65,8 +65,16 @@ role_query_fields:
       privileges:
         - READ
       query:
-        match_all:
+        match_all: {}
       field_security:
         grant:
           - foo
           - boo
+
+role_query_invalid:
+  indices:
+    - names:
+        - 'query_idx'
+      privileges:
+        - READ
+      query: '{ "unknown": {} }'
@@ -5,7 +5,7 @@ role1:
       privileges:
         - READ
       query:
-        match_all:
+        match_all: {}
       fields:
         - foo
        - boo
@@ -7,7 +7,11 @@ setup:
       cluster.health:
         wait_for_status: yellow

+---
+"Test use prohibited query inside role query":
+
   - do:
+      catch: /terms query with terms lookup isn't supported as part of a role query/
       security.put_role:
         name: "role"
         body: >
@@ -24,49 +28,3 @@ setup:
             ]
           }

-  - do:
-      security.put_user:
-        username: "joe"
-        body: >
-          {
-            "password": "x-pack-test-password",
-            "roles" : [ "role" ]
-          }
-
----
-teardown:
-  - do:
-      security.delete_user:
-        username: "joe"
-        ignore: 404
-  - do:
-      security.delete_role:
-        name: "role"
-        ignore: 404
-
----
-"Test use prohibited query inside role query":
-
-  - do:
-      headers:
-        Authorization: "Basic am9lOngtcGFjay10ZXN0LXBhc3N3b3Jk"
-      index:
-        index: index
-        type: type
-        id: 1
-        body: >
-          {
-            "foo": "bar"
-          }
-
-  - do:
-      catch: /terms query with terms lookup isn't supported as part of a role query/
-      headers:
-        Authorization: "Basic am9lOngtcGFjay10ZXN0LXBhc3N3b3Jk"
-      search:
-        rest_total_hits_as_int: true
-        index: index
-        body: { "query" : { "match_all" : {} } }