Changed how privileges and permissions work
- Changed the `Privilege` class: moved away from an enum to typed constants, so `implies(...)` now only works between privileges of the same type.
- Introduced `Permission.Global` (replacing the generic `Compound`).
- Fixed `FileRolesStore` to use the new privileges & permissions.
- Changed the file format for `FileRolesStore`: instead of the `indices::privileges` array, the indices permissions are now defined as an object where the indices are the keys and the privileges are the values. This moves the configuration to pure YAML format (see the example below).
- Added an option on the authorization service to extract all indices & aliases permitted for a specific user + privilege.

Original commit: elastic/x-pack-elasticsearch@77afce767c
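For illustration, a minimal before/after sketch of the roles-file change (the role name, index patterns and privileges below are made up; the shape follows the old and new `FileRolesStore` parsers in this commit):

```yaml
# Before: each indices entry was a "<index name patterns>::<privileges>" string
customer_care:
  cluster: monitor
  indices:
    - "orders_.*,customers_.*::read"
---
# After: indices is a plain YAML object mapping index name patterns to privileges
customer_care:
  cluster: monitor
  indices:
    "orders_.*,customers_.*": read
```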
This commit is contained in: parent ed959d684a, commit 7ba3c3594e
@@ -44,6 +44,7 @@ public class InternalAuthenticationService extends AbstractComponent implements
     * request
     */
    @Override
    @SuppressWarnings("unchecked")
    public User authenticate(String action, TransportMessage<?> message) throws AuthenticationException {
        for (Realm realm : realms) {
            AuthenticationToken token = realm.token(message);

@@ -6,6 +6,8 @@
package org.elasticsearch.shield.authz;

import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.shield.authz.store.FileRolesStore;
import org.elasticsearch.shield.authz.store.RolesStore;

/**
 *

@@ -5,6 +5,7 @@
 */
package org.elasticsearch.shield.authz;

import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.shield.User;
import org.elasticsearch.transport.TransportRequest;

@@ -13,6 +14,24 @@ import org.elasticsearch.transport.TransportRequest;
 */
public interface AuthorizationService {

    /**
     * Returns all indices and aliases the given user is allowed to execute the given action on.
     *
     * @param user   The user
     * @param action The action
     */
    ImmutableList<String> authorizedIndicesAndAliases(User user, String action);

    /**
     * Verifies that the given user can execute the given request (and action). If the user doesn't
     * have the appropriate privileges for this action/request, an {@link AuthorizationException}
     * will be thrown.
     *
     * @param user    The user
     * @param action  The action
     * @param request The request
     * @throws AuthorizationException If the given user is not allowed to execute the given request
     */
    void authorize(User user, String action, TransportRequest request) throws AuthorizationException;

}

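A hedged usage sketch of the two interface methods above (the action string and the surrounding class are illustrative, not part of this commit):

```java
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.shield.User;
import org.elasticsearch.shield.authz.AuthorizationException;
import org.elasticsearch.shield.authz.AuthorizationService;
import org.elasticsearch.transport.TransportRequest;

class AuthorizationServiceSketch {

    // Lists what the user may touch for an action, then authorizes a concrete request.
    static ImmutableList<String> handle(AuthorizationService authzService, User user, TransportRequest request)
            throws AuthorizationException {
        // New in this change: every index and alias the user may run the action against
        ImmutableList<String> allowed = authzService.authorizedIndicesAndAliases(user, "indices:data/read/search");

        // Pre-existing contract: throws AuthorizationException if the user lacks the privilege
        authzService.authorize(user, "indices:data/read/search", request);
        return allowed;
    }
}
```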
@ -1,201 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.shield.authz;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.collect.ImmutableMap;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.Streams;
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.watcher.FileChangesListener;
|
||||
import org.elasticsearch.watcher.FileWatcher;
|
||||
import org.elasticsearch.watcher.ResourceWatcherService;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class FileRolesStore extends AbstractComponent implements RolesStore {
|
||||
|
||||
private static final Pattern COMMA_DELIM = Pattern.compile("\\s*,\\s*");
|
||||
|
||||
private final Path file;
|
||||
private final FileWatcher watcher;
|
||||
|
||||
private volatile ImmutableMap<String, Permission> roles;
|
||||
|
||||
@Inject
|
||||
public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService) {
|
||||
super(settings);
|
||||
file = resolveFile(componentSettings, env);
|
||||
roles = ImmutableMap.copyOf(parseFile(file, logger));
|
||||
watcher = new FileWatcher(file.getParent().toFile());
|
||||
watcher.addListener(new FileListener());
|
||||
watcherService.add(watcher);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Permission permission(String... roles) {
|
||||
Permission.Compound.Builder builder = Permission.compound();
|
||||
for (int i = 0; i < roles.length; i++) {
|
||||
Permission permissions = this.roles.get(roles[i]);
|
||||
if (permissions != null) {
|
||||
builder.add(permissions);
|
||||
}
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
public static Path resolveFile(Settings settings, Environment env) {
|
||||
String location = settings.get("file.roles");
|
||||
if (location == null) {
|
||||
return env.configFile().toPath().resolve(".roles.yml");
|
||||
}
|
||||
return Paths.get(location);
|
||||
}
|
||||
|
||||
public static Map<String, Permission> parseFile(Path path, @Nullable ESLogger logger) {
|
||||
if (!Files.exists(path)) {
|
||||
return ImmutableMap.of();
|
||||
}
|
||||
|
||||
ImmutableMap.Builder<String, Permission> roles = ImmutableMap.builder();
|
||||
try {
|
||||
byte[] content = Streams.copyToByteArray(path.toFile());
|
||||
XContentParser parser = XContentFactory.xContent(content).createParser(content);
|
||||
XContentParser.Token token;
|
||||
String currentFieldName = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_OBJECT && currentFieldName != null) {
|
||||
String roleName = currentFieldName;
|
||||
Permission.Compound.Builder builder = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if ("cluster".equals(currentFieldName)) {
|
||||
String[] privs;
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
privs = COMMA_DELIM.split(parser.text().trim());
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
List<String> list = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
if (list == null) {
|
||||
list = new ArrayList<>();
|
||||
}
|
||||
list.add(parser.text());
|
||||
}
|
||||
}
|
||||
privs = list != null ? list.toArray(new String[list.size()]) : Strings.EMPTY_ARRAY;
|
||||
} else {
|
||||
throw new ElasticsearchException("Invalid roles file format [" + path.toAbsolutePath() +
|
||||
"]. [cluster] field value can either be a string or a list of strings, but [" + token + "] was found instead");
|
||||
}
|
||||
if (builder == null) {
|
||||
builder = Permission.compound();
|
||||
}
|
||||
Privilege.Cluster cluster = Privilege.Cluster.resolve(privs);
|
||||
builder.add(Permission.cluster(cluster));
|
||||
} else if ("indices".equals(currentFieldName)) {
|
||||
if (token != XContentParser.Token.START_ARRAY) {
|
||||
|
||||
}
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
Permission.Index index = parseIndicesPermission(parser.text());
|
||||
if (index != null) {
|
||||
if (builder == null) {
|
||||
builder = Permission.compound();
|
||||
}
|
||||
builder.add(index);
|
||||
}
|
||||
} else {
|
||||
throw new ElasticsearchException("Invalid roles file format [" + path.toAbsolutePath() +
|
||||
"]. [indices] field value must be an array of indices-privileges mappings defined as a string" +
|
||||
" in the form <comma-separated list of index name patterns>::<comma-separated list of privileges> , but [" + token + "] was found instead");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
assert roleName != null;
|
||||
if (builder != null) {
|
||||
roles.put(roleName, builder.build());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return roles.build();
|
||||
|
||||
} catch (IOException ioe) {
|
||||
throw new ElasticsearchException("Failed to read roles file [" + path.toAbsolutePath() + "]", ioe);
|
||||
}
|
||||
}
|
||||
|
||||
private static Permission.Index parseIndicesPermission(String spec) {
|
||||
int i = spec.indexOf("::");
|
||||
if (i == 0) {
|
||||
throw new ElasticsearchException("Malformed index privileges entry [" + spec + "]. Missing indices name patterns list");
|
||||
}
|
||||
if (i < 0) {
|
||||
throw new ElasticsearchException("Malformed index privileges entry [" + spec + "]");
|
||||
}
|
||||
if (i == spec.length() - 2) {
|
||||
throw new ElasticsearchException("Malformed index privileges entry [" + spec + "]. Missing privileges list");
|
||||
}
|
||||
if (spec.indexOf("::", i+2) >= 0) {
|
||||
throw new ElasticsearchException("Malformed index privileges entry [" + spec + "]. There should only be a single \"::\" separator");
|
||||
}
|
||||
|
||||
String[] indices = COMMA_DELIM.split(spec.substring(0, i));
|
||||
|
||||
String[] privs = COMMA_DELIM.split(spec.substring(i+2));
|
||||
Privilege.Index index = Privilege.Index.resolve(privs);
|
||||
return Permission.index(index, indices);
|
||||
}
|
||||
|
||||
private class FileListener extends FileChangesListener {
|
||||
@Override
|
||||
public void onFileCreated(File file) {
|
||||
if (file.equals(FileRolesStore.this.file.toFile())) {
|
||||
roles = ImmutableMap.copyOf(parseFile(file.toPath(), logger));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFileDeleted(File file) {
|
||||
if (file.equals(FileRolesStore.this.file.toFile())) {
|
||||
roles = ImmutableMap.of();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFileChanged(File file) {
|
||||
if (file.equals(FileRolesStore.this.file.toFile())) {
|
||||
if (file.equals(FileRolesStore.this.file.toFile())) {
|
||||
roles = ImmutableMap.copyOf(parseFile(file.toPath(), logger));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -7,14 +7,20 @@ package org.elasticsearch.shield.authz;

import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.base.Predicate;
import org.elasticsearch.common.base.Predicates;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.shield.User;
import org.elasticsearch.shield.audit.AuditTrail;
import org.elasticsearch.shield.authz.store.RolesStore;
import org.elasticsearch.transport.TransportRequest;

import java.util.Iterator;

/**
 *
 */

@@ -33,16 +39,63 @@ public class InternalAuthorizationService extends AbstractComponent implements A
    }

    @Override
    public void authorize(User user, String action, TransportRequest request) throws AuthorizationException {
        Permission permission = rolesStore.permission(user.roles());
        MetaData metaData = clusterService.state().metaData();
        if (permission.check(action, request, metaData)) {
            if (auditTrail != null) {
                auditTrail.accessGranted(user, action, request);
            }
            return;
    @SuppressWarnings("unchecked")
    public ImmutableList<String> authorizedIndicesAndAliases(User user, String action) {
        String[] roles = user.roles();
        if (roles.length == 0) {
            return ImmutableList.of();
        }
        Predicate[] predicates = new Predicate[roles.length];
        for (int i = 0; i < roles.length; i++) {
            Permission.Global global = rolesStore.permission(roles[i]);
            predicates[i] = global.indices().allowedIndicesMatcher(action);
        }
        ImmutableList.Builder<String> indicesAndAliases = ImmutableList.builder();
        Predicate<String> predicate = Predicates.or(predicates);
        MetaData metaData = clusterService.state().metaData();
        for (String index : metaData.concreteAllIndices()) {
            if (predicate.apply(index)) {
                indicesAndAliases.add(index);
            }
        }
        for (Iterator<String> iter = metaData.getAliases().keysIt(); iter.hasNext();) {
            String alias = iter.next();
            if (predicate.apply(alias)) {
                indicesAndAliases.add(alias);
            }
        }
        return indicesAndAliases.build();
    }

    @Override
    public void authorize(User user, String action, TransportRequest request) throws AuthorizationException {
        String[] roles = user.roles();
        if (roles.length == 0) {
            deny(user, action, request);
        }

        MetaData metaData = clusterService.state().metaData();
        for (String role : roles) {
            Permission permission = rolesStore.permission(role);
            if (permission.check(action, request, metaData)) {
                grant(user, action, request);
                return;
            }
        }

        deny(user, action, request);
    }

    private void deny(User user, String action, TransportRequest request) {
        if (auditTrail != null) {
            auditTrail.accessDenied(user, action, request);
        }
        auditTrail.accessDenied(user, action, request);
        throw new AuthorizationException("Action [" + action + "] is unauthorized");
    }

    private void grant(User user, String action, TransportRequest request) {
        if (auditTrail != null) {
            auditTrail.accessGranted(user, action, request);
        }
    }
}

@ -5,160 +5,264 @@
|
|||
*/
|
||||
package org.elasticsearch.shield.authz;
|
||||
|
||||
import org.apache.lucene.util.automaton.Automaton;
|
||||
import org.apache.lucene.util.automaton.RegExp;
|
||||
import org.elasticsearch.action.CompositeIndicesRequest;
|
||||
import org.elasticsearch.action.IndicesRequest;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.common.base.Predicate;
|
||||
import org.elasticsearch.common.collect.ImmutableList;
|
||||
import org.elasticsearch.common.collect.Sets;
|
||||
import org.elasticsearch.shield.support.AutomatonPredicate;
|
||||
import org.elasticsearch.shield.support.Automatons;
|
||||
import org.elasticsearch.transport.TransportRequest;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* Represents a permission in the system. There are 3 types of permissions:
|
||||
*
|
||||
* <ul>
|
||||
* <li>
|
||||
* Cluster - a permission that is based on privileges for cluster wide actions
|
||||
* </li>
|
||||
* <li>
|
||||
* Indices - a permission that is based on privileges for index related actions executed
|
||||
* on specific indices
|
||||
* </li>
|
||||
* <li>
|
||||
* Global - a composite permission that combines a both cluster & indices permissions
|
||||
* </li>
|
||||
* </ul>
|
||||
*/
|
||||
public abstract class Permission {
|
||||
public interface Permission {
|
||||
|
||||
public abstract boolean check(String action, TransportRequest request, MetaData metaData);
|
||||
boolean check(String action, TransportRequest request, MetaData metaData);
|
||||
|
||||
public static Cluster cluster(Privilege.Cluster clusterPrivilege) {
|
||||
return new Cluster(clusterPrivilege.predicate());
|
||||
}
|
||||
public static class Global implements Permission {
|
||||
|
||||
public static Index index(Privilege.Index indexPrivilege, String... indexNamePatterns) {
|
||||
assert indexNamePatterns.length != 0 : "Index permissions must at least be defined on a single index";
|
||||
private final Cluster cluster;
|
||||
private final Indices indices;
|
||||
|
||||
Automaton indices = new RegExp(indexNamePatterns[0]).toAutomaton();
|
||||
for (int i = 1; i < indexNamePatterns.length; i++) {
|
||||
indices.union(new RegExp(indexNamePatterns[i]).toAutomaton());
|
||||
}
|
||||
return new Index(new AutomatonPredicate(indices), indexPrivilege.predicate());
|
||||
}
|
||||
|
||||
public static Compound.Builder compound() {
|
||||
return new Compound.Builder();
|
||||
}
|
||||
|
||||
public static class Index extends Permission {
|
||||
|
||||
private final Predicate<String> indicesMatcher;
|
||||
private final Predicate<String> actionMatcher;
|
||||
|
||||
private Index(Predicate<String> indicesMatcher, Predicate<String> actionMatcher) {
|
||||
this.indicesMatcher = indicesMatcher;
|
||||
this.actionMatcher = actionMatcher;
|
||||
Global() {
|
||||
this(null, null);
|
||||
}
|
||||
|
||||
Global(Cluster cluster, Indices indices) {
|
||||
this.cluster = cluster;
|
||||
this.indices = indices;
|
||||
}
|
||||
|
||||
public Cluster cluster() {
|
||||
return cluster;
|
||||
}
|
||||
|
||||
public Indices indices() {
|
||||
return indices;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean check(String action, TransportRequest request, MetaData metaData) {
|
||||
if (!actionMatcher.apply(action)) {
|
||||
if (cluster != null && cluster.check(action, request, metaData)) {
|
||||
return true;
|
||||
}
|
||||
if (indices != null && indices.check(action, request, metaData)) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public static Builder builder() {
|
||||
return new Builder();
|
||||
}
|
||||
|
||||
public static class Builder {
|
||||
|
||||
private Cluster cluster = Cluster.NONE;
|
||||
private ImmutableList.Builder<Indices.Group> groups;
|
||||
|
||||
private Builder() {
|
||||
}
|
||||
|
||||
public Builder set(Privilege.Cluster privilege) {
|
||||
cluster = new Cluster(privilege);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder add(Privilege.Index privilege, String... indices) {
|
||||
if (groups == null) {
|
||||
groups = ImmutableList.builder();
|
||||
}
|
||||
groups.add(new Indices.Group(privilege, indices));
|
||||
return this;
|
||||
}
|
||||
|
||||
public Global build() {
|
||||
Indices indices = groups != null ? new Indices(groups.build()) : Indices.NONE;
|
||||
return new Global(cluster, indices);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static class Cluster implements Permission {
|
||||
|
||||
public static final Cluster NONE = new Cluster(Privilege.Cluster.NONE) {
|
||||
@Override
|
||||
public boolean check(String action, TransportRequest request, MetaData metaData) {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
boolean isIndicesRequest = request instanceof CompositeIndicesRequest || request instanceof IndicesRequest;
|
||||
private final Privilege.Cluster privilege;
|
||||
private final Predicate<String> predicate;
|
||||
|
||||
assert isIndicesRequest : "the only requests passing the action matcher should be IndicesRequests";
|
||||
private Cluster(Privilege.Cluster privilege) {
|
||||
this.privilege = privilege;
|
||||
this.predicate = privilege.predicate();
|
||||
}
|
||||
|
||||
// if for some reason we are missing an action... just for safety we'll reject
|
||||
if (!isIndicesRequest) {
|
||||
public Privilege.Cluster privilege() {
|
||||
return privilege;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean check(String action, TransportRequest request, MetaData metaData) {
|
||||
return predicate.apply(action);
|
||||
}
|
||||
}
|
||||
|
||||
public static class Indices implements Permission {
|
||||
|
||||
public static final Indices NONE = new Indices() {
|
||||
@Override
|
||||
public boolean check(String action, TransportRequest request, MetaData metaData) {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
Set<String> indices = Sets.newHashSet();
|
||||
if (request instanceof CompositeIndicesRequest) {
|
||||
CompositeIndicesRequest compositeIndicesRequest = (CompositeIndicesRequest) request;
|
||||
for (IndicesRequest indicesRequest : compositeIndicesRequest.subRequests()) {
|
||||
Collections.addAll(indices, explodeWildcards(indicesRequest, metaData));
|
||||
}
|
||||
} else {
|
||||
Collections.addAll(indices, explodeWildcards((IndicesRequest) request, metaData));
|
||||
}
|
||||
private Group[] groups;
|
||||
|
||||
for (String index : indices) {
|
||||
if (!indicesMatcher.apply(index)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
public Indices(Collection<Group> groups) {
|
||||
this(groups.toArray(new Group[groups.size()]));
|
||||
}
|
||||
|
||||
private String[] explodeWildcards(IndicesRequest indicesRequest, MetaData metaData) {
|
||||
if (indicesRequest.indicesOptions().expandWildcardsOpen() || indicesRequest.indicesOptions().expandWildcardsClosed()) {
|
||||
if (MetaData.isAllIndices(indicesRequest.indices())) {
|
||||
return new String[]{"_all"};
|
||||
|
||||
/* the following is an alternative to requiring explicit privileges for _all, we just expand it, we could potentially extract
|
||||
this code fragment to a separate method in MetaData#concreteIndices in the open source and just use it here]
|
||||
|
||||
if (indicesRequest.indicesOptions().expandWildcardsOpen() && indicesRequest.indicesOptions().expandWildcardsClosed()) {
|
||||
return metaData.concreteAllIndices();
|
||||
} else if (indicesRequest.indicesOptions().expandWildcardsOpen()) {
|
||||
return metaData.concreteAllOpenIndices();
|
||||
} else {
|
||||
return metaData.concreteAllClosedIndices();
|
||||
}*/
|
||||
|
||||
}
|
||||
return metaData.convertFromWildcards(indicesRequest.indices(), indicesRequest.indicesOptions());
|
||||
}
|
||||
return indicesRequest.indices();
|
||||
public Indices(Group... groups) {
|
||||
this.groups = groups;
|
||||
}
|
||||
}
|
||||
|
||||
public static class Cluster extends Permission {
|
||||
public Group[] groups() {
|
||||
return groups;
|
||||
}
|
||||
|
||||
private final Predicate<String> actionMatcher;
|
||||
/**
|
||||
* @return A predicate that will match all the indices that this permission
|
||||
* has the given privilege for.
|
||||
*/
|
||||
public Predicate<String> allowedIndicesMatcher(Privilege.Index privilege) {
|
||||
ImmutableList.Builder<String> indices = ImmutableList.builder();
|
||||
for (Group group : groups) {
|
||||
if (group.privilege.implies(privilege)) {
|
||||
indices.add(group.indices);
|
||||
}
|
||||
}
|
||||
return new AutomatonPredicate(Automatons.patterns(indices.build()));
|
||||
}
|
||||
|
||||
private Cluster(Predicate<String> actionMatcher) {
|
||||
this.actionMatcher = actionMatcher;
|
||||
/**
|
||||
* @return A predicate that will match all the indices that this permission
|
||||
* has the privilege for executing the given action on.
|
||||
*/
|
||||
public Predicate<String> allowedIndicesMatcher(String action) {
|
||||
ImmutableList.Builder<String> indices = ImmutableList.builder();
|
||||
for (Group group : groups) {
|
||||
if (group.actionMatcher.apply(action)) {
|
||||
indices.add(group.indices);
|
||||
}
|
||||
}
|
||||
return new AutomatonPredicate(Automatons.patterns(indices.build()));
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean check(String action, TransportRequest request, MetaData metaData) {
|
||||
return actionMatcher.apply(action);
|
||||
}
|
||||
}
|
||||
|
||||
public static class Compound extends Permission {
|
||||
|
||||
private final Permission[] permissions;
|
||||
|
||||
private Compound(Permission... permissions) {
|
||||
this.permissions = permissions;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean check(String action, TransportRequest request, MetaData metaData) {
|
||||
for (int i = 0; i < permissions.length; i++) {
|
||||
if (permissions[i].check(action, request, metaData)) {
|
||||
for (int i = 0; i < groups.length; i++) {
|
||||
if (groups[i].check(action, request, metaData)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public static class Builder {
|
||||
public static class Group implements Permission {
|
||||
|
||||
private Permission[] permissions = null;
|
||||
private final Privilege.Index privilege;
|
||||
private final Predicate<String> actionMatcher;
|
||||
private final String[] indices;
|
||||
private final Predicate<String> indicesMatcher;
|
||||
|
||||
private Builder() {}
|
||||
|
||||
public void add(Permission... permissions) {
|
||||
if (this.permissions == null) {
|
||||
this.permissions = permissions;
|
||||
return;
|
||||
}
|
||||
Permission[] extended = new Permission[this.permissions.length + permissions.length];
|
||||
System.arraycopy(this.permissions, 0, extended, 0, this.permissions.length);
|
||||
System.arraycopy(permissions, 0, extended, this.permissions.length, permissions.length);
|
||||
public Group(Privilege.Index privilege, String... indices) {
|
||||
assert indices.length != 0;
|
||||
this.privilege = privilege;
|
||||
this.actionMatcher = privilege.predicate();
|
||||
this.indices = indices;
|
||||
this.indicesMatcher = new AutomatonPredicate(Automatons.patterns(indices));
|
||||
}
|
||||
|
||||
public Compound build() {
|
||||
return new Compound(permissions);
|
||||
public Privilege.Index privilege() {
|
||||
return privilege;
|
||||
}
|
||||
|
||||
public String[] indices() {
|
||||
return indices;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean check(String action, TransportRequest request, MetaData metaData) {
|
||||
|
||||
if (!actionMatcher.apply(action)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
boolean isIndicesRequest = request instanceof CompositeIndicesRequest || request instanceof IndicesRequest;
|
||||
|
||||
assert isIndicesRequest : "the only requests passing the action matcher should be IndicesRequests";
|
||||
|
||||
// if for some reason we are missing an action... just for safety we'll reject
|
||||
if (!isIndicesRequest) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Set<String> indices = Sets.newHashSet();
|
||||
if (request instanceof CompositeIndicesRequest) {
|
||||
CompositeIndicesRequest compositeIndicesRequest = (CompositeIndicesRequest) request;
|
||||
for (IndicesRequest indicesRequest : compositeIndicesRequest.subRequests()) {
|
||||
Collections.addAll(indices, explodeWildcards(indicesRequest, metaData));
|
||||
}
|
||||
} else {
|
||||
Collections.addAll(indices, explodeWildcards((IndicesRequest) request, metaData));
|
||||
}
|
||||
|
||||
for (String index : indices) {
|
||||
if (!indicesMatcher.apply(index)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private String[] explodeWildcards(IndicesRequest indicesRequest, MetaData metaData) {
|
||||
if (indicesRequest.indicesOptions().expandWildcardsOpen() || indicesRequest.indicesOptions().expandWildcardsClosed()) {
|
||||
if (MetaData.isAllIndices(indicesRequest.indices())) {
|
||||
if (indicesRequest.indicesOptions().expandWildcardsOpen() && indicesRequest.indicesOptions().expandWildcardsClosed()) {
|
||||
return metaData.concreteAllIndices();
|
||||
}
|
||||
if (indicesRequest.indicesOptions().expandWildcardsOpen()) {
|
||||
return metaData.concreteAllOpenIndices();
|
||||
}
|
||||
return metaData.concreteAllClosedIndices();
|
||||
|
||||
}
|
||||
return metaData.convertFromWildcards(indicesRequest.indices(), indicesRequest.indicesOptions());
|
||||
}
|
||||
return indicesRequest.indices();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
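To make the `Permission.Global` shape in the hunk above concrete, here is a small sketch of composing and querying a global permission (the index patterns and the surrounding class are invented; the API calls mirror the code shown in this diff):

```java
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.base.Predicate;
import org.elasticsearch.shield.authz.Permission;
import org.elasticsearch.shield.authz.Privilege;
import org.elasticsearch.transport.TransportRequest;

class GlobalPermissionSketch {

    static boolean allowed(TransportRequest request, MetaData metaData) {
        // A role-level permission: one cluster privilege plus index privileges per pattern group
        Permission.Global role = Permission.Global.builder()
                .set(Privilege.Cluster.MONITOR)
                .add(Privilege.Index.READ, "logs_.*", "metrics_.*")
                .build();

        // Global.check() succeeds if either the cluster part or the indices part allows the action
        boolean ok = role.check("indices:data/read/get", request, metaData);

        // The indices part can also report which index names the action may be executed on
        Predicate<String> matcher = role.indices().allowedIndicesMatcher("indices:data/read/get");
        return ok && matcher.apply("logs_2014-01-01");
    }
}
```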
|
@ -9,146 +9,225 @@ import org.apache.lucene.util.automaton.Automaton;
|
|||
import org.apache.lucene.util.automaton.BasicAutomata;
|
||||
import org.apache.lucene.util.automaton.MinimizationOperations;
|
||||
import org.apache.lucene.util.automaton.RegExp;
|
||||
import org.elasticsearch.action.get.GetAction;
|
||||
import org.elasticsearch.action.search.SearchAction;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.base.Predicate;
|
||||
import org.elasticsearch.common.cache.CacheBuilder;
|
||||
import org.elasticsearch.common.cache.CacheLoader;
|
||||
import org.elasticsearch.common.cache.LoadingCache;
|
||||
import org.elasticsearch.common.collect.ImmutableSet;
|
||||
import org.elasticsearch.common.collect.Sets;
|
||||
import org.elasticsearch.shield.support.AutomatonPredicate;
|
||||
import org.elasticsearch.shield.support.Automatons;
|
||||
|
||||
import java.util.Locale;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public interface Privilege {
|
||||
public abstract class Privilege<P extends Privilege<P>> {
|
||||
|
||||
Privilege SYSTEM = new AutomatonPrivilege("internal:.*");
|
||||
public static final Internal INTERNAL = new Internal();
|
||||
|
||||
Predicate<String> predicate();
|
||||
protected final Name name;
|
||||
|
||||
Privilege plus(Privilege other);
|
||||
private Privilege(Name name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
Privilege minus(Privilege other);
|
||||
public Name name() {
|
||||
return name;
|
||||
}
|
||||
|
||||
boolean implies(Privilege other);
|
||||
public abstract Predicate<String> predicate();
|
||||
|
||||
public static enum Index implements Privilege {
|
||||
public abstract boolean implies(P other);
|
||||
|
||||
NONE(AutomatonPrivilege.NONE),
|
||||
ALL("indices:.*"),
|
||||
MANAGE("indices:monitor/.*", "indices:admin/.*"),
|
||||
MONITOR("indices:monitor/.*"),
|
||||
@SuppressWarnings("unchecked")
|
||||
public boolean isAlias(P other) {
|
||||
return this.implies(other) && other.implies((P) this);
|
||||
}
|
||||
|
||||
FULL_DATA_ACCESS("indices:data/.*"),
|
||||
static class Internal extends Privilege<Internal> {
|
||||
|
||||
CRUD("indices:data/write/.*", "indices:data/read/.*"),
|
||||
READ("indices:data/read/.*"),
|
||||
CREATE("indices:data/write/create"), //todo unsupported yet
|
||||
INDEX("indices:data/write/index.*", "indices:data/write/update"),
|
||||
DELETE("indices:data/write/delete.*"),
|
||||
WRITE("indices:data/write/.*"),
|
||||
protected final Automaton automaton;
|
||||
|
||||
BENCHMARK("indices:data/benchmark");
|
||||
|
||||
private AutomatonPrivilege privilege;
|
||||
|
||||
private Index(String... patterns) {
|
||||
this(new AutomatonPrivilege(patterns));
|
||||
}
|
||||
|
||||
private Index(AutomatonPrivilege privilege) {
|
||||
this.privilege = privilege;
|
||||
private Internal() {
|
||||
super(new Name("internal"));
|
||||
automaton = new RegExp("internal:.*", RegExp.ALL).toAutomaton();
|
||||
MinimizationOperations.minimize(automaton);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Predicate<String> predicate() {
|
||||
return privilege.predicate();
|
||||
return new AutomatonPredicate(automaton);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Privilege plus(Privilege other) {
|
||||
return privilege.plus(other);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Privilege minus(Privilege other) {
|
||||
return privilege.minus(other);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean implies(Privilege other) {
|
||||
return privilege.implies(other);
|
||||
}
|
||||
|
||||
public static Index resolve(String... names) {
|
||||
Index result = null;
|
||||
for (int i = 0; i < names.length; i++) {
|
||||
if (result == null) {
|
||||
result = Index.valueOf(names[i]);
|
||||
} else {
|
||||
result.plus(Index.valueOf(names[i]));
|
||||
}
|
||||
}
|
||||
return result;
|
||||
public boolean implies(Internal other) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
public static enum Cluster implements Privilege {
|
||||
public static class Index extends AutomatonPrivilege<Index> {
|
||||
|
||||
NONE(AutomatonPrivilege.NONE),
|
||||
ALL("cluster:.*"),
|
||||
MANAGE("cluster:.*"),
|
||||
MONITOR("cluster:monitor/.*");
|
||||
public static final Index NONE = new Index(Name.NONE, BasicAutomata.makeEmpty());
|
||||
public static final Index ALL = new Index("all", "indices:.*");
|
||||
public static final Index MANAGE = new Index("manage", "indices:monitor/.*", "indices:admin/.*");
|
||||
public static final Index MONITOR = new Index("monitor", "indices:monitor/.*");
|
||||
public static final Index DATA_ACCESS = new Index("data_access","indices:data/.*");
|
||||
public static final Index CRUD = new Index("crud", "indices:data/write/.*", "indices:data/read/.*");
|
||||
public static final Index READ = new Index("read", "indices:data/read/.*");
|
||||
public static final Index SEARCH = new Index("search", SearchAction.NAME + ".*", GetAction.NAME + ".*");
|
||||
public static final Index GET = new Index("get", GetAction.NAME + ".*");
|
||||
public static final Index INDEX = new Index("index", "indices:data/write/index.*", "indices:data/write/update");
|
||||
public static final Index DELETE = new Index("delete", "indices:data/write/delete.*");
|
||||
public static final Index WRITE = new Index("write", "indices:data/write/.*");
|
||||
public static final Index BENCHMARK = new Index("benchmark", "indices:data/benchmark");
|
||||
|
||||
private AutomatonPrivilege privilege;
|
||||
private static final Index[] values = new Index[] {
|
||||
NONE, ALL, MANAGE, MONITOR, DATA_ACCESS, CRUD, READ, SEARCH, GET, INDEX, DELETE, WRITE, BENCHMARK
|
||||
};
|
||||
|
||||
private Cluster(String... patterns) {
|
||||
this(new AutomatonPrivilege(patterns));
|
||||
static Index[] values() {
|
||||
return values;
|
||||
}
|
||||
|
||||
private Cluster(AutomatonPrivilege privilege) {
|
||||
this.privilege = privilege;
|
||||
private static final LoadingCache<Name, Index> cache = CacheBuilder.newBuilder().build(
|
||||
new CacheLoader<Name, Index>() {
|
||||
@Override
|
||||
public Index load(Name name) throws Exception {
|
||||
Index index = NONE;
|
||||
for (String part : name.parts) {
|
||||
index = index == NONE ? resolve(part) : index.plus(resolve(part));
|
||||
}
|
||||
return index;
|
||||
}
|
||||
});
|
||||
|
||||
private Index(String name, String... patterns) {
|
||||
super(name, patterns);
|
||||
}
|
||||
|
||||
private Index(Name name, Automaton automaton) {
|
||||
super(name, automaton);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Predicate<String> predicate() {
|
||||
return privilege.predicate();
|
||||
protected Index create(Name name, Automaton automaton) {
|
||||
if (name == Name.NONE) {
|
||||
return NONE;
|
||||
}
|
||||
return new Index(name, automaton);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Privilege plus(Privilege other) {
|
||||
return privilege.plus(other);
|
||||
protected Index none() {
|
||||
return NONE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Privilege minus(Privilege other) {
|
||||
return privilege.minus(other);
|
||||
public static Index get(Name name) {
|
||||
return cache.getUnchecked(name);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean implies(Privilege other) {
|
||||
return privilege.implies(other);
|
||||
public static Index action(String action) {
|
||||
return new Index(action, action);
|
||||
}
|
||||
|
||||
public static Cluster resolve(String... names) {
|
||||
Cluster result = null;
|
||||
for (int i = 0; i < names.length; i++) {
|
||||
if (result == null) {
|
||||
result = Cluster.valueOf(names[i]);
|
||||
} else {
|
||||
result.plus(Cluster.valueOf(names[i]));
|
||||
}
|
||||
public static Index union(Index... indices) {
|
||||
Index result = NONE;
|
||||
for (Index index : indices) {
|
||||
result = result.plus(index);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
static class AutomatonPrivilege implements Privilege {
|
||||
|
||||
private static final AutomatonPrivilege NONE = new AutomatonPrivilege(BasicAutomata.makeEmpty());
|
||||
|
||||
private final Automaton automaton;
|
||||
|
||||
private AutomatonPrivilege(String... patterns) {
|
||||
this.automaton = compileAutomaton(patterns);
|
||||
private static Index resolve(String name) {
|
||||
name = name.toLowerCase(Locale.ROOT);
|
||||
for (Index index : values) {
|
||||
if (name.toLowerCase(Locale.ROOT).equals(index.name.toString())) {
|
||||
return index;
|
||||
}
|
||||
}
|
||||
throw new IllegalArgumentException("Unknown index privilege [" + name + "]");
|
||||
}
|
||||
|
||||
private AutomatonPrivilege(Automaton automaton) {
|
||||
}
|
||||
|
||||
public static class Cluster extends AutomatonPrivilege<Cluster> {
|
||||
|
||||
public static final Cluster NONE = new Cluster(Name.NONE, BasicAutomata.makeEmpty());
|
||||
public static final Cluster ALL = new Cluster("all", "cluster:.*");
|
||||
public static final Cluster MONITOR = new Cluster("monitor", "cluster:monitor/.*");
|
||||
|
||||
private static final Cluster[] values = new Cluster[] { NONE, ALL, MONITOR };
|
||||
|
||||
static Cluster[] values() {
|
||||
return values;
|
||||
};
|
||||
|
||||
private static final LoadingCache<Name, Cluster> cache = CacheBuilder.newBuilder().build(
|
||||
new CacheLoader<Name, Cluster>() {
|
||||
@Override
|
||||
public Cluster load(Name name) throws Exception {
|
||||
Cluster cluster = NONE;
|
||||
for (String part : name.parts) {
|
||||
cluster = cluster == NONE ? resolve(part) : cluster.plus(resolve(part));
|
||||
}
|
||||
return cluster;
|
||||
}
|
||||
});
|
||||
|
||||
private Cluster(String name, String... patterns) {
|
||||
super(name, patterns);
|
||||
}
|
||||
|
||||
private Cluster(Name name, Automaton automaton) {
|
||||
super(name, automaton);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Cluster create(Name name, Automaton automaton) {
|
||||
if (name == Name.NONE) {
|
||||
return NONE;
|
||||
}
|
||||
return new Cluster(name, automaton);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Cluster none() {
|
||||
return NONE;
|
||||
}
|
||||
|
||||
public static Cluster get(Name name) {
|
||||
return cache.getUnchecked(name);
|
||||
}
|
||||
|
||||
private static Cluster resolve(String name) {
|
||||
name = name.toLowerCase(Locale.ROOT);
|
||||
for (Cluster cluster : values) {
|
||||
if (name.equals(cluster.name.toString())) {
|
||||
return cluster;
|
||||
}
|
||||
}
|
||||
throw new IllegalArgumentException("Unknown cluster privilege [" + name + "]");
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static abstract class AutomatonPrivilege<P extends AutomatonPrivilege<P>> extends Privilege<P> {
|
||||
|
||||
protected final Automaton automaton;
|
||||
|
||||
private AutomatonPrivilege(String name, String... patterns) {
|
||||
super(new Name(name));
|
||||
this.automaton = Automatons.patterns(patterns);
|
||||
}
|
||||
|
||||
private AutomatonPrivilege(Name name, Automaton automaton) {
|
||||
super(name);
|
||||
this.automaton = automaton;
|
||||
}
|
||||
|
||||
|
@ -157,36 +236,87 @@ public interface Privilege {
|
|||
return new AutomatonPredicate(automaton);
|
||||
}
|
||||
|
||||
private static Automaton compileAutomaton(String... patterns) {
|
||||
Automaton a = null;
|
||||
for (int i = 0; i < patterns.length; i++) {
|
||||
if (a == null) {
|
||||
a = new RegExp(patterns[i], RegExp.ALL).toAutomaton();
|
||||
} else {
|
||||
a = a.union(new RegExp(patterns[i], RegExp.ALL).toAutomaton());
|
||||
}
|
||||
protected P plus(P other) {
|
||||
if (other.implies((P) this)) {
|
||||
return other;
|
||||
}
|
||||
MinimizationOperations.minimize(a);
|
||||
return a;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Privilege plus(Privilege other) {
|
||||
return new AutomatonPrivilege(automaton.union(((AutomatonPrivilege) other).automaton));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Privilege minus(Privilege other) {
|
||||
if (!implies(other)) {
|
||||
return this;
|
||||
if (this.implies(other)) {
|
||||
return (P) this;
|
||||
}
|
||||
return new AutomatonPrivilege(automaton.minus(((AutomatonPrivilege) other).automaton));
|
||||
return create(name.add(other.name), automaton.union(other.automaton));
|
||||
}
|
||||
|
||||
protected P minus(P other) {
|
||||
if (other.implies((P) this)) {
|
||||
return none();
|
||||
}
|
||||
if (other == none() || !this.implies(other)) {
|
||||
return (P) this;
|
||||
}
|
||||
return create(name.remove(other.name), automaton.minus(other.automaton));
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean implies(Privilege other) {
|
||||
return ((AutomatonPrivilege) other).automaton.subsetOf(automaton);
|
||||
public boolean implies(P other) {
|
||||
return other.automaton.subsetOf(automaton);
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return name.toString();
|
||||
}
|
||||
|
||||
protected abstract P create(Name name, Automaton automaton);
|
||||
|
||||
protected abstract P none();
|
||||
}
|
||||
|
||||
}
|
||||
public static class Name {
|
||||
|
||||
public static final Name NONE = new Name("none");
|
||||
|
||||
private final ImmutableSet<String> parts;
|
||||
|
||||
public Name(String name) {
|
||||
assert name != null && !name.contains(",");
|
||||
parts = ImmutableSet.of(name);
|
||||
}
|
||||
|
||||
public Name(Set<String> parts) {
|
||||
assert !parts.isEmpty();
|
||||
this.parts = ImmutableSet.copyOf(parts);
|
||||
}
|
||||
|
||||
public Name(String... parts) {
|
||||
this(ImmutableSet.copyOf(parts));
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return Strings.collectionToCommaDelimitedString(parts);
|
||||
}
|
||||
|
||||
public Name add(Name other) {
|
||||
return new Name(Sets.union(parts, other.parts));
|
||||
}
|
||||
|
||||
public Name remove(Name other) {
|
||||
Set<String> parts = Sets.difference(this.parts, other.parts);
|
||||
return parts.isEmpty() ? NONE : new Name(parts);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
|
||||
Name name = (Name) o;
|
||||
|
||||
return parts.equals(name.parts);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return parts.hashCode();
|
||||
}
|
||||
}
|
||||
}
|
|
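The Privilege rewrite above replaces the old enums with typed constants; a brief sketch of what the commit message means by "implies(...) can only work on same type privilege" (the wrapper class is illustrative only):

```java
import org.elasticsearch.shield.authz.Privilege;

class PrivilegeImpliesSketch {

    static void demo() {
        // Same-type comparisons: indices:.* covers indices:data/read/.*, but not the other way round
        boolean broadCoversRead = Privilege.Index.ALL.implies(Privilege.Index.READ);   // true
        boolean readCoversWrite = Privilege.Index.READ.implies(Privilege.Index.WRITE); // false

        // Cross-type comparisons such as Privilege.Index.ALL.implies(Privilege.Cluster.ALL)
        // no longer compile, because implies() is now bounded to the privilege's own type.
    }
}
```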
@@ -1,28 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.shield.authz;

/**
 *
 */
public interface RolesStore {

    Permission permission(String... roles);

    public static interface Writable extends RolesStore {

        void set(String role, Privilege.Index[] privileges, String[] indices);

        void grant(String role, Privilege.Index[] privileges, String[] indices);

        void grant(String role, Privilege.Cluster[] privileges);

        void revoke(String role, Privilege.Index[] privileges, String[] indices);

        void revoke(String role, Privilege.Cluster[] privileges);
    }

}

@@ -6,17 +6,19 @@
package org.elasticsearch.shield.authz;

import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.base.Predicate;
import org.elasticsearch.transport.TransportRequest;

/**
 *
 */
public class SystemRole extends Permission {
public class SystemRole extends Permission.Global {

    public static final String NAME = "__es_system_role";
    private static final Predicate<String> PREDICATE = Privilege.INTERNAL.predicate();

    @Override
    public boolean check(String action, TransportRequest request, MetaData metaData) {
        return true;
        return PREDICATE.apply(action);
    }
}

@ -0,0 +1,225 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.shield.authz.store;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.collect.ImmutableMap;
|
||||
import org.elasticsearch.common.collect.ImmutableSet;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.yaml.YamlXContent;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.shield.authz.Permission;
|
||||
import org.elasticsearch.shield.authz.Privilege;
|
||||
import org.elasticsearch.watcher.FileChangesListener;
|
||||
import org.elasticsearch.watcher.FileWatcher;
|
||||
import org.elasticsearch.watcher.ResourceWatcherService;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.nio.file.StandardOpenOption;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class FileRolesStore extends AbstractComponent implements RolesStore {
|
||||
|
||||
private static final Pattern COMMA_DELIM = Pattern.compile("\\s*,\\s*");
|
||||
|
||||
private final Path file;
|
||||
private final Listener listener;
|
||||
|
||||
private volatile ImmutableMap<String, Permission.Global> permissions;
|
||||
|
||||
@Inject
|
||||
public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService) {
|
||||
this(settings, env, watcherService, Listener.NOOP);
|
||||
}
|
||||
|
||||
public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, Listener listener) {
|
||||
super(settings);
|
||||
file = resolveFile(componentSettings, env);
|
||||
permissions = parseFile(file);
|
||||
FileWatcher watcher = new FileWatcher(file.getParent().toFile());
|
||||
watcher.addListener(new FileListener());
|
||||
watcherService.add(watcher, ResourceWatcherService.Frequency.HIGH);
|
||||
this.listener = listener;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Permission.Global permission(String role) {
|
||||
return permissions.get(role);
|
||||
}
|
||||
|
||||
public static Path resolveFile(Settings settings, Environment env) {
|
||||
String location = settings.get("files.roles");
|
||||
if (location == null) {
|
||||
return env.configFile().toPath().resolve(".roles.yml");
|
||||
}
|
||||
return Paths.get(location);
|
||||
}
|
||||
|
||||
public static ImmutableMap<String, Permission.Global> parseFile(Path path) {
|
||||
if (!Files.exists(path)) {
|
||||
return ImmutableMap.of();
|
||||
}
|
||||
|
||||
ImmutableMap.Builder<String, Permission.Global> roles = ImmutableMap.builder();
|
||||
try (InputStream input = Files.newInputStream(path, StandardOpenOption.READ)) {
|
||||
XContentParser parser = YamlXContent.yamlXContent.createParser(input);
|
||||
XContentParser.Token token;
|
||||
String currentFieldName = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_OBJECT && currentFieldName != null) {
|
||||
String roleName = currentFieldName;
|
||||
Permission.Global.Builder permission = Permission.Global.builder();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if ("cluster".equals(currentFieldName)) {
|
||||
Privilege.Name name;
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
String[] names = COMMA_DELIM.split(parser.text().trim());
|
||||
name = new Privilege.Name(names);
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
ImmutableSet.Builder<String> names = ImmutableSet.builder();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
names.add(parser.text());
|
||||
}
|
||||
}
|
||||
name = new Privilege.Name(names.build());
|
||||
} else {
|
||||
throw new ElasticsearchException("Invalid roles file format [" + path.toAbsolutePath() +
|
||||
"]. [cluster] field value can either be a string or a list of strings, but [" + token + "] was found instead");
|
||||
}
|
||||
permission.set(Privilege.Cluster.get(name));
|
||||
} else if ("indices".equals(currentFieldName)) {
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else {
|
||||
String[] indices = COMMA_DELIM.split(currentFieldName);
|
||||
Privilege.Name name;
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
String[] names = COMMA_DELIM.split(parser.text());
|
||||
name = new Privilege.Name(names);
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
Set<String> names = new HashSet<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
names.add(parser.text());
|
||||
} else {
|
||||
throw new ElasticsearchException("Invalid roles file format [" + path.toAbsolutePath() +
|
||||
"]. Could not parse [" + token + "] as index privilege. Privilege names must be strings");
|
||||
}
|
||||
}
|
||||
name = new Privilege.Name(names);
|
||||
} else {
|
||||
throw new ElasticsearchException("Invalid roles file format [" + path.toAbsolutePath() +
|
||||
"]. Could not parse [" + token + "] as index privileges list. Privilege lists must either " +
|
||||
"be a comma delimited string or an array of strings");
|
||||
}
|
||||
permission.add(Privilege.Index.get(name), indices);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw new ElasticsearchException("Invalid roles file format [" + path.toAbsolutePath() +
|
||||
"]. [indices] field value must be an array of indices-privileges mappings defined as a string" +
|
||||
" in the form <comma-separated list of index name patterns>::<comma-separated list of privileges> , but [" + token + "] was found instead");
|
||||
}
|
||||
} else {
|
||||
throw new ElasticsearchException("Invalid roles file format [" + path.toAbsolutePath() +
|
||||
"]. each role may have [cluster] field (holding a list of cluster permissions) and/or " +
|
||||
"[indices] field (holding a list of indices permissions. But [" + token + "] was found instead");
|
||||
}
|
||||
}
|
||||
roles.put(roleName, permission.build());
|
||||
}
|
||||
}
|
||||
|
||||
return roles.build();
|
||||
|
||||
} catch (IOException ioe) {
|
||||
throw new ElasticsearchException("Failed to read roles file [" + path.toAbsolutePath() + "]", ioe);
|
||||
}
|
||||
}
|
||||
|
||||
public static void writeFile(Map<String, Permission.Global> roles, Path path) {
|
||||
try (OutputStream output = Files.newOutputStream(path, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE)) {
|
||||
XContentBuilder builder = XContentFactory.yamlBuilder(output);
|
||||
for (Map.Entry<String, Permission.Global> entry : roles.entrySet()) {
|
||||
builder.startObject(entry.getKey());
|
||||
Permission.Global permission = entry.getValue();
|
||||
Permission.Cluster cluster = permission.cluster();
|
||||
if (cluster != null && cluster.privilege() != Privilege.Cluster.NONE) {
|
||||
builder.field("cluster", cluster.privilege().name());
|
||||
}
|
||||
Permission.Indices indices = permission.indices();
|
||||
if (indices != null) {
|
||||
Permission.Global.Indices.Group[] groups = indices.groups();
|
||||
if (groups != null && groups.length > 0) {
|
||||
builder.startObject("indices");
|
||||
for (int i = 0; i < groups.length; i++) {
|
||||
builder.field(Strings.arrayToCommaDelimitedString(groups[i].indices())).value(groups[i].privilege().name());
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (IOException ioe) {
|
||||
throw new ElasticsearchException("Could not write roles file [" + path.toAbsolutePath() + "], please check file permissions", ioe);
|
||||
}
|
||||
}
|
||||
|
||||
public static interface Listener {
|
||||
|
||||
public static final Listener NOOP = new Listener() {
|
||||
@Override
|
||||
public void onRefresh() {
|
||||
}
|
||||
};
|
||||
|
||||
void onRefresh();
|
||||
}
|
||||
|
||||
private class FileListener extends FileChangesListener {
|
||||
@Override
|
||||
public void onFileCreated(File file) {
|
||||
onFileChanged(file);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFileDeleted(File file) {
|
||||
onFileChanged(file);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFileChanged(File file) {
|
||||
if (file.equals(FileRolesStore.this.file.toFile())) {
|
||||
permissions = parseFile(file.toPath());
|
||||
listener.onRefresh();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,31 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.shield.authz.store;

import org.elasticsearch.shield.authz.Permission;
import org.elasticsearch.shield.authz.Privilege;

/**
 *
 */
public interface RolesStore {

    Permission.Global permission(String role);

    public static interface Writable extends RolesStore {

        void set(String role, Privilege.Index privilege, String... indices);

        void grant(String role, Privilege.Index privilege, String... indices);

        void revoke(String role, Privilege.Index privileges, String... indices);

        void grant(String role, Privilege.Cluster privilege);

        void revoke(String role, Privilege.Cluster privileges);
    }

}

@@ -0,0 +1,50 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.shield.support;

import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.BasicAutomata;
import org.apache.lucene.util.automaton.MinimizationOperations;
import org.apache.lucene.util.automaton.RegExp;

import java.util.Collection;

/**
 *
 */
public final class Automatons {

    private Automatons() {
    }

    public static Automaton patterns(String... patterns) {
        if (patterns.length == 0) {
            return BasicAutomata.makeEmpty();
        }
        Automaton automaton = new RegExp(patterns[0]).toAutomaton();
        for (String pattern : patterns) {
            automaton = automaton.union(new RegExp(pattern).toAutomaton());
        }
        MinimizationOperations.minimize(automaton);
        return automaton;
    }

    public static Automaton patterns(Collection<String> patterns) {
        if (patterns.isEmpty()) {
            return BasicAutomata.makeEmpty();
        }
        Automaton automaton = null;
        for (String pattern : patterns) {
            if (automaton == null) {
                automaton = new RegExp(pattern).toAutomaton();
            } else {
                automaton = automaton.union(new RegExp(pattern).toAutomaton());
            }
        }
        MinimizationOperations.minimize(automaton);
        return automaton;
    }
}

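A small sketch of how this helper is used elsewhere in the change to turn index-name patterns into a matcher (the pattern strings are made up; AutomatonPredicate is the existing shield support class used in Permission):

```java
import org.apache.lucene.util.automaton.Automaton;
import org.elasticsearch.common.base.Predicate;
import org.elasticsearch.shield.support.AutomatonPredicate;
import org.elasticsearch.shield.support.Automatons;

class AutomatonsSketch {

    static Predicate<String> indexNameMatcher() {
        // Union of two index-name patterns, minimized once and wrapped in a predicate
        Automaton automaton = Automatons.patterns("logs_.*", "metrics_.*");
        return new AutomatonPredicate(automaton);
    }
}
```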
@@ -0,0 +1,52 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.shield.authz;

import org.elasticsearch.action.get.GetAction;
import org.elasticsearch.common.base.Predicate;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Before;
import org.junit.Test;

import static org.elasticsearch.shield.authz.Privilege.Index.*;
import static org.hamcrest.Matchers.*;

/**
 *
 */
public class PermissionTests extends ElasticsearchTestCase {

    private Permission.Global permission;

    @Before
    public void init() {
        Permission.Global.Builder builder = Permission.Global.builder();
        builder.add(union(SEARCH, MONITOR), "test_.*", "foo.*");
        builder.add(union(READ), "baz_.*foo", "fool.*bar");
        builder.add(union(MONITOR), "bar.*");
        permission = builder.build();
    }

    @Test
    public void testAllowedIndicesMatcher_Privilege() throws Exception {
        testAllowedIndicesMatcher(permission.indices().allowedIndicesMatcher(GET));
    }

    @Test
    public void testAllowedIndicesMatcher_Action() throws Exception {
        testAllowedIndicesMatcher(permission.indices().allowedIndicesMatcher(GetAction.NAME));
    }

    private void testAllowedIndicesMatcher(Predicate<String> indicesMatcher) {
        assertThat(indicesMatcher.apply("test_123"), is(true));
        assertThat(indicesMatcher.apply("foobar"), is(true));
        assertThat(indicesMatcher.apply("fool"), is(true));
        assertThat(indicesMatcher.apply("fool2bar"), is(true));
        assertThat(indicesMatcher.apply("barbapapa"), is(false));
    }

}

@ -0,0 +1,108 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.shield.authz;

import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;

/**
 *
 */
public class PrivilegeTests extends ElasticsearchTestCase {

    @Test
    public void testName() throws Exception {
        Privilege.Name name12 = new Privilege.Name("name1", "name2");
        Privilege.Name name34 = new Privilege.Name("name3", "name4");
        Privilege.Name name1234 = randomBoolean() ? name12.add(name34) : name34.add(name12);
        assertThat(name1234, equalTo(new Privilege.Name("name1", "name2", "name3", "name4")));

        Privilege.Name name1 = name12.remove(new Privilege.Name("name2"));
        assertThat(name1, equalTo(new Privilege.Name("name1")));

        Privilege.Name name = name1.remove(new Privilege.Name("name1"));
        assertThat(name, is(Privilege.Name.NONE));

        Privilege.Name none = new Privilege.Name("name1", "name2", "none").remove(name12);
        assertThat(none, is(Privilege.Name.NONE));
    }

    @Test
    public void testCluster() throws Exception {
        Privilege.Name name = new Privilege.Name("monitor");
        Privilege.Cluster cluster = Privilege.Cluster.get(name);
        assertThat(cluster, is(Privilege.Cluster.MONITOR));

        // since "all" implies "monitor", this should collapse to ALL
        name = new Privilege.Name("monitor", "all");
        cluster = Privilege.Cluster.get(name);
        assertThat(cluster, is(Privilege.Cluster.ALL));

        name = new Privilege.Name("monitor", "none");
        cluster = Privilege.Cluster.get(name);
        assertThat(cluster, is(Privilege.Cluster.MONITOR));

        Privilege.Name name2 = new Privilege.Name("none", "monitor");
        Privilege.Cluster cluster2 = Privilege.Cluster.get(name2);
        assertThat(cluster, is(cluster2));
    }

    @Test
    public void testIndex_Collapse() throws Exception {
        Privilege.Index[] values = Privilege.Index.values();
        Privilege.Index first = values[randomIntBetween(0, values.length - 1)];
        Privilege.Index second = values[randomIntBetween(0, values.length - 1)];

        Privilege.Name name = new Privilege.Name(first.name().toString(), second.name().toString());
        Privilege.Index index = Privilege.Index.get(name);

        if (first.implies(second)) {
            assertThat(index, is(first));
        }

        if (second.implies(first)) {
            assertThat(index, is(second));
        }
    }

    @Test
    public void testIndex_Implies() throws Exception {
        Privilege.Index[] values = Privilege.Index.values();
        Privilege.Index first = values[randomIntBetween(0, values.length - 1)];
        Privilege.Index second = values[randomIntBetween(0, values.length - 1)];

        Privilege.Name name = new Privilege.Name(first.name().toString(), second.name().toString());
        Privilege.Index index = Privilege.Index.get(name);

        assertThat(index.implies(first), is(true));
        assertThat(index.implies(second), is(true));

        if (first.implies(second)) {
            assertThat(index, is(first));
        }

        if (second.implies(first)) {
            assertThat(index, is(second));
        }

        for (Privilege.Index other : Privilege.Index.values()) {
            if (first.implies(other) || second.implies(other) || index.isAlias(other)) {
                assertThat("index privilege [" + index + "] should imply [" + other + "]", index.implies(other), is(true));
            } else if (other.implies(first) && other.implies(second)) {
                assertThat("index privilege [" + index + "] should not imply [" + other + "]", index.implies(other), is(false));
            }
        }
    }
}
@ -0,0 +1,152 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.shield.authz.store;

import org.elasticsearch.common.base.Charsets;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.shield.authz.Permission;
import org.elasticsearch.shield.authz.Privilege;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import java.io.BufferedWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import static org.hamcrest.Matchers.*;

/**
 *
 */
public class FileRolesStoreTests extends ElasticsearchTestCase {

    @Rule
    public TemporaryFolder tempFolder = new TemporaryFolder();

    @Test
    public void testParseFile() throws Exception {
        Path path = Paths.get(getClass().getResource("roles.yml").toURI());
        Map<String, Permission.Global> roles = FileRolesStore.parseFile(path);
        assertThat(roles, notNullValue());
        assertThat(roles.size(), is(3));

        Permission.Global permission = roles.get("role1");
        assertThat(permission, notNullValue());
        assertThat(permission.cluster(), notNullValue());
        assertThat(permission.cluster().privilege(), is(Privilege.Cluster.ALL));
        assertThat(permission.indices(), notNullValue());
        assertThat(permission.indices().groups(), notNullValue());
        assertThat(permission.indices().groups().length, is(2));

        Permission.Global.Indices.Group group = permission.indices().groups()[0];
        assertThat(group.indices(), notNullValue());
        assertThat(group.indices().length, is(2));
        assertThat(group.indices()[0], equalTo("idx1"));
        assertThat(group.indices()[1], equalTo("idx2"));
        assertThat(group.privilege(), notNullValue());
        assertThat(group.privilege(), is(Privilege.Index.READ));

        group = permission.indices().groups()[1];
        assertThat(group.indices(), notNullValue());
        assertThat(group.indices().length, is(1));
        assertThat(group.indices()[0], equalTo("idx3"));
        assertThat(group.privilege(), notNullValue());
        assertThat(group.privilege(), is(Privilege.Index.CRUD));

        permission = roles.get("role2");
        assertThat(permission, notNullValue());
        assertThat(permission.cluster(), notNullValue());
        assertThat(permission.cluster().privilege(), is(Privilege.Cluster.ALL)); // MONITOR is collapsed into ALL
        assertThat(permission.indices(), notNullValue());
        assertThat(permission.indices(), is(Permission.Global.Indices.NONE));

        permission = roles.get("role3");
        assertThat(permission, notNullValue());
        assertThat(permission.cluster(), notNullValue());
        assertThat(permission.cluster(), is(Permission.Global.Cluster.NONE));
        assertThat(permission.indices(), notNullValue());
        assertThat(permission.indices().groups(), notNullValue());
        assertThat(permission.indices().groups().length, is(1));

        group = permission.indices().groups()[0];
        assertThat(group.indices(), notNullValue());
        assertThat(group.indices().length, is(1));
        assertThat(group.indices()[0], equalTo(".*_.*"));
        assertThat(group.privilege(), notNullValue());
        assertThat(group.privilege().isAlias(Privilege.Index.union(Privilege.Index.READ, Privilege.Index.WRITE)), is(true));
    }

    @Test
    public void testAutoReload() throws Exception {
        ThreadPool threadPool = null;
        ResourceWatcherService watcherService = null;
        try {
            Path users = Paths.get(getClass().getResource("roles.yml").toURI());
            Path tmp = tempFolder.newFile().toPath();
            Files.copy(users, Files.newOutputStream(tmp));

            Settings settings = ImmutableSettings.builder()
                    .put("watcher.interval.high", "500ms")
                    .put("shield.authz.store.files.roles", tmp.toAbsolutePath())
                    .build();

            Environment env = new Environment(settings);
            threadPool = new ThreadPool("test");
            watcherService = new ResourceWatcherService(settings, threadPool);
            final CountDownLatch latch = new CountDownLatch(1);
            FileRolesStore store = new FileRolesStore(settings, env, watcherService, new FileRolesStore.Listener() {
                @Override
                public void onRefresh() {
                    latch.countDown();
                }
            });

            Permission.Global permission = store.permission("role1");
            assertThat(permission, notNullValue());
            permission = store.permission("role4");
            assertThat(permission, nullValue());

            watcherService.start();

            try (BufferedWriter writer = Files.newBufferedWriter(tmp, Charsets.UTF_8, StandardOpenOption.APPEND)) {
                writer.newLine();
                writer.newLine();
                writer.newLine();
                writer.append("role4:").append(System.lineSeparator());
                writer.append(" cluster: 'MONITOR'");
            }

            if (!latch.await(5, TimeUnit.SECONDS)) {
                fail("Waited too long for the updated file to be picked up");
            }

            permission = store.permission("role4");
            assertThat(permission, notNullValue());
            assertThat(permission.check("cluster:monitor/foo/bar", null, null), is(true));
            assertThat(permission.check("cluster:admin/foo/bar", null, null), is(false));

        } finally {
            if (watcherService != null) {
                watcherService.stop();
            }
            if (threadPool != null) {
                threadPool.shutdownNow();
            }
        }
    }
}
@ -6,7 +6,10 @@
package org.elasticsearch.shield.n2n;

import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableTable;
import com.google.common.net.InetAddresses;
import org.elasticsearch.common.os.OsUtils;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;

@ -38,16 +41,21 @@ public class IpFilteringIntegrationTests extends ElasticsearchIntegrationTest {

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        return settingsBuilder()
        ImmutableSettings.Builder builder = settingsBuilder()
                .put(super.nodeSettings(nodeOrdinal))
                .put("discovery.zen.ping.multicast.ping.enabled", false)
                .put("node.mode", "network")
                //.put("network.host", "127.0.0.1")
                // todo http tests fail without an explicit IP (needs investigation)
                .put("network.host", randomBoolean() ? "127.0.0.1" : "::1")
                .put("http.type", NettySSLHttpServerTransportModule.class.getName())
                .put(TransportModule.TRANSPORT_TYPE_KEY, NettySSLTransportModule.class.getName())
                .put("plugin.types", SecurityPlugin.class.getName())
                .put("plugin.types", SecurityPlugin.class.getName());
                //.put("shield.n2n.file", configFile.getPath())
                .build();

        if (OsUtils.MAC) {
            builder.put("network.host", randomBoolean() ? "127.0.0.1" : "::1");
        }
        return builder.build();
    }

    @Test(expected = SocketException.class)
@ -14,6 +14,7 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.os.OsUtils;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;

@ -68,7 +69,7 @@ public class SslIntegrationTests extends ElasticsearchIntegrationTest {
            throw new RuntimeException(e);
        }

        return ImmutableSettings.settingsBuilder()
        ImmutableSettings.Builder builder = ImmutableSettings.settingsBuilder()
                .put(super.nodeSettings(nodeOrdinal))
                .put("discovery.zen.ping.multicast.ping.enabled", false)
                //

@ -90,8 +91,12 @@ public class SslIntegrationTests extends ElasticsearchIntegrationTest {
                .put("http.type", NettySSLHttpServerTransportModule.class.getName())
                .put(TransportModule.TRANSPORT_TYPE_KEY, NettySSLTransportModule.class.getName())
                .put("plugin.types", SecurityPlugin.class.getName())
                .put("shield.n2n.file", ipFilterFile.getPath())
                .build();
                .put("shield.n2n.file", ipFilterFile.getPath());

        if (OsUtils.MAC) {
            builder.put("network.host", randomBoolean() ? "127.0.0.1" : "::1");
        }
        return builder.build();
    }

    @Test
@ -10,6 +10,7 @@ import com.google.common.io.Files;
import com.google.common.net.InetAddresses;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.os.OsUtils;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;

@ -73,7 +74,7 @@ public class SslRequireAuthTests extends ElasticsearchIntegrationTest {
            throw new RuntimeException(e);
        }

        return ImmutableSettings.settingsBuilder()
        ImmutableSettings.Builder builder = ImmutableSettings.settingsBuilder()
                .put(super.nodeSettings(nodeOrdinal))
                .put("discovery.zen.ping.multicast.ping.enabled", false)
                // prevents exception until parsing has been fixed in PR

@ -96,8 +97,13 @@ public class SslRequireAuthTests extends ElasticsearchIntegrationTest {
                .put("http.type", NettySSLHttpServerTransportModule.class.getName())
                .put(TransportModule.TRANSPORT_TYPE_KEY, NettySSLTransportModule.class.getName())
                .put("plugin.types", SecurityPlugin.class.getName())
                .put("shield.n2n.file", ipFilterFile.getPath())
                .build();
                .put("shield.n2n.file", ipFilterFile.getPath());

        if (OsUtils.MAC) {
            builder.put("network.host", randomBoolean() ? "127.0.0.1" : "::1");
        }

        return builder.build();
    }
@ -0,0 +1,13 @@
role1:
  cluster: ALL
  indices:
    'idx1,idx2': READ
    idx3: crud

role2:
  cluster: ALL, MONITOR

role3:
  indices:
    '.*_.*': READ, WRITE
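As an aside, a sketch of consuming a roles file like this one outside of the test fixture (hypothetical; the file location and the expected outputs are assumptions based on the checks in FileRolesStoreTests above, using the same parseFile and check(...) calls).

// Illustrative only -- path and expected results are assumptions.
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;

import org.elasticsearch.shield.authz.Permission;
import org.elasticsearch.shield.authz.store.FileRolesStore;

public class RolesFileExample {

    public static void main(String[] args) throws Exception {
        // Parse the roles file the same way FileRolesStoreTests does.
        Path path = Paths.get("config/shield/roles.yml");  // illustrative location
        Map<String, Permission.Global> roles = FileRolesStore.parseFile(path);

        // role1 declares cluster: ALL, role3 declares no cluster privileges,
        // so a cluster-level action check is expected to pass only for role1.
        Permission.Global role1 = roles.get("role1");
        Permission.Global role3 = roles.get("role3");
        System.out.println(role1.check("cluster:monitor/health", null, null)); // expected: true
        System.out.println(role3.check("cluster:monitor/health", null, null)); // expected: false
    }
}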