Add option to deny access to fields (elastic/elasticsearch#2879)

To deny access to fields, users can name exceptions to field permissions with the following syntax:

"fields": {
     "grant": [list of field names patterns],
     "except": [list of patterns that are forbidden]
}

See the documentation for the rules that govern these patterns.
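For illustration, a minimal sketch of these semantics in terms of the FieldPermissions class this commit introduces (field names are made up; the except patterns must be a subset of the granted patterns, otherwise construction fails):

FieldPermissions permissions = new FieldPermissions(
        new String[] { "foo*" },     // grant: patterns for fields the role may see
        new String[] { "foobar" });  // except: denied subset of the granted patterns

permissions.grantsAccessTo("food");    // true: matches foo*
permissions.grantsAccessTo("foobar");  // false: explicitly denied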

This commit also reverts elastic/elasticsearch#2720

closes elastic/elasticsearch#2681



Original commit: elastic/x-pack-elasticsearch@d6537028ec
Britta Weber 2016-09-13 16:38:58 +02:00 committed by GitHub
parent d8fbcfb68b
commit 2c6d0b0cd2
44 changed files with 1458 additions and 389 deletions

View File

@ -76,9 +76,10 @@ can_not_see_hidden_fields:
- write
- create_index
- indices:admin/refresh
fields:
- foo
- bar
field_security:
grant:
- foo
- bar
- names: dest
privileges:
- read

View File

@ -5,9 +5,10 @@ actual_role:
indices:
- names: [ "index1", "index2" ]
privileges: [ "read", "write", "create_index", "indices:admin/refresh" ]
fields:
- foo
- bar
field_security:
grant:
- foo
- bar
query:
bool:
must_not:

View File

@ -12,7 +12,6 @@ import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.security.action.role.GetRolesResponse;
@ -46,13 +45,6 @@ public class MigrateToolIT extends MigrateToolTestCase {
assertTrue(pur.created());
}
private static String[] args(String command) {
if (!Strings.hasLength(command)) {
return Strings.EMPTY_ARRAY;
}
return command.split("\\s+");
}
public void testRunMigrateTool() throws Exception {
Settings settings = Settings.builder()
.put("path.home", createTempDir().toAbsolutePath().toString())
@ -100,13 +92,15 @@ public class MigrateToolIT extends MigrateToolTestCase {
for (RoleDescriptor.IndicesPrivileges ip : ips) {
if (Arrays.equals(ip.getIndices(), new String[]{"index1", "index2"})) {
assertArrayEquals(ip.getPrivileges(), new String[]{"read", "write", "create_index", "indices:admin/refresh"});
assertArrayEquals(ip.getFields(), new String[]{"foo", "bar"});
assertTrue(ip.getFieldPermissions().hasFieldLevelSecurity());
assertTrue(ip.getFieldPermissions().grantsAccessTo("bar"));
assertTrue(ip.getFieldPermissions().grantsAccessTo("foo"));
assertNotNull(ip.getQuery());
assertThat(ip.getQuery().utf8ToString(), containsString("{\"bool\":{\"must_not\":{\"match\":{\"hidden\":true}}}}"));
} else {
assertArrayEquals(ip.getIndices(), new String[]{"*"});
assertArrayEquals(ip.getPrivileges(), new String[]{"read"});
assertArrayEquals(ip.getFields(), null);
assertFalse(ip.getFieldPermissions().hasFieldLevelSecurity());
assertNull(ip.getQuery());
}
}

View File

@ -46,7 +46,7 @@ public class BulkRequestInterceptor extends AbstractComponent implements Request
for (String index : indicesRequest.indices()) {
IndicesAccessControl.IndexAccessControl indexAccessControl = indicesAccessControl.getIndexPermissions(index);
if (indexAccessControl != null) {
boolean fls = indexAccessControl.getFields() != null;
boolean fls = indexAccessControl.getFieldPermissions().hasFieldLevelSecurity();
boolean dls = indexAccessControl.getQueries() != null;
if (fls || dls) {
if (indicesRequest instanceof UpdateRequest) {

View File

@ -55,7 +55,7 @@ abstract class FieldAndDocumentLevelSecurityRequestInterceptor<Request> extends
for (String index : indicesRequest.indices()) {
IndicesAccessControl.IndexAccessControl indexAccessControl = indicesAccessControl.getIndexPermissions(index);
if (indexAccessControl != null) {
boolean fieldLevelSecurityEnabled = indexAccessControl.getFields() != null;
boolean fieldLevelSecurityEnabled = indexAccessControl.getFieldPermissions().hasFieldLevelSecurity();
boolean documentLevelSecurityEnabled = indexAccessControl.getQueries() != null;
if (fieldLevelSecurityEnabled || documentLevelSecurityEnabled) {
if (fieldLevelSecurityEnabled || documentLevelSecurityEnabled) {

View File

@ -15,6 +15,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.security.support.MetadataUtils;
import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;
import java.io.IOException;
import java.util.ArrayList;
@ -64,11 +65,12 @@ public class PutRoleRequest extends ActionRequest<PutRoleRequest> implements Wri
this.indicesPrivileges.addAll(Arrays.asList(privileges));
}
public void addIndex(String[] indices, String[] privileges, @Nullable String[] fields, @Nullable BytesReference query) {
public void addIndex(String[] indices, String[] privileges, FieldPermissions fieldPermissions,
@Nullable BytesReference query) {
this.indicesPrivileges.add(RoleDescriptor.IndicesPrivileges.builder()
.indices(indices)
.privileges(privileges)
.fields(fields)
.fieldPermissions(fieldPermissions)
.query(query)
.build());
}

View File

@ -11,6 +11,7 @@ import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;
import java.util.Map;
@ -29,7 +30,9 @@ public class PutRoleRequestBuilder extends ActionRequestBuilder<PutRoleRequest,
}
public PutRoleRequestBuilder source(String name, BytesReference source) throws Exception {
RoleDescriptor descriptor = RoleDescriptor.parse(name, source);
// we pass false as last parameter because we want to reject the request if field permissions
// are given in 2.x syntax
RoleDescriptor descriptor = RoleDescriptor.parse(name, source, false);
assert name.equals(descriptor.getName());
request.name(name);
request.cluster(descriptor.getClusterPrivileges());
@ -55,8 +58,8 @@ public class PutRoleRequestBuilder extends ActionRequestBuilder<PutRoleRequest,
}
public PutRoleRequestBuilder addIndices(String[] indices, String[] privileges,
@Nullable String[] fields, @Nullable BytesReference query) {
request.addIndex(indices, privileges, fields, query);
FieldPermissions fieldPermissions, @Nullable BytesReference query) {
request.addIndex(indices, privileges, fieldPermissions, query);
return this;
}
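A usage sketch for the new builder signature, mirroring the tests further down in this commit (client, role name, and field names are illustrative):

SecurityClient securityClient = new SecurityClient(client);
securityClient.preparePutRole("my_role")
        .cluster("none")
        .addIndices(new String[] { "index1" }, new String[] { "read" },
                new FieldPermissions(new String[] { "foo*" },      // grant
                        new String[] { "foo.secret" }),            // except
                null)                                              // no DLS query
        .get();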

View File

@ -21,9 +21,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.xpack.security.support.Validation;
import org.elasticsearch.xpack.security.support.MetadataUtils;
import org.elasticsearch.xpack.common.xcontent.XContentUtils;
import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;
import org.elasticsearch.xpack.security.support.MetadataUtils;
import org.elasticsearch.xpack.security.support.Validation;
import java.io.IOException;
import java.util.ArrayList;
@ -159,14 +160,14 @@ public class RoleDescriptor implements ToXContent {
out.writeMap(descriptor.metadata);
}
public static RoleDescriptor parse(String name, BytesReference source) throws IOException {
public static RoleDescriptor parse(String name, BytesReference source, boolean allow2xFormat) throws IOException {
assert name != null;
try (XContentParser parser = XContentHelper.createParser(source)) {
return parse(name, parser);
return parse(name, parser, allow2xFormat);
}
}
public static RoleDescriptor parse(String name, XContentParser parser) throws IOException {
public static RoleDescriptor parse(String name, XContentParser parser, boolean allow2xFormat) throws IOException {
// validate name
Validation.Error validationError = Validation.Roles.validateRoleName(name, true);
if (validationError != null) {
@ -189,7 +190,7 @@ public class RoleDescriptor implements ToXContent {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.INDICES)) {
indicesPrivileges = parseIndices(name, parser);
indicesPrivileges = parseIndices(name, parser, allow2xFormat);
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.RUN_AS)) {
runAsUsers = readStringArray(name, parser, true);
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.CLUSTER)) {
@ -216,19 +217,21 @@ public class RoleDescriptor implements ToXContent {
}
}
private static RoleDescriptor.IndicesPrivileges[] parseIndices(String roleName, XContentParser parser) throws IOException {
private static RoleDescriptor.IndicesPrivileges[] parseIndices(String roleName, XContentParser parser,
boolean allow2xFormat) throws IOException {
if (parser.currentToken() != XContentParser.Token.START_ARRAY) {
throw new ElasticsearchParseException("failed to parse indices privileges for role [{}]. expected field [{}] value " +
"to be an array, but found [{}] instead", roleName, parser.currentName(), parser.currentToken());
}
List<RoleDescriptor.IndicesPrivileges> privileges = new ArrayList<>();
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
privileges.add(parseIndex(roleName, parser));
privileges.add(parseIndex(roleName, parser, allow2xFormat));
}
return privileges.toArray(new IndicesPrivileges[privileges.size()]);
}
private static RoleDescriptor.IndicesPrivileges parseIndex(String roleName, XContentParser parser) throws IOException {
private static RoleDescriptor.IndicesPrivileges parseIndex(String roleName, XContentParser parser,
boolean allow2xFormat) throws IOException {
XContentParser.Token token = parser.currentToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new ElasticsearchParseException("failed to parse indices privileges for role [{}]. expected field [{}] value to " +
@ -238,7 +241,8 @@ public class RoleDescriptor implements ToXContent {
String[] names = null;
String query = null;
String[] privileges = null;
String[] fields = null;
String[] grantedFields = null;
String[] deniedFields = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
@ -269,10 +273,58 @@ public class RoleDescriptor implements ToXContent {
throw new ElasticsearchParseException("failed to parse indices privileges for role [{}]. expected field [{}] " +
"value to be null, a string, or an object, but found [{}] instead", roleName, currentFieldName, token);
}
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.FIELD_PERMISSIONS)) {
if (token == XContentParser.Token.START_OBJECT) {
token = parser.nextToken();
do {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.GRANT_FIELDS)) {
parser.nextToken();
grantedFields = readStringArray(roleName, parser, true);
if (grantedFields == null) {
throw new ElasticsearchParseException("failed to parse indices privileges for role [{}]. {} must not " +
"be null.", roleName, Fields.GRANT_FIELDS);
}
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.EXCEPT_FIELDS)) {
parser.nextToken();
deniedFields = readStringArray(roleName, parser, true);
if (deniedFields == null) {
throw new ElasticsearchParseException("failed to parse indices privileges for role [{}]. {} must not " +
"be null.", roleName, Fields.EXCEPT_FIELDS);
}
} else {
throw new ElasticsearchParseException("failed to parse indices privileges for role [{}]. " +
"\"{}\" only accepts options {} and {}, but got: {}",
roleName, Fields.FIELD_PERMISSIONS, Fields.GRANT_FIELDS, Fields.EXCEPT_FIELDS
, parser.currentName());
}
} else {
if (token == XContentParser.Token.END_OBJECT) {
throw new ElasticsearchParseException("failed to parse indices privileges for role [{}]. " +
"\"{}\" must not be empty.", roleName, Fields.FIELD_PERMISSIONS);
} else {
throw new ElasticsearchParseException("failed to parse indices privileges for role [{}]. expected {} but " +
"got {}.", roleName, XContentParser.Token.FIELD_NAME,
token);
}
}
} while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT);
} else {
throw new ElasticsearchParseException("failed to parse indices privileges for role [{}]. expected {} or {} but got {}" +
" in \"{}\".", roleName, XContentParser.Token.START_OBJECT,
XContentParser.Token.START_ARRAY, token, Fields.FIELD_PERMISSIONS);
}
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.PRIVILEGES)) {
privileges = readStringArray(roleName, parser, true);
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.FIELDS)) {
fields = readStringArray(roleName, parser, true);
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, Fields.FIELD_PERMISSIONS_2X)) {
if (allow2xFormat) {
grantedFields = readStringArray(roleName, parser, true);
} else {
throw new ElasticsearchParseException("[\"fields\": [...]] format has changed for field" +
" permissions in role [{}], use [\"{}\": {\"{}\":[...],\"{}\":[...]}] instead",
roleName, Fields.FIELD_PERMISSIONS, Fields.GRANT_FIELDS, Fields.EXCEPT_FIELDS);
}
} else {
throw new ElasticsearchParseException("failed to parse indices privileges for role [{}]. unexpected field [{}]",
roleName, currentFieldName);
@ -286,17 +338,21 @@ public class RoleDescriptor implements ToXContent {
throw new ElasticsearchParseException("failed to parse indices privileges for role [{}]. missing required [{}] field",
roleName, Fields.PRIVILEGES.getPreferredName());
}
if (deniedFields != null && grantedFields == null) {
throw new ElasticsearchParseException("failed to parse indices privileges for role [{}]. {} requires {} if {} is given",
roleName, Fields.FIELD_PERMISSIONS, Fields.GRANT_FIELDS, Fields.EXCEPT_FIELDS);
}
return RoleDescriptor.IndicesPrivileges.builder()
.indices(names)
.privileges(privileges)
.fields(fields)
.fieldPermissions(new FieldPermissions(grantedFields, deniedFields))
.query(query)
.build();
}
/**
* A class representing permissions for a group of indices mapped to
* privileges, fields, and a query.
* privileges, field permissions, and a query.
*/
public static class IndicesPrivileges implements ToXContent, Streamable {
@ -304,7 +360,7 @@ public class RoleDescriptor implements ToXContent {
private String[] indices;
private String[] privileges;
private String[] fields;
private FieldPermissions fieldPermissions = new FieldPermissions();
private BytesReference query;
private IndicesPrivileges() {
@ -322,9 +378,8 @@ public class RoleDescriptor implements ToXContent {
return this.privileges;
}
@Nullable
public String[] getFields() {
return this.fields;
public FieldPermissions getFieldPermissions() {
return fieldPermissions;
}
@Nullable
@ -337,9 +392,10 @@ public class RoleDescriptor implements ToXContent {
StringBuilder sb = new StringBuilder("IndicesPrivileges[");
sb.append("indices=[").append(Strings.arrayToCommaDelimitedString(indices));
sb.append("], privileges=[").append(Strings.arrayToCommaDelimitedString(privileges));
sb.append("], fields=[").append(Strings.arrayToCommaDelimitedString(fields));
sb.append("], ");
sb.append(fieldPermissions.toString());
if (query != null) {
sb.append("], query=").append(query.utf8ToString());
sb.append(", query=").append(query.utf8ToString());
}
sb.append("]");
return sb.toString();
@ -354,7 +410,7 @@ public class RoleDescriptor implements ToXContent {
if (!Arrays.equals(indices, that.indices)) return false;
if (!Arrays.equals(privileges, that.privileges)) return false;
if (!Arrays.equals(fields, that.fields)) return false;
if (fieldPermissions.equals(that.fieldPermissions) == false) return false;
return !(query != null ? !query.equals(that.query) : that.query != null);
}
@ -362,7 +418,7 @@ public class RoleDescriptor implements ToXContent {
public int hashCode() {
int result = Arrays.hashCode(indices);
result = 31 * result + Arrays.hashCode(privileges);
result = 31 * result + Arrays.hashCode(fields);
result = 31 * result + fieldPermissions.hashCode();
result = 31 * result + (query != null ? query.hashCode() : 0);
return result;
}
@ -372,9 +428,7 @@ public class RoleDescriptor implements ToXContent {
builder.startObject();
builder.array("names", indices);
builder.array("privileges", privileges);
if (fields != null) {
builder.array("fields", fields);
}
builder = fieldPermissions.toXContent(builder, params);
if (query != null) {
builder.field("query", query.utf8ToString());
}
@ -390,7 +444,7 @@ public class RoleDescriptor implements ToXContent {
@Override
public void readFrom(StreamInput in) throws IOException {
this.indices = in.readStringArray();
this.fields = in.readOptionalStringArray();
this.fieldPermissions = new FieldPermissions(in);
this.privileges = in.readStringArray();
this.query = in.readOptionalBytesReference();
}
@ -398,7 +452,7 @@ public class RoleDescriptor implements ToXContent {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeStringArray(indices);
out.writeOptionalStringArray(fields);
fieldPermissions.writeTo(out);
out.writeStringArray(privileges);
out.writeOptionalBytesReference(query);
}
@ -420,8 +474,8 @@ public class RoleDescriptor implements ToXContent {
return this;
}
public Builder fields(@Nullable String... fields) {
indicesPrivileges.fields = fields;
public Builder fieldPermissions(FieldPermissions fieldPermissions) {
indicesPrivileges.fieldPermissions = fieldPermissions;
return this;
}
@ -453,7 +507,10 @@ public class RoleDescriptor implements ToXContent {
ParseField NAMES = new ParseField("names");
ParseField QUERY = new ParseField("query");
ParseField PRIVILEGES = new ParseField("privileges");
ParseField FIELDS = new ParseField("fields");
ParseField FIELD_PERMISSIONS = new ParseField("field_security");
ParseField FIELD_PERMISSIONS_2X = new ParseField("fields");
ParseField GRANT_FIELDS = new ParseField("grant");
ParseField EXCEPT_FIELDS = new ParseField("except");
ParseField METADATA = new ParseField("metadata");
}
}
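A sketch of what the parser now accepts (the role JSON is illustrative):

String json = "{ \"indices\": [ { \"names\": [ \"index1\" ], \"privileges\": [ \"read\" ],"
        + " \"field_security\": { \"grant\": [ \"foo*\" ], \"except\": [ \"foobar\" ] } } ] }";
// allow2xFormat = false: a role still using the 2.x "fields": [...] syntax is rejected
RoleDescriptor descriptor = RoleDescriptor.parse("role1", new BytesArray(json), false);

Callers that read pre-existing role definitions (the file store and the native store below) pass true instead, so old roles keep working.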

View File

@ -56,33 +56,30 @@ public final class FieldSubsetReader extends FilterLeafReader {
* and so on.
* @param in reader to filter
* @param fieldNames fields to filter.
* @param negate {@code true} if this should be a negative set, meaning set of field names that is denied.
*/
public static DirectoryReader wrap(DirectoryReader in, Set<String> fieldNames, boolean negate) throws IOException {
return new FieldSubsetDirectoryReader(in, fieldNames, negate);
public static DirectoryReader wrap(DirectoryReader in, Set<String> fieldNames) throws IOException {
return new FieldSubsetDirectoryReader(in, fieldNames);
}
// wraps subreaders with fieldsubsetreaders.
static class FieldSubsetDirectoryReader extends FilterDirectoryReader {
private final Set<String> fieldNames;
private final boolean negate;
FieldSubsetDirectoryReader(DirectoryReader in, Set<String> fieldNames, boolean negate) throws IOException {
FieldSubsetDirectoryReader(DirectoryReader in, final Set<String> fieldNames) throws IOException {
super(in, new FilterDirectoryReader.SubReaderWrapper() {
@Override
public LeafReader wrap(LeafReader reader) {
return new FieldSubsetReader(reader, fieldNames, negate);
return new FieldSubsetReader(reader, fieldNames);
}
});
this.fieldNames = fieldNames;
this.negate = negate;
verifyNoOtherFieldSubsetDirectoryReaderIsWrapped(in);
}
@Override
protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
return new FieldSubsetDirectoryReader(in, fieldNames, negate);
return new FieldSubsetDirectoryReader(in, fieldNames);
}
public Set<String> getFieldNames() {
@ -114,23 +111,17 @@ public final class FieldSubsetReader extends FilterLeafReader {
/**
* Wrap a single segment, exposing a subset of its fields.
* @param fields set of field names that should be allowed
* @param negate {@code true} if this should be a negative set, meaning set of field names that is denied.
*/
FieldSubsetReader(LeafReader in, Set<String> fields, boolean negate) {
FieldSubsetReader(LeafReader in, Set<String> fieldNames) {
super(in);
// look at what fields the reader has, and preprocess a subset of them that are allowed
ArrayList<FieldInfo> filteredInfos = new ArrayList<>();
for (FieldInfo fi : in.getFieldInfos()) {
if (fields.contains(fi.name) ^ negate) {
if (fieldNames.contains(fi.name)) {
filteredInfos.add(fi);
}
}
fieldInfos = new FieldInfos(filteredInfos.toArray(new FieldInfo[filteredInfos.size()]));
fieldNames = new String[filteredInfos.size()];
for (int i = 0; i < fieldNames.length; i++) {
fieldNames[i] = filteredInfos.get(i).name;
}
this.fieldNames = fieldNames.toArray(new String[fieldNames.size()]);
}
/** returns true if this field is allowed. */
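The negate flag is gone because deny patterns are now resolved up front by FieldPermissions, so the reader only ever receives a concrete allow set. A minimal sketch of the XOR trick this removes:

Set<String> fields = new HashSet<>(Arrays.asList("foo", "bar"));
// old: negate == true flipped the meaning, turning the allow set into a deny set
boolean keptOld = fields.contains("foo") ^ true;  // false: "foo" was denied
// new: the set is always an allow set
boolean keptNew = fields.contains("foo");         // true: "foo" is kept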

View File

@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.authz.accesscontrol;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;
import java.util.Collections;
import java.util.HashSet;
@ -52,12 +53,12 @@ public class IndicesAccessControl {
public static class IndexAccessControl {
private final boolean granted;
private final Set<String> fields;
private final FieldPermissions fieldPermissions;
private final Set<BytesReference> queries;
public IndexAccessControl(boolean granted, Set<String> fields, Set<BytesReference> queries) {
public IndexAccessControl(boolean granted, FieldPermissions fieldPermissions, Set<BytesReference> queries) {
this.granted = granted;
this.fields = fields;
this.fieldPermissions = fieldPermissions;
this.queries = queries;
}
@ -69,12 +70,10 @@ public class IndicesAccessControl {
}
/**
* @return The allowed fields for this index permissions. If <code>null</code> is returned then
* this means that there are no field level restrictions
* @return The allowed fields for this index permissions.
*/
@Nullable
public Set<String> getFields() {
return fields;
public FieldPermissions getFieldPermissions() {
return fieldPermissions;
}
/**
@ -99,21 +98,8 @@ public class IndicesAccessControl {
return other;
}
// this code is a bit of a pita, but right now we can't just initialize an empty set,
// because an empty Set means no permissions on fields and
// <code>null</code> means no field level security
// Also, if one grants no access to fields and the other grants all access, merging should result in all access...
Set<String> fields = null;
if (this.fields != null && other.getFields() != null) {
fields = new HashSet<>();
if (this.fields != null) {
fields.addAll(this.fields);
}
if (other.getFields() != null) {
fields.addAll(other.getFields());
}
fields = unmodifiableSet(fields);
}
FieldPermissions newPermissions = FieldPermissions.merge(this.fieldPermissions, other.fieldPermissions);
Set<BytesReference> queries = null;
if (this.queries != null && other.getQueries() != null) {
queries = new HashSet<>();
@ -125,9 +111,9 @@ public class IndicesAccessControl {
}
queries = unmodifiableSet(queries);
}
return new IndexAccessControl(granted, fields, queries);
return new IndexAccessControl(granted, newPermissions, queries);
}
}
}
}
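A merge sketch under the new semantics, assuming the enclosing method shown in this hunk is named merge and that both controls grant access (contents are illustrative):

IndicesAccessControl.IndexAccessControl a = new IndicesAccessControl.IndexAccessControl(
        true, new FieldPermissions(new String[] { "foo*" }, null), null);
IndicesAccessControl.IndexAccessControl b = new IndicesAccessControl.IndexAccessControl(
        true, new FieldPermissions(new String[] { "bar" }, null),
        Collections.singleton(new BytesArray("{\"term\":{\"hidden\":false}}")));
IndicesAccessControl.IndexAccessControl merged = a.merge(b);
// field permissions are unioned, so foo* and bar both stay visible; the merged
// queries are null because one side had no DLS restriction at all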

View File

@ -67,7 +67,7 @@ public final class OptOutQueryCache extends AbstractIndexComponent implements Qu
}
IndicesAccessControl.IndexAccessControl indexAccessControl = indicesAccessControl.getIndexPermissions(indexName);
if (indexAccessControl != null && indexAccessControl.getFields() != null) {
if (indexAccessControl != null && indexAccessControl.getFieldPermissions().hasFieldLevelSecurity()) {
if (cachingIsSafe(weight, indexAccessControl)) {
logger.trace("not opting out of the query cache. request for index [{}] is safe to cache", indexName);
return indicesQueryCache.doCache(weight, policy);
@ -98,7 +98,7 @@ public final class OptOutQueryCache extends AbstractIndexComponent implements Qu
// we successfully extracted the set of fields: check each one
for (String field : fields) {
// don't cache any internal fields (e.g. _field_names), these are complicated.
if (field.startsWith("_") || permissions.getFields().contains(field) == false) {
if (field.startsWith("_") || permissions.getFieldPermissions().grantsAccessTo(field) == false) {
return false;
}
}

View File

@ -170,15 +170,11 @@ public class SecurityIndexSearcherWrapper extends IndexSearcherWrapper {
reader = DocumentSubsetReader.wrap(reader, bitsetFilterCache, new ConstantScoreQuery(filter.build()));
}
if (permissions.getFields() != null) {
if (permissions.getFieldPermissions().hasFieldLevelSecurity()) {
// now add the allowed fields based on the currently granted permissions:
Set<String> allowedFields = new HashSet<>(allowedMetaFields);
for (String field : permissions.getFields()) {
allowedFields.addAll(mapperService.simpleMatchToIndexNames(field));
}
Set<String> allowedFields = permissions.getFieldPermissions().resolveAllowedFields(allowedMetaFields, mapperService);
resolveParentChildJoinFields(allowedFields);
// TODO: support 'denied' fields (pass true as the 3rd parameter in this case)
reader = FieldSubsetReader.wrap(reader, allowedFields, false);
reader = FieldSubsetReader.wrap(reader, allowedFields);
}
return reader;

View File

@ -0,0 +1,286 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.authz.permission;
import dk.brics.automaton.Automaton;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.AllFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.security.support.Automatons;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
/**
* Stores the patterns for the fields to which access is granted or denied, and maintains an automaton that can be used to check
* whether access to a specific field is allowed.
* Field permissions are configured via lists of patterns that a field name has to match. Two lists determine whether a field is
* granted access:
* 1. it has to match a pattern in grantedFieldsArray
* 2. it must not match any pattern in deniedFieldsArray
*/
public class FieldPermissions implements Writeable, ToXContent {
// the patterns for fields which we allow access to. if grantedFieldsArray is null we assume that all fields are granted access to
String[] grantedFieldsArray;
// the patterns for fields which we deny access to. if this is an empty list or null we assume that we do not deny access to any
// field explicitly
String[] deniedFieldsArray;
// an automaton that matches all strings that match the patterns in grantedFieldsArray but does not match those that also match a
// pattern in deniedFieldsArray. If the automaton is total we assume that all fields are granted access to.
Automaton permittedFieldsAutomaton;
// we cannot easily determine whether all fields are allowed, and therefore whether the _all field may be exposed, hence we deny
// access to _all unless it was explicitly granted.
boolean allFieldIsAllowed = false;
public FieldPermissions(StreamInput in) throws IOException {
this(in.readOptionalStringArray(), in.readOptionalStringArray());
}
public FieldPermissions(@Nullable String[] grantedFieldsArray, @Nullable String[] deniedFieldsArray) {
this.grantedFieldsArray = grantedFieldsArray;
this.deniedFieldsArray = deniedFieldsArray;
permittedFieldsAutomaton = initializePermittedFieldsAutomaton(grantedFieldsArray, deniedFieldsArray);
allFieldIsAllowed = checkAllFieldIsAllowed(grantedFieldsArray, deniedFieldsArray);
}
private static boolean checkAllFieldIsAllowed(String[] grantedFieldsArray, String[] deniedFieldsArray) {
if (deniedFieldsArray != null) {
for (String fieldName : deniedFieldsArray) {
if (fieldName.equals(AllFieldMapper.NAME)) {
return false;
}
}
}
if (grantedFieldsArray != null) {
for (String fieldName : grantedFieldsArray) {
if (fieldName.equals(AllFieldMapper.NAME)) {
return true;
}
}
}
return false;
}
private static Automaton initializePermittedFieldsAutomaton(final String[] grantedFieldsArray,
final String[] deniedFieldsArray) {
Automaton grantedFieldsAutomaton;
if (grantedFieldsArray == null || containsWildcard(grantedFieldsArray)) {
grantedFieldsAutomaton = Automatons.MATCH_ALL;
} else {
grantedFieldsAutomaton = Automatons.patterns(grantedFieldsArray);
}
Automaton deniedFieldsAutomaton;
if (deniedFieldsArray == null || deniedFieldsArray.length == 0) {
deniedFieldsAutomaton = Automatons.EMPTY;
} else {
deniedFieldsAutomaton = Automatons.patterns(deniedFieldsArray);
}
if (deniedFieldsAutomaton.subsetOf(grantedFieldsAutomaton) == false) {
throw new ElasticsearchSecurityException("Exceptions for field permissions must be a subset of the " +
"granted fields but " + Arrays.toString(deniedFieldsArray) + " is not a subset of " +
Arrays.toString(grantedFieldsArray));
}
grantedFieldsAutomaton = grantedFieldsAutomaton.minus(deniedFieldsAutomaton);
return grantedFieldsAutomaton;
}
private static boolean containsWildcard(String[] grantedFieldsArray) {
for (String fieldPattern : grantedFieldsArray) {
if (Regex.isMatchAllPattern(fieldPattern)) {
return true;
}
}
return false;
}
public FieldPermissions() {
this(null, null);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalStringArray(grantedFieldsArray);
out.writeOptionalStringArray(deniedFieldsArray);
}
@Nullable
String[] getGrantedFieldsArray() {
return grantedFieldsArray;
}
@Nullable
String[] getDeniedFieldsArray() {
return deniedFieldsArray;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
if (grantedFieldsArray != null || deniedFieldsArray != null) {
sb.append(RoleDescriptor.Fields.FIELD_PERMISSIONS).append("=[");
if (grantedFieldsArray == null) {
sb.append(RoleDescriptor.Fields.GRANT_FIELDS).append("=null");
} else {
sb.append(RoleDescriptor.Fields.GRANT_FIELDS).append("=[")
.append(Strings.arrayToCommaDelimitedString(grantedFieldsArray));
sb.append("]");
}
if (deniedFieldsArray == null) {
sb.append(", ").append(RoleDescriptor.Fields.EXCEPT_FIELDS).append("=null");
} else {
sb.append(", ").append(RoleDescriptor.Fields.EXCEPT_FIELDS).append("=[")
.append(Strings.arrayToCommaDelimitedString(deniedFieldsArray));
sb.append("]");
}
sb.append("]");
}
return sb.toString();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (grantedFieldsArray != null || deniedFieldsArray != null) {
builder.startObject(RoleDescriptor.Fields.FIELD_PERMISSIONS.getPreferredName());
if (grantedFieldsArray != null) {
builder.array(RoleDescriptor.Fields.GRANT_FIELDS.getPreferredName(), grantedFieldsArray);
}
if (deniedFieldsArray != null) {
builder.array(RoleDescriptor.Fields.EXCEPT_FIELDS.getPreferredName(), deniedFieldsArray);
}
builder.endObject();
}
return builder;
}
/**
* Returns true if this field permission policy allows access to the field and false if not.
* fieldName can be a wildcard.
*/
public boolean grantsAccessTo(String fieldName) {
if (permittedFieldsAutomaton.isTotal()) {
return true;
} else {
return permittedFieldsAutomaton.run(fieldName);
}
}
// if one grants no access to fields and the other grants all access, merging results in all access
public static FieldPermissions merge(FieldPermissions p1, FieldPermissions p2) {
Automaton mergedPermittedFieldsAutomaton;
// we only allow the union of the two automatons
mergedPermittedFieldsAutomaton = p1.permittedFieldsAutomaton.union(p2.permittedFieldsAutomaton);
// need to minimize otherwise isTotal() might return false even if one of the merged ones returned true before
mergedPermittedFieldsAutomaton.minimize();
// if one of them allows access to _all we allow it for the merged too
boolean allFieldIsAllowedInMerged = p1.allFieldIsAllowed || p2.allFieldIsAllowed;
return new MergedFieldPermissions(mergedPermittedFieldsAutomaton, allFieldIsAllowedInMerged);
}
public boolean hasFieldLevelSecurity() {
return permittedFieldsAutomaton.isTotal() == false;
}
public Set<String> resolveAllowedFields(Set<String> allowedMetaFields, MapperService mapperService) {
HashSet<String> finalAllowedFields = new HashSet<>();
// we always add the allowed meta fields because we must make sure access is not denied accidentally
finalAllowedFields.addAll(allowedMetaFields);
// now check all other fields if we allow them
Collection<String> allFields = mapperService.simpleMatchToIndexNames("*");
for (String fieldName : allFields) {
if (grantsAccessTo(fieldName)) {
finalAllowedFields.add(fieldName);
}
}
if (allFieldIsAllowed == false) {
// we probably added the _all field and now we have to remove it again
finalAllowedFields.remove("_all");
}
return finalAllowedFields;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
FieldPermissions that = (FieldPermissions) o;
if (allFieldIsAllowed != that.allFieldIsAllowed) return false;
// element-wise comparison of the String[] pattern lists
if (!Arrays.equals(grantedFieldsArray, that.grantedFieldsArray)) return false;
if (!Arrays.equals(deniedFieldsArray, that.deniedFieldsArray)) return false;
return permittedFieldsAutomaton.equals(that.permittedFieldsAutomaton);
}
@Override
public int hashCode() {
int result = Arrays.hashCode(grantedFieldsArray);
result = 31 * result + Arrays.hashCode(deniedFieldsArray);
result = 31 * result + permittedFieldsAutomaton.hashCode();
result = 31 * result + (allFieldIsAllowed ? 1 : 0);
return result;
}
/**
* When we merge field permissions we need to union all the allowed fields. We do this via a union of the automatons
* that define which fields are granted access to. However, that means that after merging we can no longer know
* which strings defined the automatons. Hence we use a subclass that only has an automaton for the fields that
* we grant access to and that throws an exception whenever we try to access the original patterns that led to
* the automaton.
*/
public static class MergedFieldPermissions extends FieldPermissions {
public MergedFieldPermissions(Automaton grantedFields, boolean allFieldIsAllowed) {
assert grantedFields != null;
this.permittedFieldsAutomaton = grantedFields;
this.grantedFieldsArray = null;
this.deniedFieldsArray = null;
this.allFieldIsAllowed = allFieldIsAllowed;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
throw new UnsupportedOperationException("Cannot build xcontent for merged field permissions");
}
@Override
public String toString() {
throw new UnsupportedOperationException("Cannot build string for merged field permissions");
}
@Override
public void writeTo(StreamOutput out) throws IOException {
throw new UnsupportedOperationException("Cannot stream for merged field permissions");
}
@Nullable
public String[] getGrantedFieldsArray() {
throw new UnsupportedOperationException("Merged field permissions does not maintain sets");
}
@Nullable
public String[] getDeniedFieldsArray() {
throw new UnsupportedOperationException("Merged field permissions does not maintain sets");
}
}
}
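A sketch of the merge semantics defined above: the result is the union of what either permission allows, and the original pattern lists are intentionally no longer available:

FieldPermissions p1 = new FieldPermissions(new String[] { "foo*" }, new String[] { "foobar" });
FieldPermissions p2 = new FieldPermissions(new String[] { "foobar" }, null);
FieldPermissions merged = FieldPermissions.merge(p1, p2);
merged.grantsAccessTo("foobar");  // true: p2 grants what p1 denied
merged.hasFieldLevelSecurity();   // true: the union still does not cover all fields
merged.getGrantedFieldsArray();   // throws UnsupportedOperationException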

View File

@ -23,6 +23,7 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.SortedMap;
import java.util.concurrent.ConcurrentHashMap;
@ -107,7 +108,7 @@ public interface IndicesPermission extends Permission, Iterable<IndicesPermissio
// by at least one indices permission group
SortedMap<String, AliasOrIndex> allAliasesAndIndices = metaData.getAliasAndIndexLookup();
Map<String, Set<String>> rolesFieldsByIndex = new HashMap<>();
Map<String, Set<FieldPermissions>> fieldPermissionsByIndex = new HashMap<>();
Map<String, Set<BytesReference>> roleQueriesByIndex = new HashMap<>();
Map<String, Boolean> grantedBuilder = new HashMap<>();
@ -125,14 +126,10 @@ public interface IndicesPermission extends Permission, Iterable<IndicesPermissio
if (group.check(action, indexOrAlias)) {
granted = true;
for (String index : concreteIndices) {
if (group.hasFields()) {
Set<String> roleFields = rolesFieldsByIndex.get(index);
if (roleFields == null) {
roleFields = new HashSet<>();
rolesFieldsByIndex.put(index, roleFields);
}
roleFields.addAll(group.getFields());
if (fieldPermissionsByIndex.get(index) == null) {
fieldPermissionsByIndex.put(index, new HashSet<>());
}
fieldPermissionsByIndex.get(index).add(group.getFieldPermissions());
if (group.hasQuery()) {
Set<BytesReference> roleQueries = roleQueriesByIndex.get(index);
if (roleQueries == null) {
@ -161,15 +158,17 @@ public interface IndicesPermission extends Permission, Iterable<IndicesPermissio
if (roleQueries != null) {
roleQueries = unmodifiableSet(roleQueries);
}
Set<String> roleFields = rolesFieldsByIndex.get(index);
if (roleFields != null) {
if (roleFields.contains("*")) {
roleFields = null;
} else {
roleFields = unmodifiableSet(roleFields);
FieldPermissions fieldPermissions = new FieldPermissions();
Set<FieldPermissions> indexFieldPermissions = fieldPermissionsByIndex.get(index);
if (indexFieldPermissions != null) {
// seed with the first entry: merging starting from the grant-all default would erase all field restrictions
fieldPermissions = indexFieldPermissions.iterator().next();
for (FieldPermissions fp : indexFieldPermissions) {
fieldPermissions = FieldPermissions.merge(fieldPermissions, fp);
}
}
indexPermissions.put(index, new IndicesAccessControl.IndexAccessControl(entry.getValue(), roleFields, roleQueries));
indexPermissions.put(index, new IndicesAccessControl.IndexAccessControl(entry.getValue(), fieldPermissions, roleQueries));
}
return unmodifiableMap(indexPermissions);
}
@ -291,16 +290,21 @@ public interface IndicesPermission extends Permission, Iterable<IndicesPermissio
private final Predicate<String> actionMatcher;
private final String[] indices;
private final Predicate<String> indexNameMatcher;
private final List<String> fields;
public FieldPermissions getFieldPermissions() {
return fieldPermissions;
}
private final FieldPermissions fieldPermissions;
private final BytesReference query;
public Group(IndexPrivilege privilege, @Nullable List<String> fields, @Nullable BytesReference query, String... indices) {
public Group(IndexPrivilege privilege, FieldPermissions fieldPermissions, @Nullable BytesReference query, String... indices) {
assert indices.length != 0;
this.privilege = privilege;
this.actionMatcher = privilege.predicate();
this.indices = indices;
this.indexNameMatcher = new AutomatonPredicate(Automatons.patterns(indices));
this.fields = fields;
this.fieldPermissions = Objects.requireNonNull(fieldPermissions);
this.query = query;
}
@ -312,11 +316,6 @@ public interface IndicesPermission extends Permission, Iterable<IndicesPermissio
return indices;
}
@Nullable
public List<String> getFields() {
return fields;
}
@Nullable
public BytesReference getQuery() {
return query;
@ -331,10 +330,6 @@ public interface IndicesPermission extends Permission, Iterable<IndicesPermissio
return actionMatcher.test(action) && indexNameMatcher.test(index);
}
public boolean hasFields() {
return fields != null;
}
public boolean hasQuery() {
return query != null;
}
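Seeding the fold with the first entry matters because merge is a union and the default FieldPermissions grants everything, so starting from the default would erase every restriction:

FieldPermissions restricted = new FieldPermissions(new String[] { "foo" }, null);
// union with the grant-all default is grant-all again: field level security is lost
FieldPermissions.merge(new FieldPermissions(), restricted).hasFieldLevelSecurity(); // false
// seeding with an actual entry keeps the restriction
FieldPermissions.merge(restricted, restricted).hasFieldLevelSecurity();             // true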

View File

@ -11,10 +11,8 @@ import org.elasticsearch.xpack.security.authz.privilege.ClusterPrivilege;
import org.elasticsearch.xpack.security.authz.privilege.GeneralPrivilege;
import org.elasticsearch.xpack.security.authz.privilege.IndexPrivilege;
import org.elasticsearch.xpack.security.authz.privilege.Privilege;
import org.elasticsearch.xpack.security.authz.privilege.Privilege.Name;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
@ -93,12 +91,12 @@ public class Role extends GlobalPermission {
}
public Builder add(IndexPrivilege privilege, String... indices) {
groups.add(new IndicesPermission.Group(privilege, null, null, indices));
groups.add(new IndicesPermission.Group(privilege, new FieldPermissions(), null, indices));
return this;
}
public Builder add(List<String> fields, BytesReference query, IndexPrivilege privilege, String... indices) {
groups.add(new IndicesPermission.Group(privilege, fields, query, indices));
public Builder add(FieldPermissions fieldPermissions, BytesReference query, IndexPrivilege privilege, String... indices) {
groups.add(new IndicesPermission.Group(privilege, fieldPermissions, query, indices));
return this;
}
@ -112,7 +110,7 @@ public class Role extends GlobalPermission {
List<IndicesPermission.Group> list = new ArrayList<>(indicesPrivileges.length);
for (RoleDescriptor.IndicesPrivileges privilege : indicesPrivileges) {
list.add(new IndicesPermission.Group(IndexPrivilege.get(new Privilege.Name(privilege.getPrivileges())),
privilege.getFields() == null ? null : Arrays.asList(privilege.getFields()),
privilege.getFieldPermissions(),
privilege.getQuery(),
privilege.getIndices()));

View File

@ -100,7 +100,7 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
boolean fls = false;
for (Role role : permissions.values()) {
for (Group group : role.indices()) {
fls = fls || group.hasFields();
fls = fls || group.getFieldPermissions().hasFieldLevelSecurity();
dls = dls || group.hasQuery();
}
if (fls && dls) {
@ -205,7 +205,8 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
String roleName = descriptor.getName();
// first check if FLS/DLS is enabled on the role...
for (RoleDescriptor.IndicesPrivileges privilege : descriptor.getIndicesPrivileges()) {
if ((privilege.getQuery() != null || privilege.getFields() != null)
if ((privilege.getQuery() != null || privilege.getFieldPermissions().hasFieldLevelSecurity())
&& XPackSettings.DLS_FLS_ENABLED.get(settings) == false) {
logger.error("invalid role definition [{}] in roles file [{}]. document and field level security is not " +
"enabled. set [{}] to [true] in the configuration file. skipping role...", roleName, path
@ -220,7 +221,7 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
}
@Nullable
private static RoleDescriptor parseRoleDescriptor(String segment, Path path, Logger logger,
static RoleDescriptor parseRoleDescriptor(String segment, Path path, Logger logger,
boolean resolvePermissions, Settings settings) {
String roleName = null;
try {
@ -243,7 +244,9 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
token = parser.nextToken();
if (token == XContentParser.Token.START_OBJECT) {
RoleDescriptor descriptor = RoleDescriptor.parse(roleName, parser);
// we pass true as last parameter because we do not want to reject files if field permissions
// are given in 2.x syntax
RoleDescriptor descriptor = RoleDescriptor.parse(roleName, parser, true);
return descriptor;
} else {
logger.error("invalid role definition [{}] in roles file [{}]. skipping role...", roleName, path.toAbsolutePath());

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.security.authz.store;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
@ -69,6 +70,7 @@ import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.existsQuery;
import static org.elasticsearch.xpack.security.Security.setting;
import static org.elasticsearch.xpack.security.SecurityTemplateService.securityIndexMappingAndTemplateUpToDate;
@ -207,7 +209,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
boolean hasHits = resp.getHits().getHits().length > 0;
if (hasHits) {
for (SearchHit hit : resp.getHits().getHits()) {
RoleDescriptor rd = transformRole(hit.getId(), hit.getSourceRef());
RoleDescriptor rd = transformRole(hit.getId(), hit.getSourceRef(), logger);
if (rd != null) {
roles.add(rd);
}
@ -345,7 +347,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
for (RoleAndVersion rv : roleCache.values()) {
Role role = rv.getRole();
for (Group group : role.indices()) {
fls = fls || group.hasFields();
fls = fls || group.getFieldPermissions().hasFieldLevelSecurity();
dls = dls || group.hasQuery();
}
if (fls && dls) {
@ -365,7 +367,11 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
if (fls == false) {
builder.add(client.prepareSearch(SecurityTemplateService.SECURITY_INDEX_NAME)
.setTypes(ROLE_DOC_TYPE)
.setQuery(QueryBuilders.existsQuery("indices.fields"))
.setQuery(QueryBuilders.boolQuery()
.should(existsQuery("indices.field_security.grant"))
.should(existsQuery("indices.field_security.except"))
// for backwards compatibility with 2.x
.should(existsQuery("indices.fields")))
.setSize(0)
.setTerminateAfter(1));
}
@ -373,7 +379,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
if (dls == false) {
builder.add(client.prepareSearch(SecurityTemplateService.SECURITY_INDEX_NAME)
.setTypes(ROLE_DOC_TYPE)
.setQuery(QueryBuilders.existsQuery("indices.query"))
.setQuery(existsQuery("indices.query"))
.setSize(0)
.setTerminateAfter(1));
}
@ -560,13 +566,15 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
if (response.isExists() == false) {
return null;
}
return transformRole(response.getId(), response.getSourceAsBytesRef());
return transformRole(response.getId(), response.getSourceAsBytesRef(), logger);
}
@Nullable
private RoleDescriptor transformRole(String name, BytesReference sourceBytes) {
static RoleDescriptor transformRole(String name, BytesReference sourceBytes, Logger logger) {
try {
return RoleDescriptor.parse(name, sourceBytes);
// we pass true as last parameter because we do not want to reject permissions if the field permissions
// are given in 2.x syntax
return RoleDescriptor.parse(name, sourceBytes, true);
} catch (Exception e) {
logger.error((Supplier<?>) () -> new ParameterizedMessage("error in the format of data for role [{}]", name), e);
return null;

View File

@ -25,6 +25,7 @@ import static dk.brics.automaton.MinimizationOperations.minimize;
public final class Automatons {
public static final Automaton EMPTY = BasicAutomata.makeEmpty();
public static final Automaton MATCH_ALL = BasicAutomata.makeAnyString();
static final char WILDCARD_STRING = '*'; // String equality with support for wildcards
static final char WILDCARD_CHAR = '?'; // Char equality with support for wildcards

View File

@ -76,8 +76,15 @@
"indices" : {
"type" : "object",
"properties" : {
"fields" : {
"type" : "keyword"
"field_security" : {
"properties" : {
"grant": {
"type": "keyword"
},
"except": {
"type": "keyword"
}
}
},
"names" : {
"type" : "keyword"

View File

@ -8,6 +8,14 @@ package org.elasticsearch;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.XPackFeatureSet;
import org.elasticsearch.xpack.action.XPackUsageRequestBuilder;
import org.elasticsearch.xpack.action.XPackUsageResponse;
import org.elasticsearch.xpack.security.SecurityFeatureSet;
import org.elasticsearch.xpack.security.action.role.ClearRolesCacheRequestBuilder;
import org.elasticsearch.xpack.security.action.role.ClearRolesCacheResponse;
import org.elasticsearch.xpack.security.action.role.GetRolesResponse;
import org.elasticsearch.xpack.security.action.role.PutRoleResponse;
import org.elasticsearch.xpack.security.action.user.GetUsersResponse;
@ -15,16 +23,22 @@ import org.elasticsearch.xpack.security.action.user.PutUserResponse;
import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore;
import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;
import org.elasticsearch.xpack.security.authz.store.NativeRolesStore;
import org.elasticsearch.xpack.security.client.SecurityClient;
import org.elasticsearch.xpack.security.user.User;
import java.util.Collections;
import java.util.List;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordTokenTests.basicAuthHeaderValue;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
/**
* Backwards compatibility test that loads some data from a pre-5.0 cluster and attempts to do some basic security stuff with it. It
@ -57,12 +71,30 @@ public class OldSecurityIndexBackwardsCompatibilityIT extends AbstractOldXPackIn
}
protected void checkVersion(Version version) throws Exception {
// test that user and roles are there
logger.info("Getting roles...");
// wait for service to start
SecurityClient securityClient = new SecurityClient(client());
assertBusy(() -> {
assertEquals(NativeRolesStore.State.STARTED, internalCluster().getInstance(NativeRolesStore.class).state());
});
// make sure usage stats are still working even with old fls format
ClearRolesCacheResponse clearResponse = new ClearRolesCacheRequestBuilder(client()).get();
assertThat(clearResponse.failures().size(), equalTo(0));
XPackUsageResponse usageResponse = new XPackUsageRequestBuilder(client()).get();
List<XPackFeatureSet.Usage> usagesList = usageResponse.getUsages();
for (XPackFeatureSet.Usage usage : usagesList) {
if (usage instanceof SecurityFeatureSet.Usage) {
XContentBuilder builder = jsonBuilder();
usage.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertThat(builder.string(),
anyOf(containsString("\"roles\":{\"native\":{\"size\":1,\"fls\":true,\"dls\":true}"),
containsString("\"roles\":{\"native\":{\"size\":1,\"dls\":true,\"fls\":true}")));
}
}
// test that user and roles are there
logger.info("Getting roles...");
GetRolesResponse getRolesResponse = securityClient.prepareGetRoles("bwc_test_role").get();
assertThat(getRolesResponse.roles(), arrayWithSize(1));
RoleDescriptor role = getRolesResponse.roles()[0];
@ -71,12 +103,19 @@ public class OldSecurityIndexBackwardsCompatibilityIT extends AbstractOldXPackIn
RoleDescriptor.IndicesPrivileges indicesPrivileges = role.getIndicesPrivileges()[0];
assertThat(indicesPrivileges.getIndices(), arrayWithSize(2));
assertArrayEquals(new String[] { "index1", "index2" }, indicesPrivileges.getIndices());
assertArrayEquals(new String[] { "title", "body" }, indicesPrivileges.getFields());
assertTrue(indicesPrivileges.getFieldPermissions().grantsAccessTo("title"));
assertTrue(indicesPrivileges.getFieldPermissions().grantsAccessTo("body"));
assertArrayEquals(new String[] { "all" }, indicesPrivileges.getPrivileges());
assertEquals("{\"match\": {\"title\": \"foo\"}}", indicesPrivileges.getQuery().utf8ToString());
assertArrayEquals(new String[] { "all" }, role.getClusterPrivileges());
assertArrayEquals(new String[] { "other_user" }, role.getRunAs());
assertEquals("bwc_test_role", role.getName());
// check x-content is rendered in new format although it comes from an old index
XContentBuilder builder = jsonBuilder();
builder.startObject();
indicesPrivileges.getFieldPermissions().toXContent(builder, null);
builder.endObject();
assertThat(builder.string(), equalTo("{\"field_security\":{\"grant\":[\"title\",\"body\"]}}"));
logger.info("Getting users...");
assertBusy(() -> {
@ -110,7 +149,7 @@ public class OldSecurityIndexBackwardsCompatibilityIT extends AbstractOldXPackIn
PutRoleResponse roleResponse = securityClient.preparePutRole("test_role").addIndices(
new String[] { "index3" },
new String[] { "all" },
new String[] { "title", "body" },
new FieldPermissions(new String[]{"title", "body"}, null),
new BytesArray("{\"term\": {\"title\":\"not\"}}")).cluster("all")
.get();
assertTrue(roleResponse.isCreated());

View File

@ -13,6 +13,7 @@ import org.elasticsearch.xpack.security.SecurityTemplateService;
import org.elasticsearch.xpack.security.action.role.DeleteRoleResponse;
import org.elasticsearch.xpack.security.action.role.GetRolesResponse;
import org.elasticsearch.xpack.security.action.role.PutRoleResponse;
import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;
import org.elasticsearch.xpack.security.authz.store.NativeRolesStore;
import org.elasticsearch.xpack.security.client.SecurityClient;
import org.junit.Before;
@ -49,7 +50,7 @@ public class ClearRolesCacheTests extends NativeRealmIntegTestCase {
for (String role : roles) {
c.preparePutRole(role)
.cluster("none")
.addIndices(new String[] { "*" }, new String[] { "ALL" }, null, null)
.addIndices(new String[] { "*" }, new String[] { "ALL" }, new FieldPermissions(), null)
.get();
logger.debug("--> created role [{}]", role);
}
@ -82,7 +83,7 @@ public class ClearRolesCacheTests extends NativeRealmIntegTestCase {
for (String role : toModify) {
PutRoleResponse response = securityClient.preparePutRole(role)
.cluster("none")
.addIndices(new String[] { "*" }, new String[] { "ALL" }, null, null)
.addIndices(new String[] { "*" }, new String[] { "ALL" }, new FieldPermissions(), null)
.runAs(role)
.setRefreshPolicy(randomBoolean() ? IMMEDIATE : NONE)
.get();

View File

@ -65,21 +65,24 @@ public class DocumentAndFieldLevelSecurityTests extends SecurityIntegTestCase {
" indices:\n" +
" - names: '*'\n" +
" privileges: [ ALL ]\n" +
" fields: [ field1 ]\n" +
" field_security:\n" +
" grant: [ field1 ]\n" +
" query: '{\"term\" : {\"field1\" : \"value1\"}}'\n" +
"role3:\n" +
" cluster: [ all ]\n" +
" indices:\n" +
" - names: '*'\n" +
" privileges: [ ALL ]\n" +
" fields: [ field2 ]\n" +
" field_security:\n" +
" grant: [ field2 ]\n" +
" query: '{\"term\" : {\"field2\" : \"value2\"}}'\n" +
"role4:\n" +
" cluster: [ all ]\n" +
" indices:\n" +
" - names: '*'\n" +
" privileges: [ ALL ]\n" +
" fields: [ field1 ]\n" +
" field_security:\n" +
" grant: [ field1 ]\n" +
" query: '{\"term\" : {\"field2\" : \"value2\"}}'\n";
}

View File

@ -76,7 +76,7 @@ public class FieldLevelSecurityRandomTests extends SecurityIntegTestCase {
StringBuilder roleFields = new StringBuilder();
for (String field : allowedFields) {
roleFields.append(" - ").append(field).append('\n');
roleFields.append(" - ").append(field).append('\n');
}
return super.configRoles() +
@ -90,7 +90,8 @@ public class FieldLevelSecurityRandomTests extends SecurityIntegTestCase {
" indices:\n" +
" - names: '*'\n" +
" privileges: [ ALL ]\n" +
" fields:\n" +roleFields.toString() +
" field_security:\n" +
" grant:\n" + roleFields.toString() +
"role3:\n" +
" cluster:\n" +
" - all\n" +
@ -98,22 +99,22 @@ public class FieldLevelSecurityRandomTests extends SecurityIntegTestCase {
" - names: test\n" +
" privileges:\n" +
" - all\n" +
" fields:\n" +
" - field1\n" +
" field_security:\n" +
" grant: [ field1 ]\n" +
"role4:\n" +
" cluster: [ all ]\n" +
" indices:\n" +
" - names: test\n" +
" privileges: [ ALL ]\n" +
" fields:\n" +
" - field2\n" +
" field_security:\n" +
" grant: [ field2 ]\n" +
"role5:\n" +
" cluster: [ all ]\n" +
" indices:\n" +
" - names: test\n" +
" privileges: [ ALL ]\n" +
" fields:\n" +
" - field3\n";
" field_security:\n" +
" grant: [ field3 ]\n";
}
@Override

View File

@ -90,27 +90,29 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase {
" indices:\n" +
" - names: '*'\n" +
" privileges: [ ALL ]\n" +
" fields: [ field1 ]\n" +
" field_security:\n" +
" grant: [ field1 ]\n" +
"role3:\n" +
" cluster: [ all ]\n" +
" indices:\n" +
" - names: '*'\n" +
" privileges: [ ALL ]\n" +
" fields: [ field2, query* ]\n" +
" field_security:\n" +
" grant: [ field2, query* ]\n" +
"role4:\n" +
" cluster: [ all ]\n" +
" indices:\n" +
" - names: '*'\n" +
" privileges: [ ALL ]\n" +
" fields:\n" +
" - field1\n" +
" - field2\n" +
" field_security:\n" +
" grant: [ field1, field2]\n" +
"role5:\n" +
" cluster: [ all ]\n" +
" indices:\n" +
" - names: '*'\n" +
" privileges: [ ALL ]\n" +
" fields: []\n" +
" field_security:\n" +
" grant: [ ]\n" +
"role6:\n" +
" cluster: [ all ]\n" +
" indices:\n" +
@ -121,7 +123,8 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase {
" indices:\n" +
" - names: '*'\n" +
" privileges: [ ALL ]\n" +
" fields: [ 'field*' ]\n";
" field_security:\n" +
" grant: [ 'field*' ]\n";
}
@Override

View File

@ -48,13 +48,16 @@ public class IndicesPermissionsWithAliasesWildcardsAndRegexsTests extends Securi
" indices:\n" +
" - names: 't*'\n" +
" privileges: [ALL]\n" +
" fields: [ field1 ]\n" +
" field_security:\n" +
" grant: [ field1 ]\n" +
" - names: 'my_alias'\n" +
" privileges: [ALL]\n" +
" fields: [field2]\n" +
" field_security:\n" +
" grant: [ field2 ]\n" +
" - names: '/an_.*/'\n" +
" privileges: [ALL]\n" +
" fields: [field3]\n";
" field_security:\n" +
" grant: [ field3 ]\n";
}
@Override

View File

@ -0,0 +1,33 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.action.role;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.client.NoOpClient;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import static org.hamcrest.Matchers.containsString;
public class PutRoleBuilderTests extends ESTestCase {
// test that we reject a role where field permissions are stored in 2.x format (fields:...)
public void testBWCFieldPermissions() throws Exception {
Path path = getDataPath("roles2xformat.json");
byte[] bytes = Files.readAllBytes(path);
String roleString = new String(bytes, Charset.defaultCharset());
try (Client client = new NoOpClient("testBWCFieldPermissions")) {
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> new PutRoleRequestBuilder(client)
.source("role1", new BytesArray(roleString)));
assertThat(e.getDetailedMessage(), containsString("\"fields\": [...]] format has changed for field permissions in role " +
"[role1], use [\"field_security\": {\"grant\":[...],\"except\":[...]}] instead"));
}
}
}
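
For reference, a sketch of the two JSON shapes this test distinguishes (the role bodies below are illustrative; the actual contents of roles2xformat.json are not shown in this diff):

// 2.x shape, now rejected: field permissions as a flat "fields" array.
String oldFormat = "{\"indices\": [{\"names\": [\"idx\"], \"privileges\": [\"read\"], " +
        "\"fields\": [\"f1\", \"f2\"]}]}";
// Accepted shape after this change: "field_security" with grant and optional except.
String newFormat = "{\"indices\": [{\"names\": [\"idx\"], \"privileges\": [\"read\"], " +
        "\"field_security\": {\"grant\": [\"f1\", \"f2\"], \"except\": [\"f2\"]}}]}";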

View File

@ -16,6 +16,7 @@ import org.elasticsearch.env.Environment;
import org.elasticsearch.test.NativeRealmIntegTestCase;
import org.elasticsearch.test.SecuritySettingsSource;
import org.elasticsearch.xpack.security.SecurityTemplateService;
import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;
import org.elasticsearch.xpack.security.client.SecurityClient;
import org.elasticsearch.xpack.security.transport.netty3.SecurityNetty3HttpServerTransport;
import org.junit.BeforeClass;
@ -106,7 +107,7 @@ public class ESNativeMigrateToolTests extends NativeRealmIntegTestCase {
.cluster("all", "none")
.runAs("root", "nobody")
.addIndices(new String[]{"index"}, new String[]{"read"},
new String[]{"body", "title"}, new BytesArray("{\"query\": {\"match_all\": {}}}"))
new FieldPermissions(new String[]{"body", "title"}, null), new BytesArray("{\"query\": {\"match_all\": {}}}"))
.get();
addedRoles.add(rname);
}

View File

@ -8,8 +8,9 @@ package org.elasticsearch.xpack.security.authc.esnative;
import org.elasticsearch.cli.Command;
import org.elasticsearch.cli.CommandTestCase;
import org.elasticsearch.common.Strings;
import org.elasticsearch.xpack.security.authc.esnative.ESNativeRealmMigrateTool;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;
import static org.hamcrest.Matchers.equalTo;
/**
@ -33,7 +34,7 @@ public class ESNativeRealmMigrateToolTests extends CommandTestCase {
RoleDescriptor.IndicesPrivileges ip = RoleDescriptor.IndicesPrivileges.builder()
.indices(new String[]{"i1", "i2", "i3"})
.privileges(new String[]{"all"})
.fields(new String[]{"body"})
.fieldPermissions(new FieldPermissions(new String[]{"body"}, null))
.build();
RoleDescriptor.IndicesPrivileges[] ips = new RoleDescriptor.IndicesPrivileges[1];
ips[0] = ip;
@ -42,8 +43,7 @@ public class ESNativeRealmMigrateToolTests extends CommandTestCase {
RoleDescriptor rd = new RoleDescriptor("rolename", cluster, ips, runAs);
assertThat(ESNativeRealmMigrateTool.MigrateUserOrRoles.createRoleJson(rd),
equalTo("{\"cluster\":[],\"indices\":[{\"names\":[\"i1\",\"i2\",\"i3\"]," +
"\"privileges\":[\"all\"],\"fields\":[\"body\"]}],\"run_as\":[],\"metadata\":{}}"));
"\"privileges\":[\"all\"],\"field_security\":{\"grant\":[\"body\"]}}],\"run_as\":[],\"metadata\":{}}"));
}
}
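
Presumably a FieldPermissions carrying a denied list would serialize with an except array next to grant, mirroring the message asserted in PutRoleBuilderTests; a hedged sketch (the expected JSON fragment is an assumption, not asserted anywhere in this diff):

RoleDescriptor.IndicesPrivileges withExcept = RoleDescriptor.IndicesPrivileges.builder()
        .indices(new String[]{"i1"})
        .privileges(new String[]{"all"})
        .fieldPermissions(new FieldPermissions(new String[]{"body", "title"}, new String[]{"title"}))
        .build();
// assumed output fragment: "field_security":{"grant":["body","title"],"except":["title"]}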

View File

@ -27,6 +27,7 @@ import org.elasticsearch.xpack.security.action.user.DeleteUserResponse;
import org.elasticsearch.xpack.security.action.user.GetUsersResponse;
import org.elasticsearch.xpack.security.authc.support.SecuredString;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;
import org.elasticsearch.xpack.security.authz.permission.KibanaRole;
import org.elasticsearch.xpack.security.authz.permission.Role;
import org.elasticsearch.xpack.security.authz.permission.SuperuserRole;
@ -150,7 +151,8 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
.cluster("all", "none")
.runAs("root", "nobody")
.addIndices(new String[]{"index"}, new String[]{"read"},
new String[]{"body", "title"}, new BytesArray("{\"query\": {\"match_all\": {}}}"))
new FieldPermissions(new String[]{"body", "title"}, null), new BytesArray("{\"query\": " +
"{\"match_all\": {}}}"))
.metadata(metadata)
.get();
logger.error("--> waiting for .security index");
@ -167,13 +169,15 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
.cluster("all", "none")
.runAs("root", "nobody")
.addIndices(new String[]{"index"}, new String[]{"read"},
new String[]{"body", "title"}, new BytesArray("{\"query\": {\"match_all\": {}}}"))
new FieldPermissions(new String[]{"body", "title"}, null), new BytesArray("{\"query\": " +
"{\"match_all\": {}}}"))
.get();
c.preparePutRole("test_role3")
.cluster("all", "none")
.runAs("root", "nobody")
.addIndices(new String[]{"index"}, new String[]{"read"},
new String[]{"body", "title"}, new BytesArray("{\"query\": {\"match_all\": {}}}"))
new FieldPermissions(new String[]{"body", "title"}, null), new BytesArray("{\"query\": " +
"{\"match_all\": {}}}"))
.get();
logger.info("--> retrieving all roles");
@ -200,7 +204,8 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
c.preparePutRole("test_role")
.cluster("all")
.addIndices(new String[] { "*" }, new String[] { "read" },
new String[] { "body", "title" }, new BytesArray("{\"match_all\": {}}"))
new FieldPermissions(new String[] { "body", "title" }, null),
new BytesArray("{\"match_all\": {}}"))
.get();
logger.error("--> creating user");
c.preparePutUser("joe", "s3krit".toCharArray(), "test_role").get();
@ -294,7 +299,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
c.preparePutRole("test_role")
.cluster("all")
.addIndices(new String[]{"*"}, new String[]{"read"},
new String[]{"body", "title"}, new BytesArray("{\"match_all\": {}}"))
new FieldPermissions(new String[]{"body", "title"}, null), new BytesArray("{\"match_all\": {}}"))
.get();
logger.error("--> creating user");
c.preparePutUser("joe", "s3krit".toCharArray(), "test_role").get();
@ -309,7 +314,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
c.preparePutRole("test_role")
.cluster("none")
.addIndices(new String[]{"*"}, new String[]{"read"},
new String[]{"body", "title"}, new BytesArray("{\"match_all\": {}}"))
new FieldPermissions(new String[]{"body", "title"}, null), new BytesArray("{\"match_all\": {}}"))
.get();
try {
client().filterWithHeader(Collections.singletonMap("Authorization", token)).admin().cluster().prepareHealth().get();
@ -326,7 +331,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
c.preparePutRole("test_role")
.cluster("none")
.addIndices(new String[]{"*"}, new String[]{"read"},
new String[]{"body", "title"}, new BytesArray("{\"match_all\": {}}"))
new FieldPermissions(new String[]{"body", "title"}, null), new BytesArray("{\"match_all\": {}}"))
.get();
getRolesResponse = c.prepareGetRoles().names("test_role").get();
assertTrue("test_role does not exist!", getRolesResponse.hasRoles());
@ -342,7 +347,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
c.preparePutRole("test_role")
.cluster("all")
.addIndices(new String[]{"*"}, new String[]{"read"},
new String[]{"body", "title"}, new BytesArray("{\"match_all\": {}}"))
new FieldPermissions(new String[]{"body", "title"}, null), new BytesArray("{\"match_all\": {}}"))
.get();
c.preparePutUser("joe", "s3krit".toCharArray(), "test_role").get();
logger.error("--> waiting for .security index");
@ -366,11 +371,11 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
// create some roles
client.preparePutRole("admin_role")
.cluster("all")
.addIndices(new String[]{"*"}, new String[]{"all"}, null, null)
.addIndices(new String[]{"*"}, new String[]{"all"}, new FieldPermissions(), null)
.get();
client.preparePutRole("read_role")
.cluster("none")
.addIndices(new String[]{"*"}, new String[]{"read"}, null, null)
.addIndices(new String[]{"*"}, new String[]{"read"}, new FieldPermissions(), null)
.get();
assertThat(client.prepareGetUsers("joes").get().hasUsers(), is(false));
@ -453,11 +458,12 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
} else {
client.preparePutRole("read_role")
.cluster("none")
.addIndices(new String[]{"*"}, new String[]{"read"}, null, null)
.addIndices(new String[]{"*"}, new String[]{"read"}, new FieldPermissions(), null)
.get();
}
IndicesStatsResponse response = client().admin().indices().prepareStats("foo", SecurityTemplateService.SECURITY_INDEX_NAME).get();
assertThat(response.getFailedShards(), is(0));
assertThat(response.getIndices().size(), is(2));
assertThat(response.getIndices().get(SecurityTemplateService.SECURITY_INDEX_NAME), notNullValue());
assertThat(response.getIndices().get(SecurityTemplateService.SECURITY_INDEX_NAME).getIndex(),
@ -570,14 +576,22 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
SecurityClient client = new SecurityClient(client());
PutRoleResponse putRoleResponse = client.preparePutRole("admin_role")
.cluster("all")
.addIndices(new String[]{"*"}, new String[]{"all"}, null, null)
.addIndices(new String[]{"*"}, new String[]{"all"}, new FieldPermissions(), null)
.get();
assertThat(putRoleResponse.isCreated(), is(true));
roles++;
if (fls) {
PutRoleResponse roleResponse = client.preparePutRole("admin_role_fls")
PutRoleResponse roleResponse;
String[] fields = new String[]{"foo"};
FieldPermissions fieldPermissions;
if (randomBoolean()) {
fieldPermissions = new FieldPermissions(fields, null);
} else {
fieldPermissions = new FieldPermissions(null, fields);
}
roleResponse = client.preparePutRole("admin_role_fls")
.cluster("all")
.addIndices(new String[]{"*"}, new String[]{"all"}, new String[] { "foo" }, null)
.addIndices(new String[]{"*"}, new String[]{"all"}, fieldPermissions, null)
.get();
assertThat(roleResponse.isCreated(), is(true));
roles++;
@ -586,7 +600,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
if (dls) {
PutRoleResponse roleResponse = client.preparePutRole("admin_role_dls")
.cluster("all")
.addIndices(new String[]{"*"}, new String[]{"all"}, null, new BytesArray("{ \"match_all\": {} }"))
.addIndices(new String[]{"*"}, new String[]{"all"}, new FieldPermissions(), new BytesArray("{ \"match_all\": {} }"))
.get();
assertThat(roleResponse.isCreated(), is(true));
roles++;

View File

@ -15,6 +15,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.security.support.MetadataUtils;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;
import java.util.Map;
@ -38,13 +39,14 @@ public class RoleDescriptorTests extends ESTestCase {
RoleDescriptor.IndicesPrivileges.builder()
.indices("i1", "i2")
.privileges("read")
.fields("body", "title")
.fieldPermissions(new FieldPermissions(new String[]{"body", "title"}, null))
.query("{\"query\": {\"match_all\": {}}}")
.build()
};
RoleDescriptor descriptor = new RoleDescriptor("test", new String[] { "all", "none" }, groups, new String[] { "sudo" });
assertThat(descriptor.toString(), is("Role[name=test, cluster=[all,none], indicesPrivileges=[IndicesPrivileges[indices=[i1,i2], " +
"privileges=[read], fields=[body,title], query={\"query\": {\"match_all\": {}}}],], runAs=[sudo], metadata=[{}]]"));
"privileges=[read], field_security=[grant=[body,title], except=null], query={\"query\": {\"match_all\": {}}}],]" +
", runAs=[sudo], metadata=[{}]]"));
}
public void testToXContent() throws Exception {
@ -52,37 +54,39 @@ public class RoleDescriptorTests extends ESTestCase {
RoleDescriptor.IndicesPrivileges.builder()
.indices("i1", "i2")
.privileges("read")
.fields("body", "title")
.fieldPermissions(new FieldPermissions(new String[]{"body", "title"}, null))
.query("{\"query\": {\"match_all\": {}}}")
.build()
};
Map<String, Object> metadata = randomBoolean() ? MetadataUtils.DEFAULT_RESERVED_METADATA : null;
RoleDescriptor descriptor = new RoleDescriptor("test", new String[] { "all", "none" }, groups, new String[] { "sudo" }, metadata);
XContentBuilder builder = descriptor.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS);
RoleDescriptor parsed = RoleDescriptor.parse("test", builder.bytes());
assertThat(parsed, is(descriptor));
RoleDescriptor parsed = RoleDescriptor.parse("test", builder.bytes(), false);
assertEquals(parsed, descriptor);
}
public void testParse() throws Exception {
String q = "{\"cluster\":[\"a\", \"b\"]}";
RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(q));
RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(q), false);
assertEquals("test", rd.getName());
assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges());
assertEquals(0, rd.getIndicesPrivileges().length);
assertArrayEquals(Strings.EMPTY_ARRAY, rd.getRunAs());
q = "{\"cluster\":[\"a\", \"b\"], \"run_as\": [\"m\", \"n\"]}";
rd = RoleDescriptor.parse("test", new BytesArray(q));
rd = RoleDescriptor.parse("test", new BytesArray(q), false);
assertEquals("test", rd.getName());
assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges());
assertEquals(0, rd.getIndicesPrivileges().length);
assertArrayEquals(new String[] { "m", "n" }, rd.getRunAs());
q = "{\"cluster\":[\"a\", \"b\"], \"run_as\": [\"m\", \"n\"], \"indices\": [{\"names\": \"idx1\", \"privileges\": [\"p1\", " +
"\"p2\"]}, {\"names\": \"idx2\", \"privileges\": [\"p3\"], \"fields\": [\"f1\", \"f2\"]}, {\"names\": \"idx2\", " +
"\"privileges\": [\"p3\"], \"fields\": [\"f1\", \"f2\"], \"query\": \"{\\\"match_all\\\": {}}\"}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q));
"\"p2\"]}, {\"names\": \"idx2\", \"privileges\": [\"p3\"], \"field_security\": " +
"{\"grant\": [\"f1\", \"f2\"]}}, {\"names\": " +
"\"idx2\", " +
"\"privileges\": [\"p3\"], \"field_security\": {\"grant\": [\"f1\", \"f2\"]}, \"query\": \"{\\\"match_all\\\": {}}\"}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), false);
assertEquals("test", rd.getName());
assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges());
assertEquals(3, rd.getIndicesPrivileges().length);
@ -90,7 +94,7 @@ public class RoleDescriptorTests extends ESTestCase {
q = "{\"cluster\":[\"a\", \"b\"], \"run_as\": [\"m\", \"n\"], \"indices\": [{\"names\": [\"idx1\",\"idx2\"], \"privileges\": " +
"[\"p1\", \"p2\"]}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q));
rd = RoleDescriptor.parse("test", new BytesArray(q), false);
assertEquals("test", rd.getName());
assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges());
assertEquals(1, rd.getIndicesPrivileges().length);
@ -99,7 +103,7 @@ public class RoleDescriptorTests extends ESTestCase {
assertNull(rd.getIndicesPrivileges()[0].getQuery());
q = "{\"cluster\":[\"a\", \"b\"], \"metadata\":{\"foo\":\"bar\"}}";
rd = RoleDescriptor.parse("test", new BytesArray(q));
rd = RoleDescriptor.parse("test", new BytesArray(q), false);
assertEquals("test", rd.getName());
assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges());
assertEquals(0, rd.getIndicesPrivileges().length);
@ -115,7 +119,7 @@ public class RoleDescriptorTests extends ESTestCase {
RoleDescriptor.IndicesPrivileges.builder()
.indices("i1", "i2")
.privileges("read")
.fields("body", "title")
.fieldPermissions(new FieldPermissions(new String[]{"body", "title"}, null))
.query("{\"query\": {\"match_all\": {}}}")
.build()
};
@ -131,7 +135,7 @@ public class RoleDescriptorTests extends ESTestCase {
public void testParseEmptyQuery() throws Exception {
String json = "{\"cluster\":[\"a\", \"b\"], \"run_as\": [\"m\", \"n\"], \"indices\": [{\"names\": [\"idx1\",\"idx2\"], " +
"\"privileges\": [\"p1\", \"p2\"], \"query\": \"\"}]}";
RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(json));
RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(json), false);
assertEquals("test", rd.getName());
assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges());
assertEquals(1, rd.getIndicesPrivileges().length);

View File

@ -97,7 +97,7 @@ public class FieldDataCacheWithFieldSubsetReaderTests extends ESTestCase {
assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(numDocs));
assertThat(indexFieldDataCache.topLevelBuilds, equalTo(1));
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet(), false);
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet());
global = sortedSetDVOrdinalsIndexFieldData.loadGlobal(ir);
atomic = global.load(ir.leaves().get(0));
assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(0L));
@ -110,7 +110,7 @@ public class FieldDataCacheWithFieldSubsetReaderTests extends ESTestCase {
assertThat(atomic.getOrdinalsValues().getValueCount(), greaterThanOrEqualTo(1L));
}
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet(), false);
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet());
for (LeafReaderContext context : ir.leaves()) {
AtomicOrdinalsFieldData atomic = sortedSetDVOrdinalsIndexFieldData.load(context);
assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(0L));
@ -126,7 +126,7 @@ public class FieldDataCacheWithFieldSubsetReaderTests extends ESTestCase {
assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(numDocs));
assertThat(indexFieldDataCache.topLevelBuilds, equalTo(1));
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet(), false);
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet());
global = pagedBytesIndexFieldData.loadGlobal(ir);
atomic = global.load(ir.leaves().get(0));
assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(0L));
@ -141,7 +141,7 @@ public class FieldDataCacheWithFieldSubsetReaderTests extends ESTestCase {
}
assertThat(indexFieldDataCache.leafLevelBuilds, equalTo(ir.leaves().size()));
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet(), false);
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet());
for (LeafReaderContext context : ir.leaves()) {
AtomicOrdinalsFieldData atomic = pagedBytesIndexFieldData.load(context);
assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(0L));

View File

@ -70,7 +70,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -102,7 +102,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -189,7 +189,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
Document d2 = ir.document(0);
@ -216,7 +216,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
Document d2 = ir.document(0);
@ -243,7 +243,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
Document d2 = ir.document(0);
@ -270,7 +270,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
Document d2 = ir.document(0);
@ -297,7 +297,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
Document d2 = ir.document(0);
@ -324,7 +324,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
Document d2 = ir.document(0);
@ -353,7 +353,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
Fields vectors = ir.getTermVectors(0);
@ -383,7 +383,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -410,7 +410,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -442,7 +442,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -474,7 +474,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -506,7 +506,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -542,7 +542,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -577,7 +577,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -610,7 +610,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
Set<String> fields = new HashSet<>();
fields.add("fieldA");
fields.add(SourceFieldMapper.NAME);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
Document d2 = ir.document(0);
@ -641,7 +641,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
Set<String> fields = new HashSet<>();
fields.add("fieldA");
fields.add(FieldNamesFieldMapper.NAME);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -690,7 +690,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
fields.add("fieldA");
fields.add("fieldC");
fields.add(FieldNamesFieldMapper.NAME);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only two fields
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -738,7 +738,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
fields.add("fieldA");
fields.add("fieldC");
fields.add(FieldNamesFieldMapper.NAME);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -774,7 +774,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
Set<String> fields = new HashSet<>();
fields.add("fieldA");
fields.add(FieldNamesFieldMapper.NAME);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -803,7 +803,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("id");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
assertEquals(2, ir.numDocs());
assertEquals(1, ir.leaves().size());
@ -838,7 +838,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldB");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// sees no fields
assertNull(ir.getTermVectors(0));
@ -858,7 +858,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, false);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
// see no fields
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -887,48 +887,12 @@ public class FieldSubsetReaderTests extends ESTestCase {
IndexWriter iw = new IndexWriter(dir, iwc);
iw.close();
DirectoryReader directoryReader = DirectoryReader.open(dir);
directoryReader = FieldSubsetReader.wrap(directoryReader, Collections.emptySet(), false);
try {
FieldSubsetReader.wrap(directoryReader, Collections.emptySet(), false);
fail("shouldn't be able to wrap FieldSubsetDirectoryReader twice");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("Can't wrap [class org.elasticsearch.xpack.security.authz.accesscontrol" +
".FieldSubsetReader$FieldSubsetDirectoryReader] twice"));
}
final DirectoryReader directoryReader = FieldSubsetReader.wrap(DirectoryReader.open(dir), Collections.emptySet());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> FieldSubsetReader.wrap(directoryReader,
Collections.emptySet()));
assertThat(e.getMessage(), equalTo("Can't wrap [class org.elasticsearch.xpack.security.authz.accesscontrol" +
".FieldSubsetReader$FieldSubsetDirectoryReader] twice"));
directoryReader.close();
dir.close();
}
/**
* test filtering two string fields, with negated set
*/
public void testNegative() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(null);
IndexWriter iw = new IndexWriter(dir, iwc);
// add document with 2 fields
Document doc = new Document();
doc.add(new StringField("fieldA", "test", Field.Store.NO));
doc.add(new StringField("fieldB", "test", Field.Store.NO));
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("fieldB");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields, true);
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
Set<String> seenFields = new HashSet<>();
for (String field : segmentReader.fields()) {
seenFields.add(field);
}
assertEquals(Collections.singleton("fieldA"), seenFields);
assertNotNull(segmentReader.terms("fieldA"));
assertNull(segmentReader.terms("fieldB"));
TestUtil.checkReader(ir);
IOUtils.close(ir, iw, dir);
}
}
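
The boolean negation flag is gone from FieldSubsetReader.wrap throughout this file; "all fields except X" is now expressed through FieldPermissions before a reader is ever wrapped. A self-contained sketch of the remaining two-argument form, assuming a plain RAMDirectory and illustrative field names:

Directory dir = new RAMDirectory();
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(null));
Document doc = new Document();
doc.add(new StringField("fieldA", "test", Field.Store.NO));
doc.add(new StringField("fieldB", "test", Field.Store.NO));
iw.addDocument(doc);
// wrap with the surviving two-argument signature: only fieldA stays visible
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), Collections.singleton("fieldA"));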

View File

@ -5,15 +5,17 @@
*/
package org.elasticsearch.xpack.security.authz.accesscontrol;
import org.apache.lucene.util.automaton.Automaton;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.security.authz.accesscontrol.IndicesAccessControl.IndexAccessControl;
import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;
import java.util.Collections;
import java.util.Set;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
@ -31,46 +33,52 @@ public class IndicesAccessControlTests extends ESTestCase {
}
public void testMergeFields() {
IndexAccessControl indexAccessControl = new IndexAccessControl(true, Sets.newHashSet("a", "c"), null);
IndexAccessControl other = new IndexAccessControl(true, Sets.newHashSet("b"), null);
IndexAccessControl indexAccessControl = new IndexAccessControl(true, new FieldPermissions(new String[]{"a", "c"}, null), null);
IndexAccessControl other = new IndexAccessControl(true, new FieldPermissions(new String[]{"b"}, null), null);
IndexAccessControl merge1 = indexAccessControl.merge(other);
assertThat(merge1.getFields(), containsInAnyOrder("a", "b", "c"));
assertTrue(merge1.getFieldPermissions().grantsAccessTo("a"));
assertTrue(merge1.getFieldPermissions().grantsAccessTo("b"));
assertTrue(merge1.getFieldPermissions().grantsAccessTo("c"));
assertTrue(merge1.getFieldPermissions().hasFieldLevelSecurity());
assertThat(merge1.isGranted(), is(true));
assertThat(merge1.getQueries(), nullValue());
IndexAccessControl merge2 = other.merge(indexAccessControl);
assertThat(merge2.getFields(), containsInAnyOrder("a", "b", "c"));
assertTrue(merge2.getFieldPermissions().grantsAccessTo("a"));
assertTrue(merge2.getFieldPermissions().grantsAccessTo("b"));
assertTrue(merge2.getFieldPermissions().grantsAccessTo("c"));
assertTrue(merge2.getFieldPermissions().hasFieldLevelSecurity());
assertThat(merge2.isGranted(), is(true));
assertThat(merge2.getQueries(), nullValue());
}
public void testMergeEmptyAndNullFields() {
IndexAccessControl indexAccessControl = new IndexAccessControl(true, Collections.emptySet(), null);
IndexAccessControl other = new IndexAccessControl(true, null, null);
IndexAccessControl indexAccessControl = new IndexAccessControl(true, new FieldPermissions(new String[]{}, null), null);
IndexAccessControl other = new IndexAccessControl(true, new FieldPermissions(), null);
IndexAccessControl merge1 = indexAccessControl.merge(other);
assertThat(merge1.getFields(), nullValue());
assertFalse(merge1.getFieldPermissions().hasFieldLevelSecurity());
assertThat(merge1.isGranted(), is(true));
assertThat(merge1.getQueries(), nullValue());
IndexAccessControl merge2 = other.merge(indexAccessControl);
assertThat(merge2.getFields(), nullValue());
assertFalse(merge2.getFieldPermissions().hasFieldLevelSecurity());
assertThat(merge2.isGranted(), is(true));
assertThat(merge2.getQueries(), nullValue());
}
public void testMergeNullFields() {
IndexAccessControl indexAccessControl = new IndexAccessControl(true, Sets.newHashSet("a", "b"), null);
IndexAccessControl other = new IndexAccessControl(true, null, null);
IndexAccessControl indexAccessControl = new IndexAccessControl(true, new FieldPermissions(new String[]{"a", "b"}, null), null);
IndexAccessControl other = new IndexAccessControl(true, new FieldPermissions(), null);
IndexAccessControl merge1 = indexAccessControl.merge(other);
assertThat(merge1.getFields(), nullValue());
assertFalse(merge1.getFieldPermissions().hasFieldLevelSecurity());
assertThat(merge1.isGranted(), is(true));
assertThat(merge1.getQueries(), nullValue());
IndexAccessControl merge2 = other.merge(indexAccessControl);
assertThat(merge2.getFields(), nullValue());
assertFalse(merge2.getFieldPermissions().hasFieldLevelSecurity());
assertThat(merge2.isGranted(), is(true));
assertThat(merge2.getQueries(), nullValue());
}
@ -78,81 +86,106 @@ public class IndicesAccessControlTests extends ESTestCase {
public void testMergeQueries() {
BytesReference query1 = new BytesArray(new byte[] { 0x1 });
BytesReference query2 = new BytesArray(new byte[] { 0x2 });
IndexAccessControl indexAccessControl = new IndexAccessControl(true, null, Collections.singleton(query1));
IndexAccessControl other = new IndexAccessControl(true, null, Collections.singleton(query2));
IndexAccessControl indexAccessControl = new IndexAccessControl(true, new FieldPermissions(), Collections.singleton(query1));
IndexAccessControl other = new IndexAccessControl(true, new FieldPermissions(), Collections.singleton(query2));
IndexAccessControl merge1 = indexAccessControl.merge(other);
assertThat(merge1.getFields(), nullValue());
assertFalse(merge1.getFieldPermissions().hasFieldLevelSecurity());
assertThat(merge1.isGranted(), is(true));
assertThat(merge1.getQueries(), containsInAnyOrder(query1, query2));
IndexAccessControl merge2 = other.merge(indexAccessControl);
assertThat(merge2.getFields(), nullValue());
assertFalse(merge2.getFieldPermissions().hasFieldLevelSecurity());
assertThat(merge2.isGranted(), is(true));
assertThat(merge2.getQueries(), containsInAnyOrder(query1, query2));
}
public void testMergeNullQuery() {
BytesReference query1 = new BytesArray(new byte[] { 0x1 });
IndexAccessControl indexAccessControl = new IndexAccessControl(true, null, Collections.singleton(query1));
IndexAccessControl other = new IndexAccessControl(true, null, null);
IndexAccessControl indexAccessControl = new IndexAccessControl(true, new FieldPermissions(), Collections.singleton(query1));
IndexAccessControl other = new IndexAccessControl(true, new FieldPermissions(), null);
IndexAccessControl merge1 = indexAccessControl.merge(other);
assertThat(merge1.getFields(), nullValue());
assertFalse(merge1.getFieldPermissions().hasFieldLevelSecurity());
assertThat(merge1.isGranted(), is(true));
assertThat(merge1.getQueries(), nullValue());
IndexAccessControl merge2 = other.merge(indexAccessControl);
assertThat(merge2.getFields(), nullValue());
assertFalse(merge2.getFieldPermissions().hasFieldLevelSecurity());
assertThat(merge2.isGranted(), is(true));
assertThat(merge2.getQueries(), nullValue());
}
public void testMergeNotGrantedAndGranted() {
final Set<String> notGrantedFields = randomFrom(Collections.<String>emptySet(), Collections.singleton("baz"), null);
final String[] notGrantedFields = randomFrom(new String[]{}, new String[]{"baz"}, null);
final Set<BytesReference> notGrantedQueries = randomFrom(Collections.<BytesReference>emptySet(), null,
Collections.<BytesReference>singleton(new BytesArray(new byte[] { randomByte() })));
final IndexAccessControl indexAccessControl = new IndexAccessControl(false, notGrantedFields, notGrantedQueries);
final IndexAccessControl indexAccessControl = new IndexAccessControl(false, new FieldPermissions(notGrantedFields, null),
notGrantedQueries);
final BytesReference query1 = new BytesArray(new byte[] { 0x1 });
final Set<String> fields =
randomFrom(Collections.singleton("foo"), Sets.newHashSet("foo", "bar"), Collections.<String>emptySet(), null);
final String[] fields =
randomFrom(new String[]{"foo"}, new String[]{"foo", "bar"}, new String[]{}, null);
final Set<BytesReference> queries =
randomFrom(Collections.singleton(query1), Collections.<BytesReference>emptySet(), null);
final IndexAccessControl other = new IndexAccessControl(true, fields, queries);
final IndexAccessControl other = new IndexAccessControl(true, new FieldPermissions(fields, null), queries);
IndexAccessControl merged = indexAccessControl.merge(other);
assertThat(merged.isGranted(), is(true));
assertThat(merged.getFields(), equalTo(fields));
assertThat(merged.getQueries(), equalTo(queries));
if (fields == null) {
assertFalse(merged.getFieldPermissions().hasFieldLevelSecurity());
} else {
assertTrue(merged.getFieldPermissions().hasFieldLevelSecurity());
if (notGrantedFields != null) {
for (String field : notGrantedFields) {
assertFalse(merged.getFieldPermissions().grantsAccessTo(field));
}
}
for (String field : fields) {
assertTrue(merged.getFieldPermissions().grantsAccessTo(field));
}
}
merged = other.merge(indexAccessControl);
assertThat(merged.isGranted(), is(true));
assertThat(merged.getFields(), equalTo(fields));
assertThat(merged.getQueries(), equalTo(queries));
if (fields == null) {
assertFalse(merged.getFieldPermissions().hasFieldLevelSecurity());
} else {
assertTrue(merged.getFieldPermissions().hasFieldLevelSecurity());
if (notGrantedFields != null) {
for (String field : notGrantedFields) {
assertFalse(merged.getFieldPermissions().grantsAccessTo(field));
}
}
for (String field : fields) {
assertTrue(merged.getFieldPermissions().grantsAccessTo(field));
}
}
}
public void testMergeNotGranted() {
final Set<String> notGrantedFields = randomFrom(Collections.<String>emptySet(), Collections.singleton("baz"), null);
final String[] notGrantedFields = randomFrom(new String[]{}, new String[]{"baz"}, null);
final Set<BytesReference> notGrantedQueries = randomFrom(Collections.<BytesReference>emptySet(), null,
Collections.<BytesReference>singleton(new BytesArray(new byte[] { randomByte() })));
final IndexAccessControl indexAccessControl = new IndexAccessControl(false, notGrantedFields, notGrantedQueries);
final IndexAccessControl indexAccessControl = new IndexAccessControl(false, new FieldPermissions(notGrantedFields, null),
notGrantedQueries);
final BytesReference query1 = new BytesArray(new byte[] { 0x1 });
final Set<String> fields =
randomFrom(Collections.singleton("foo"), Sets.newHashSet("foo", "bar"), Collections.<String>emptySet(), null);
final String[] fields =
randomFrom(new String[]{"foo"}, new String[]{"foo", "bar"}, new String[]{}, null);
final Set<BytesReference> queries =
randomFrom(Collections.singleton(query1), Collections.<BytesReference>emptySet(), null);
final IndexAccessControl other = new IndexAccessControl(false, fields, queries);
final IndexAccessControl other = new IndexAccessControl(false, new FieldPermissions(fields, null), queries);
IndexAccessControl merged = indexAccessControl.merge(other);
assertThat(merged.isGranted(), is(false));
assertThat(merged.getFields(), equalTo(notGrantedFields));
assertThat(merged.getQueries(), equalTo(notGrantedQueries));
merged = other.merge(indexAccessControl);
assertThat(merged.isGranted(), is(false));
assertThat(merged.getFields(), equalTo(fields));
assertThat(merged.getQueries(), equalTo(queries));
}
}
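
A compact restatement of the merge rule these tests pin down (sketch with an illustrative field name): two granted controls union their granted fields, and a side without field level security swallows the other side's restrictions entirely.

IndexAccessControl restricted = new IndexAccessControl(true, new FieldPermissions(new String[]{"a"}, null), null);
IndexAccessControl unrestricted = new IndexAccessControl(true, new FieldPermissions(), null); // grants all fields
// the merged control can reach every field, so field level security disappears:
assert restricted.merge(unrestricted).getFieldPermissions().hasFieldLevelSecurity() == false;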

View File

@ -12,16 +12,19 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;
import org.elasticsearch.xpack.security.authz.permission.IndicesPermission;
import org.elasticsearch.xpack.security.authz.permission.Role;
import org.elasticsearch.xpack.security.authz.privilege.IndexPrivilege;
import org.elasticsearch.test.ESTestCase;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.io.IOException;
import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
@ -41,49 +44,103 @@ public class IndicesPermissionTests extends ESTestCase {
// basics:
BytesReference query = new BytesArray("{}");
List<String> fields = Arrays.asList("_field");
Role role = Role.builder("_role").add(fields, query, IndexPrivilege.ALL, "_index").build();
String[] fields = new String[]{"_field"};
Role role = Role.builder("_role").add(new FieldPermissions(fields, null), query, IndexPrivilege.ALL, "_index").build();
IndicesAccessControl permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_index"), md);
assertThat(permissions.getIndexPermissions("_index"), notNullValue());
assertThat(permissions.getIndexPermissions("_index").getFields().size(), equalTo(1));
assertThat(permissions.getIndexPermissions("_index").getFields().iterator().next(), equalTo("_field"));
assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().grantsAccessTo("_field"));
assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity());
assertThat(permissions.getIndexPermissions("_index").getQueries().size(), equalTo(1));
assertThat(permissions.getIndexPermissions("_index").getQueries().iterator().next(), equalTo(query));
// no document level security:
role = Role.builder("_role").add(fields, null, IndexPrivilege.ALL, "_index").build();
role = Role.builder("_role").add(new FieldPermissions(fields, null), null, IndexPrivilege.ALL, "_index").build();
permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_index"), md);
assertThat(permissions.getIndexPermissions("_index"), notNullValue());
assertThat(permissions.getIndexPermissions("_index").getFields().size(), equalTo(1));
assertThat(permissions.getIndexPermissions("_index").getFields().iterator().next(), equalTo("_field"));
assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().grantsAccessTo("_field"));
assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity());
assertThat(permissions.getIndexPermissions("_index").getQueries(), nullValue());
// no field level security:
role = Role.builder("_role").add(null, query, IndexPrivilege.ALL, "_index").build();
role = Role.builder("_role").add(new FieldPermissions(), query, IndexPrivilege.ALL, "_index").build();
permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_index"), md);
assertThat(permissions.getIndexPermissions("_index"), notNullValue());
assertThat(permissions.getIndexPermissions("_index").getFields(), nullValue());
assertFalse(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity());
assertThat(permissions.getIndexPermissions("_index").getQueries().size(), equalTo(1));
assertThat(permissions.getIndexPermissions("_index").getQueries().iterator().next(), equalTo(query));
// index group associated with an alias:
role = Role.builder("_role").add(fields, query, IndexPrivilege.ALL, "_alias").build();
role = Role.builder("_role").add(new FieldPermissions(fields, null), query, IndexPrivilege.ALL, "_alias").build();
permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_alias"), md);
assertThat(permissions.getIndexPermissions("_index"), notNullValue());
assertThat(permissions.getIndexPermissions("_index").getFields().size(), equalTo(1));
assertThat(permissions.getIndexPermissions("_index").getFields().iterator().next(), equalTo("_field"));
assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().grantsAccessTo("_field"));
assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity());
assertThat(permissions.getIndexPermissions("_index").getQueries().size(), equalTo(1));
assertThat(permissions.getIndexPermissions("_index").getQueries().iterator().next(), equalTo(query));
// match all fields
List<String> allFields = randomFrom(Collections.singletonList("*"), Arrays.asList("foo", "*"),
Arrays.asList(randomAsciiOfLengthBetween(1, 10), "*"));
role = Role.builder("_role").add(allFields, query, IndexPrivilege.ALL, "_alias").build();
String[] allFields = randomFrom(new String[]{"*"}, new String[]{"foo", "*"},
new String[]{randomAsciiOfLengthBetween(1, 10), "*"});
role = Role.builder("_role").add(new FieldPermissions(allFields, null), query, IndexPrivilege.ALL, "_alias").build();
permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_alias"), md);
assertThat(permissions.getIndexPermissions("_index"), notNullValue());
assertThat(permissions.getIndexPermissions("_index").getFields(), nullValue());
assertFalse(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity());
assertThat(permissions.getIndexPermissions("_index").getQueries().size(), equalTo(1));
assertThat(permissions.getIndexPermissions("_index").getQueries().iterator().next(), equalTo(query));
}
public void testIndicesPrivilegesStreaming() throws IOException {
BytesStreamOutput out = new BytesStreamOutput();
String[] allowed = new String[]{randomAsciiOfLength(5) + "*", randomAsciiOfLength(5) + "*", randomAsciiOfLength(5) + "*"};
String[] denied = new String[]{allowed[0] + randomAsciiOfLength(5), allowed[1] + randomAsciiOfLength(5),
allowed[2] + randomAsciiOfLength(5)};
FieldPermissions fieldPermissions = new FieldPermissions(allowed, denied);
RoleDescriptor.IndicesPrivileges.Builder indicesPrivileges = RoleDescriptor.IndicesPrivileges.builder();
indicesPrivileges.fieldPermissions(fieldPermissions);
indicesPrivileges.query("{match_all:{}}");
indicesPrivileges.indices(randomAsciiOfLength(5), randomAsciiOfLength(5), randomAsciiOfLength(5));
indicesPrivileges.privileges("all", "read", "priv");
indicesPrivileges.build().writeTo(out);
out.close();
StreamInput in = out.bytes().streamInput();
RoleDescriptor.IndicesPrivileges readIndicesPrivileges = RoleDescriptor.IndicesPrivileges.createFrom(in);
assertEquals(readIndicesPrivileges, indicesPrivileges.build());
}
// tests that field permissions are merged correctly when we authorize with several groups and don't crash when an index has no group
public void testCorePermissionAuthorize() {
final Settings indexSettings = Settings.builder().put("index.version.created", Version.CURRENT).build();
final MetaData metaData = new MetaData.Builder()
.put(new IndexMetaData.Builder("a1").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true)
.put(new IndexMetaData.Builder("a2").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true)
.build();
IndicesPermission.Group group1 = new IndicesPermission.Group(IndexPrivilege.ALL, new FieldPermissions(), null, "a1");
IndicesPermission.Group group2 = new IndicesPermission.Group(IndexPrivilege.ALL,
new FieldPermissions(null, new String[]{"denied_field"}), null, "a1");
IndicesPermission.Core core = new IndicesPermission.Core(group1, group2);
Map<String, IndicesAccessControl.IndexAccessControl> authzMap =
core.authorize(SearchAction.NAME, Sets.newHashSet("a1", "ba"), metaData);
assertTrue(authzMap.get("a1").getFieldPermissions().grantsAccessTo("denied_field"));
assertTrue(authzMap.get("a1").getFieldPermissions().grantsAccessTo(randomAsciiOfLength(5)));
// no group was defined for index "ba", so all fields are allowed
assertFalse(authzMap.get("ba").getFieldPermissions().hasFieldLevelSecurity());
// test with two indices
group1 = new IndicesPermission.Group(IndexPrivilege.ALL, new FieldPermissions(), null, "a1");
group2 = new IndicesPermission.Group(IndexPrivilege.ALL,
new FieldPermissions(null, new String[]{"denied_field"}), null, "a1");
IndicesPermission.Group group3 = new IndicesPermission.Group(IndexPrivilege.ALL,
new FieldPermissions(new String[]{"*_field"}, new String[]{"denied_field"}), null, "a2");
IndicesPermission.Group group4 = new IndicesPermission.Group(IndexPrivilege.ALL,
new FieldPermissions(new String[]{"*_field2"}, new String[]{"denied_field2"}), null, "a2");
core = new IndicesPermission.Core(group1, group2, group3, group4);
authzMap = core.authorize(SearchAction.NAME, Sets.newHashSet("a1", "a2"), metaData);
assertFalse(authzMap.get("a1").getFieldPermissions().hasFieldLevelSecurity());
assertFalse(authzMap.get("a2").getFieldPermissions().grantsAccessTo("denied_field2"));
assertFalse(authzMap.get("a2").getFieldPermissions().grantsAccessTo("denied_field"));
assertTrue(authzMap.get("a2").getFieldPermissions().grantsAccessTo(randomAsciiOfLength(5) + "_field"));
assertTrue(authzMap.get("a2").getFieldPermissions().grantsAccessTo(randomAsciiOfLength(5) + "_field2"));
assertTrue(authzMap.get("a2").getFieldPermissions().hasFieldLevelSecurity());
}
}
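
A hedged recap of the FieldPermissions semantics the assertions above rely on (field names are the ones used in this test):

FieldPermissions fp = new FieldPermissions(new String[]{"*_field"}, new String[]{"denied_field"});
// fp.grantsAccessTo("some_field")   -> true: matches the "*_field" grant pattern
// fp.grantsAccessTo("denied_field") -> false: listed under except
// new FieldPermissions().hasFieldLevelSecurity() -> false: the no-arg form grants every field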

View File

@ -0,0 +1,107 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.authz.accesscontrol;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.Directory;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.util.HashSet;
/** Simple tests for the opt-out query cache. */
public class OptOutQueryCacheTests extends ESTestCase {
IndexSearcher searcher;
Directory dir;
RandomIndexWriter w;
DirectoryReader reader;
@Before
public void initLuceneStuff() throws IOException {
dir = newDirectory();
w = new RandomIndexWriter(random(), dir);
reader = w.getReader();
searcher = newSearcher(reader);
}
@After
public void closeLuceneStuff() throws IOException {
// close the reader before the writer and the directory it reads from
reader.close();
w.close();
dir.close();
}
public void testOptOutQueryCacheSafetyCheck() throws IOException {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new TermQuery(new Term("foo", "bar")), BooleanClause.Occur.MUST);
builder.add(new TermQuery(new Term("no", "baz")), BooleanClause.Occur.MUST_NOT);
Weight weight = builder.build().createWeight(searcher, false);
// caching is allowed whenever every field used by the query is granted and none of them is denied.
IndicesAccessControl.IndexAccessControl permissions = new IndicesAccessControl.IndexAccessControl(true,
new FieldPermissions(new String[]{"foo", "no"}, null), new HashSet<>());
assertTrue(OptOutQueryCache.cachingIsSafe(weight, permissions));
permissions = new IndicesAccessControl.IndexAccessControl(true,
new FieldPermissions(new String[]{"foo", "no"}, new String[]{}), new HashSet<>());
assertTrue(OptOutQueryCache.cachingIsSafe(weight, permissions));
permissions = new IndicesAccessControl.IndexAccessControl(true,
new FieldPermissions(new String[]{"*"}, new String[]{}), new HashSet<>());
assertTrue(OptOutQueryCache.cachingIsSafe(weight, permissions));
permissions = new IndicesAccessControl.IndexAccessControl(true,
new FieldPermissions(new String[]{"*"}, null), new HashSet<>());
assertTrue(OptOutQueryCache.cachingIsSafe(weight, permissions));
permissions = new IndicesAccessControl.IndexAccessControl(true,
new FieldPermissions(new String[]{"*"}, new String[]{"oof"}), new HashSet<>());
assertTrue(OptOutQueryCache.cachingIsSafe(weight, permissions));
permissions = new IndicesAccessControl.IndexAccessControl(true,
new FieldPermissions(new String[]{"f*", "n*"}, new String[]{}), new HashSet<>());
assertTrue(OptOutQueryCache.cachingIsSafe(weight, permissions));
// check we don't cache if a field is not allowed
permissions = new IndicesAccessControl.IndexAccessControl(true,
new FieldPermissions(new String[]{"foo"}, null), new HashSet<>());
assertFalse(OptOutQueryCache.cachingIsSafe(weight, permissions));
permissions = new IndicesAccessControl.IndexAccessControl(true,
new FieldPermissions(new String[]{"a*"}, new String[]{"aa"}), new HashSet<>());
assertFalse(OptOutQueryCache.cachingIsSafe(weight, permissions));
permissions = new IndicesAccessControl.IndexAccessControl(true,
new FieldPermissions(null, new String[]{"no"}), new HashSet<>());
assertFalse(OptOutQueryCache.cachingIsSafe(weight, permissions));
permissions = new IndicesAccessControl.IndexAccessControl(true,
new FieldPermissions(null, new String[]{"*"}), new HashSet<>());
assertFalse(OptOutQueryCache.cachingIsSafe(weight, permissions));
permissions = new IndicesAccessControl.IndexAccessControl(true,
new FieldPermissions(new String[]{"foo", "no"}, new String[]{"no"}), new HashSet<>());
assertFalse(OptOutQueryCache.cachingIsSafe(weight, permissions));
permissions = new IndicesAccessControl.IndexAccessControl(true,
new FieldPermissions(new String[]{}, new String[]{}), new HashSet<>());
assertFalse(OptOutQueryCache.cachingIsSafe(weight, permissions));
permissions = new IndicesAccessControl.IndexAccessControl(true,
new FieldPermissions(new String[]{}, null), new HashSet<>());
assertFalse(OptOutQueryCache.cachingIsSafe(weight, permissions));
}
}
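
The rule those assertions encode: caching stays enabled only if every field the query touches is visible under the field permissions. A hypothetical helper capturing that rule (OptOutQueryCache.cachingIsSafe itself also has to extract the field names from the Weight, which this sketch takes as input directly):

static boolean safeToCache(Set<String> queryFields, FieldPermissions permissions) {
    for (String field : queryFields) {
        if (permissions.grantsAccessTo(field) == false) {
            return false; // the query touches a field the role cannot see
        }
    }
    return true;
}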

View File

@ -42,7 +42,7 @@ import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.mockito.internal.matchers.Any;
import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;
import java.util.Collections;
import java.util.Optional;
@ -68,7 +68,8 @@ public class SecurityIndexSearcherWrapperIntegrationTests extends ESTestCase {
.then(invocationOnMock -> Collections.singletonList((String) invocationOnMock.getArguments()[0]));
ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true, null,
IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true, new
FieldPermissions(),
singleton(new BytesArray("{\"match_all\" : {}}")));
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(shardId.getIndex(), Settings.EMPTY);
IndicesQueriesRegistry indicesQueriesRegistry = mock(IndicesQueriesRegistry.class);

View File

@ -8,8 +8,8 @@ package org.elasticsearch.xpack.security.authz.accesscontrol;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
@ -78,6 +78,10 @@ import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.xpack.security.user.User;
import org.elasticsearch.xpack.security.authz.accesscontrol.DocumentSubsetReader.DocumentSubsetDirectoryReader;
import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;
import org.junit.After;
import org.junit.Before;
import org.mockito.ArgumentCaptor;
@ -86,16 +90,16 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.Map;
import java.util.Set;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptySet;
import static java.util.Collections.singleton;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.xpack.security.authz.accesscontrol.SecurityIndexSearcherWrapper.intersectScorerAndRoleBits;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
@ -161,7 +165,7 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
@Override
protected IndicesAccessControl getIndicesAccessControl() {
IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true,
emptySet(), null);
new FieldPermissions(new String[]{}, null), null);
return new IndicesAccessControl(true, singletonMap("_index", indexAccessControl));
}
};
@ -203,12 +207,12 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
public void testWildcards() throws Exception {
XContentBuilder mappingSource = jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("field1_a").field("type", "text").endObject()
.startObject("field1_b").field("type", "text").endObject()
.startObject("field1_c").field("type", "text").endObject()
.startObject("field2_a").field("type", "text").endObject()
.startObject("field2_b").field("type", "text").endObject()
.startObject("field2_c").field("type", "text").endObject()
.startObject("field1_a").field("type", "text").endObject()
.startObject("field1_b").field("type", "text").endObject()
.startObject("field1_c").field("type", "text").endObject()
.startObject("field2_a").field("type", "text").endObject()
.startObject("field2_b").field("type", "text").endObject()
.startObject("field2_c").field("type", "text").endObject()
.endObject().endObject().endObject();
mapperService.merge("type", new CompressedXContent(mappingSource.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
@ -219,25 +223,25 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
public void testDotNotion() throws Exception {
XContentBuilder mappingSource = jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("foo")
.field("type", "object")
.startObject("properties")
.startObject("bar").field("type", "text").endObject()
.startObject("baz").field("type", "text").endObject()
.endObject()
.field("type", "object")
.startObject("properties")
.startObject("bar").field("type", "text").endObject()
.startObject("baz").field("type", "text").endObject()
.endObject()
.endObject()
.startObject("bar")
.field("type", "object")
.startObject("properties")
.startObject("foo").field("type", "text").endObject()
.startObject("baz").field("type", "text").endObject()
.endObject()
.field("type", "object")
.startObject("properties")
.startObject("foo").field("type", "text").endObject()
.startObject("baz").field("type", "text").endObject()
.endObject()
.endObject()
.startObject("baz")
.field("type", "object")
.startObject("properties")
.startObject("bar").field("type", "text").endObject()
.startObject("foo").field("type", "text").endObject()
.endObject()
.field("type", "object")
.startObject("properties")
.startObject("bar").field("type", "text").endObject()
.startObject("foo").field("type", "text").endObject()
.endObject()
.endObject()
.endObject().endObject().endObject();
mapperService.merge("type", new CompressedXContent(mappingSource.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
@ -251,25 +255,25 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
public void testParentChild() throws Exception {
XContentBuilder mappingSource = jsonBuilder().startObject().startObject("parent1")
.startObject("properties")
.startObject("field").field("type", "text").endObject()
.startObject("field").field("type", "text").endObject()
.endObject()
.endObject().endObject();
mapperService.merge("parent1", new CompressedXContent(mappingSource.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
mappingSource = jsonBuilder().startObject().startObject("child1")
.startObject("properties")
.startObject("field").field("type", "text").endObject()
.startObject("field").field("type", "text").endObject()
.endObject()
.startObject("_parent")
.field("type", "parent1")
.field("type", "parent1")
.endObject()
.endObject().endObject();
mapperService.merge("child1", new CompressedXContent(mappingSource.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
mappingSource = jsonBuilder().startObject().startObject("child2")
.startObject("properties")
.startObject("field").field("type", "text").endObject()
.startObject("field").field("type", "text").endObject()
.endObject()
.startObject("_parent")
.field("type", "parent1")
.field("type", "parent1")
.endObject()
.endObject().endObject();
mapperService.merge("child2", new CompressedXContent(mappingSource.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
@ -281,15 +285,15 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
mapperService.merge("parent2", new CompressedXContent(mappingSource.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
mappingSource = jsonBuilder().startObject().startObject("child3")
.startObject("properties")
.startObject("field").field("type", "text").endObject()
.startObject("field").field("type", "text").endObject()
.endObject()
.startObject("_parent")
.field("type", "parent2")
.field("type", "parent2")
.endObject()
.endObject().endObject();
mapperService.merge("child3", new CompressedXContent(mappingSource.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
assertResolvedFields("field1", "field1", ParentFieldMapper.joinField("parent1"), ParentFieldMapper.joinField("parent2"));
assertResolvedFields("field", "field", ParentFieldMapper.joinField("parent1"), ParentFieldMapper.joinField("parent2"));
}
public void testDelegateSimilarity() throws Exception {
@ -393,10 +397,142 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
directory.close();
}
public void testFieldPermissionsWithFieldExceptions() throws Exception {
XContentBuilder mappingSource = jsonBuilder().startObject().startObject("some_type")
.startObject("properties")
.startObject("field1").field("type", "text").endObject()
.startObject("field2").field("type", "text").endObject()
.startObject("xfield3").field("type", "text").endObject()
.endObject()
.endObject().endObject();
mapperService.merge("some_type", new CompressedXContent(mappingSource.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
securityIndexSearcherWrapper =
new SecurityIndexSearcherWrapper(indexSettings, null, mapperService, null, threadContext, licenseState, null);
Set<String> allowedMetaFields = securityIndexSearcherWrapper.getAllowedMetaFields();
String[] grantedFields = new String[]{};
String[] deniedFields;
// an empty granted-fields array in a role implies access to no fields except the meta fields
Set<String> resolvedAllowedFields = new FieldPermissions(grantedFields, randomBoolean() ? null : new String[]{})
.resolveAllowedFields(allowedMetaFields, mapperService);
Set<String> expectedResultSet = new HashSet<>(allowedMetaFields);
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
// make sure access to meta fields cannot be denied
deniedFields = allowedMetaFields.toArray(new String[allowedMetaFields.size()]);
resolvedAllowedFields = new FieldPermissions(null, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("field1", "field2", "xfield3"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
// check we can add all fields with *
grantedFields = new String[]{"*"};
resolvedAllowedFields = new FieldPermissions(grantedFields, randomBoolean() ? null : new String[]{})
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("field1", "field2", "xfield3"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
// same with null
resolvedAllowedFields = new FieldPermissions(grantedFields, randomBoolean() ? null : new String[]{})
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("field1", "field2", "xfield3"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
// check we remove only excluded fields
grantedFields = new String[]{"*"};
deniedFields = new String[]{"xfield3"};
resolvedAllowedFields = new FieldPermissions(grantedFields, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("field1", "field2"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
// same with null
deniedFields = new String[]{"field1"};
resolvedAllowedFields = new FieldPermissions(null, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("field2", "xfield3"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
// some other checks
grantedFields = new String[]{"field*"};
deniedFields = new String[]{"field1", "field2"};
resolvedAllowedFields = new FieldPermissions(grantedFields, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
grantedFields = new String[]{"field1", "field2"};
deniedFields = new String[]{"field2"};
resolvedAllowedFields = new FieldPermissions(grantedFields, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("field1"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
grantedFields = new String[]{"field*"};
deniedFields = new String[]{"field2"};
resolvedAllowedFields = new FieldPermissions(grantedFields, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("field1"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
deniedFields = new String[]{"field*"};
resolvedAllowedFields = new FieldPermissions(null, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("xfield3"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
// empty array for allowed fields always means no field is allowed
grantedFields = new String[]{};
deniedFields = new String[]{};
resolvedAllowedFields = new FieldPermissions(grantedFields, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
// make sure the _all field can be explicitly allowed
grantedFields = new String[]{"_all", "*"};
deniedFields = randomBoolean() ? null : new String[]{};
resolvedAllowedFields = new FieldPermissions(grantedFields, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("field1", "field2", "xfield3", "_all"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
// make sure _all can also be granted on its own, without the wildcard
grantedFields = new String[]{"_all"};
deniedFields = randomBoolean() ? null : new String[]{};
resolvedAllowedFields = new FieldPermissions(grantedFields, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("_all"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
}
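A compact, hedged sketch of the resolution rules this test pins down. The shipped logic lives in FieldPermissions#resolveAllowedFields; this simplification treats '*' as the only wildcard and leaves out the special handling of _all.

import java.util.HashSet;
import java.util.Set;

// Editorial sketch only: a null grant means every mapped field, denied patterns
// are removed, and the meta fields are always added back (they cannot be denied).
static Set<String> resolveAllowedFieldsSketch(String[] granted, String[] denied,
                                              Set<String> metaFields, Set<String> mappedFields) {
    Set<String> allowed = new HashSet<>();
    if (granted == null) {
        allowed.addAll(mappedFields);
    } else {
        for (String field : mappedFields) {
            if (matchesAny(granted, field)) {
                allowed.add(field);
            }
        }
    }
    if (denied != null) {
        allowed.removeIf(field -> matchesAny(denied, field));
    }
    allowed.addAll(metaFields);
    return allowed;
}

// naive glob: '*' matches anything; sufficient for the patterns used in this test
private static boolean matchesAny(String[] patterns, String field) {
    for (String pattern : patterns) {
        if (field.matches(pattern.replace("*", ".*"))) {
            return true;
        }
    }
    return false;
}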
private SparseFixedBitSet query(LeafReaderContext leaf, String field, String value) throws IOException {
SparseFixedBitSet sparseFixedBitSet = new SparseFixedBitSet(leaf.reader().maxDoc());
TermsEnum tenum = leaf.reader().terms(field).iterator();
while (tenum.next().utf8ToString().equals(value) == false) {
}
PostingsEnum penum = tenum.postings(null);
sparseFixedBitSet.or(penum);
return sparseFixedBitSet;
@ -408,7 +544,7 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
@Override
protected IndicesAccessControl getIndicesAccessControl() {
IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true,
singleton(expression), null);
new FieldPermissions(new String[]{expression}, null), null);
return new IndicesAccessControl(true, singletonMap("_index", indexAccessControl));
}
};
@ -496,7 +632,7 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
private final Weight weight;
private final Set<Object> seenLeaves = Collections.newSetFromMap(new IdentityHashMap<>());
protected CreateScorerOnceWeight(Weight weight) {
super(weight.getQuery());
this.weight = weight;
@ -608,6 +744,7 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
public void onCache(ShardId shardId, Accountable accountable) {
}
@Override
public void onRemoval(ShardId shardId, Accountable accountable) {

View File

@ -0,0 +1,241 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.authz.permission;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import java.io.IOException;
import static org.hamcrest.Matchers.containsString;
public class FieldPermissionTests extends ESTestCase {
public void testParseFieldPermissions() throws Exception {
String q = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
"\"field_security\": {" +
"\"grant\": [\"f1\", \"f2\", \"f3\", \"f4\"]," +
"\"except\": [\"f3\",\"f4\"]" +
"}}]}";
RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(q), false);
assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getGrantedFieldsArray(),
new String[]{"f1", "f2", "f3", "f4"});
assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getDeniedFieldsArray(), new String[]{"f3", "f4"});
q = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
"\"field_security\": {" +
"\"except\": [\"f3\",\"f4\"]," +
"\"grant\": [\"f1\", \"f2\", \"f3\", \"f4\"]" +
"}}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), false);
assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getGrantedFieldsArray(),
new String[]{"f1", "f2", "f3", "f4"});
assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getDeniedFieldsArray(), new String[]{"f3", "f4"});
q = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
"\"field_security\": {" +
"\"grant\": [\"f1\", \"f2\"]" +
"}}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), false);
assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getGrantedFieldsArray(), new String[]{"f1", "f2"});
assertNull(rd.getIndicesPrivileges()[0].getFieldPermissions().getDeniedFieldsArray());
q = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
"\"field_security\": {" +
"\"grant\": []" +
"}}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), false);
assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getGrantedFieldsArray(), new String[]{});
assertNull(rd.getIndicesPrivileges()[0].getFieldPermissions().getDeniedFieldsArray());
q = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
"\"field_security\": {" +
"\"except\": []," +
"\"grant\": []" +
"}}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), false);
assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getGrantedFieldsArray(), new String[]{});
assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getDeniedFieldsArray(), new String[]{});
final String exceptWithoutGrant = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
"\"field_security\": {" +
"\"except\": [\"f1\"]" +
"}}]}";
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray
(exceptWithoutGrant), false));
assertThat(e.getDetailedMessage(), containsString("failed to parse indices privileges for role [test]. field_security requires " +
"grant if except is given"));
final String grantNull = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
"\"field_security\": {" +
"\"grant\": null" +
"}}]}";
e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray
(grantNull), false));
assertThat(e.getDetailedMessage(), containsString("failed to parse indices privileges for role [test]. grant must not be null."));
final String exceptNull = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
"\"field_security\": {" +
"\"grant\": [\"*\"]," +
"\"except\": null" +
"}}]}";
e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray
(exceptNull), false));
assertThat(e.getDetailedMessage(), containsString("failed to parse indices privileges for role [test]. except must not be null."));
final String exceptGrantNull = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
"\"field_security\": {" +
"\"grant\": null," +
"\"except\": null" +
"}}]}";
e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray
(exceptGrantNull), false));
assertThat(e.getDetailedMessage(), containsString("failed to parse indices privileges for role [test]. grant must not be null."));
final String bothFieldsMissing = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
"\"field_security\": {" +
"}}]}";
e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray
(bothFieldsMissing), false));
assertThat(e.getDetailedMessage(), containsString("failed to parse indices privileges for role [test]. \"field_security\" " +
"must not be empty."));
// try with two indices and mix order a little
q = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
"\"field_security\": {" +
"\"grant\": []" +
"}}," +
"{\"names\": \"idx3\",\n" +
" \"field_security\": {\n" +
" \"grant\": [\"*\"], \n" +
" \"except\": [\"f2\"]}," +
"\"privileges\": [\"p3\"]}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), false);
assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getGrantedFieldsArray(), new String[]{});
assertNull(rd.getIndicesPrivileges()[0].getFieldPermissions().getDeniedFieldsArray());
assertArrayEquals(rd.getIndicesPrivileges()[1].getFieldPermissions().getGrantedFieldsArray(), new String[]{"*"});
assertArrayEquals(rd.getIndicesPrivileges()[1].getFieldPermissions().getDeniedFieldsArray(), new String[]{"f2"});
}
// test old syntax for field permissions
public void testBWCFieldPermissions() throws Exception {
String q = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
"\"fields\": [\"f1\", \"f2\"]" +
"}]}";
RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(q), true);
assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getGrantedFieldsArray(), new String[]{"f1", "f2"});
assertNull(rd.getIndicesPrivileges()[0].getFieldPermissions().getDeniedFieldsArray());
final String failingQuery = q;
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray
(failingQuery), false));
assertThat(e.getDetailedMessage(), containsString("[\"fields\": [...]] format has changed for field permissions in role [test]" +
", use [\"field_security\": {\"grant\":[...],\"except\":[...]}] instead"));
q = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
"\"fields\": []" +
"}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), true);
assertArrayEquals(rd.getIndicesPrivileges()[0].getFieldPermissions().getGrantedFieldsArray(), new String[]{});
assertNull(rd.getIndicesPrivileges()[0].getFieldPermissions().getDeniedFieldsArray());
final String failingQuery2 = q;
e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray
(failingQuery2), false));
assertThat(e.getDetailedMessage(), containsString("[\"fields\": [...]] format has changed for field permissions in role [test]" +
", use [\"field_security\": {\"grant\":[...],\"except\":[...]}] instead"));
q = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
"\"fields\": null" +
"}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), true);
assertNull(rd.getIndicesPrivileges()[0].getFieldPermissions().getGrantedFieldsArray());
assertNull(rd.getIndicesPrivileges()[0].getFieldPermissions().getDeniedFieldsArray());
final String failingQuery3 = q;
e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray(failingQuery3), false));
assertThat(e.getDetailedMessage(), containsString("[\"fields\": [...]] format has changed for field permissions in role [test]" +
", use [\"field_security\": {\"grant\":[...],\"except\":[...]}] instead"));
}
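For comparison, the deprecated "fields": ["f1", "f2"] role above maps onto the new syntax exactly as parsed in testParseFieldPermissions:

{"indices": [ {"names": "idx2", "privileges": ["p3"],
    "field_security": {
        "grant": ["f1", "f2"]
    }}]}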
public void testMergeFieldPermissions() {
String allowedPrefix1 = randomAsciiOfLength(5);
String allowedPrefix2 = randomAsciiOfLength(5);
String[] allowed1 = new String[]{allowedPrefix1 + "*"};
String[] allowed2 = new String[]{allowedPrefix2 + "*"};
String[] denied1 = new String[]{allowedPrefix1 + "a"};
String[] denied2 = new String[]{allowedPrefix2 + "a"};
FieldPermissions fieldPermissions1 = new FieldPermissions(allowed1, denied1);
FieldPermissions fieldPermissions2 = new FieldPermissions(allowed2, denied2);
FieldPermissions mergedFieldPermissions = FieldPermissions.merge(fieldPermissions1, fieldPermissions2);
assertTrue(mergedFieldPermissions.grantsAccessTo(allowedPrefix1 + "b"));
assertTrue(mergedFieldPermissions.grantsAccessTo(allowedPrefix2 + "b"));
assertFalse(mergedFieldPermissions.grantsAccessTo(denied1[0]));
assertFalse(mergedFieldPermissions.grantsAccessTo(denied2[0]));
allowed1 = new String[]{randomAsciiOfLength(5) + "*", randomAsciiOfLength(5) + "*"};
allowed2 = null;
denied1 = new String[]{allowed1[0] + "a", allowed1[1] + "a"};
denied2 = null;
fieldPermissions1 = new FieldPermissions(allowed1, denied1);
fieldPermissions2 = new FieldPermissions(allowed2, denied2);
mergedFieldPermissions = FieldPermissions.merge(fieldPermissions1, fieldPermissions2);
assertFalse(mergedFieldPermissions.hasFieldLevelSecurity());
allowed1 = new String[]{};
allowed2 = new String[]{randomAsciiOfLength(5) + "*", randomAsciiOfLength(5) + "*"};
denied1 = new String[]{};
denied2 = new String[]{allowed2[0] + "a", allowed2[1] + "a"};
fieldPermissions1 = new FieldPermissions(allowed1, denied1);
fieldPermissions2 = new FieldPermissions(allowed2, denied2);
mergedFieldPermissions = FieldPermissions.merge(fieldPermissions1, fieldPermissions2);
for (String field : allowed2) {
assertTrue(mergedFieldPermissions.grantsAccessTo(field));
}
for (String field : denied2) {
assertFalse(mergedFieldPermissions.grantsAccessTo(field));
}
allowed1 = randomBoolean() ? null : new String[]{"*"};
allowed2 = randomBoolean() ? null : new String[]{"*"};
denied1 = new String[]{"a"};
denied2 = new String[]{"b"};
fieldPermissions1 = new FieldPermissions(allowed1, denied1);
fieldPermissions2 = new FieldPermissions(allowed2, denied2);
mergedFieldPermissions = FieldPermissions.merge(fieldPermissions1, fieldPermissions2);
assertTrue(mergedFieldPermissions.grantsAccessTo("a"));
assertTrue(mergedFieldPermissions.grantsAccessTo("b"));
// test merge does not remove _all
allowed1 = new String[]{"_all"};
allowed2 = new String[]{};
denied1 = null;
denied2 = null;
fieldPermissions1 = new FieldPermissions(allowed1, denied1);
assertTrue(fieldPermissions1.allFieldIsAllowed);
fieldPermissions2 = new FieldPermissions(allowed2, denied2);
assertFalse(fieldPermissions2.allFieldIsAllowed);
mergedFieldPermissions = FieldPermissions.merge(fieldPermissions1, fieldPermissions2);
assertTrue(mergedFieldPermissions.grantsAccessTo("_all"));
assertTrue(mergedFieldPermissions.allFieldIsAllowed);
}
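Stated as a one-method sketch, the merge contract these assertions check is a union of grants. This describes only the observable behavior, not how FieldPermissions.merge builds the merged object:

// Editorial sketch of the observable contract: the merged permission grants a
// field whenever either input permission grants it.
static boolean mergedGrantsAccessToSketch(FieldPermissions a, FieldPermissions b, String field) {
    return a.grantsAccessTo(field) || b.grantsAccessTo(field);
}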
public void testFieldPermissionsStreaming() throws IOException {
BytesStreamOutput out = new BytesStreamOutput();
String[] allowed = new String[]{randomAsciiOfLength(5) + "*", randomAsciiOfLength(5) + "*", randomAsciiOfLength(5) + "*"};
String[] denied = new String[]{allowed[0] + randomAsciiOfLength(5), allowed[1] + randomAsciiOfLength(5),
allowed[2] + randomAsciiOfLength(5)};
FieldPermissions fieldPermissions = new FieldPermissions(allowed, denied);
out.writeOptionalWriteable(fieldPermissions);
out.close();
StreamInput in = out.bytes().streamInput();
FieldPermissions readFieldPermissions = in.readOptionalWriteable(FieldPermissions::new);
// order should be preserved in any case
assertEquals(readFieldPermissions, fieldPermissions);
}
}

View File

@ -9,23 +9,26 @@ import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.XPackSettings;
import org.elasticsearch.xpack.security.audit.logfile.CapturingLogger;
import org.elasticsearch.xpack.security.authc.support.RefreshListener;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.security.authz.permission.ClusterPermission;
import org.elasticsearch.xpack.security.authz.permission.IndicesPermission;
import org.elasticsearch.xpack.security.authz.permission.Role;
import org.elasticsearch.xpack.security.authz.permission.RunAsPermission;
import org.elasticsearch.xpack.security.authz.privilege.ClusterPrivilege;
import org.elasticsearch.xpack.security.authz.privilege.IndexPrivilege;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
@ -37,7 +40,6 @@ import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
@ -163,7 +165,9 @@ public class FileRolesStoreTests extends ESTestCase {
assertThat(group.indices()[0], equalTo("field_idx"));
assertThat(group.privilege(), notNullValue());
assertThat(group.privilege().isAlias(IndexPrivilege.READ), is(true));
assertThat(group.getFields(), contains("foo", "boo"));
assertTrue(group.getFieldPermissions().grantsAccessTo("foo"));
assertTrue(group.getFieldPermissions().grantsAccessTo("boo"));
assertTrue(group.getFieldPermissions().hasFieldLevelSecurity());
role = roles.get("role_query");
assertThat(role, notNullValue());
@ -181,7 +185,7 @@ public class FileRolesStoreTests extends ESTestCase {
assertThat(group.indices()[0], equalTo("query_idx"));
assertThat(group.privilege(), notNullValue());
assertThat(group.privilege().isAlias(IndexPrivilege.READ), is(true));
assertThat(group.getFields(), nullValue());
assertFalse(group.getFieldPermissions().hasFieldLevelSecurity());
assertThat(group.getQuery(), notNullValue());
role = roles.get("role_query_fields");
@ -200,7 +204,9 @@ public class FileRolesStoreTests extends ESTestCase {
assertThat(group.indices()[0], equalTo("query_fields_idx"));
assertThat(group.privilege(), notNullValue());
assertThat(group.privilege().isAlias(IndexPrivilege.READ), is(true));
assertThat(group.getFields(), contains("foo", "boo"));
assertTrue(group.getFieldPermissions().grantsAccessTo("foo"));
assertTrue(group.getFieldPermissions().grantsAccessTo("boo"));
assertTrue(group.getFieldPermissions().hasFieldLevelSecurity());
assertThat(group.getQuery(), notNullValue());
}
@ -395,4 +401,16 @@ public class FileRolesStoreTests extends ESTestCase {
assertThat(usageStats.get("fls"), is(flsDlsEnabled));
assertThat(usageStats.get("dls"), is(flsDlsEnabled));
}
// test that we can read a role where field permissions are stored in 2.x format (fields:...)
public void testBWCFieldPermissions() throws IOException {
Path path = getDataPath("roles2xformat.yml");
byte[] bytes = Files.readAllBytes(path);
String roleString = new String(bytes, Charset.defaultCharset());
RoleDescriptor role = FileRolesStore.parseRoleDescriptor(roleString, path, logger, true,
Settings.EMPTY);
RoleDescriptor.IndicesPrivileges indicesPrivileges = role.getIndicesPrivileges()[0];
assertTrue(indicesPrivileges.getFieldPermissions().grantsAccessTo("foo"));
assertTrue(indicesPrivileges.getFieldPermissions().grantsAccessTo("boo"));
}
}

View File

@ -0,0 +1,29 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.authz.store;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
public class NativeRolesStoreTests extends ESTestCase {
// test that we can read a role where field permissions are stored in 2.x format (fields:...)
public void testBWCFieldPermissions() throws IOException {
Path path = getDataPath("roles2xformat.json");
byte[] bytes = Files.readAllBytes(path);
String roleString = new String(bytes, Charset.defaultCharset());
RoleDescriptor role = NativeRolesStore.transformRole("role1", new BytesArray(roleString), logger);
RoleDescriptor.IndicesPrivileges indicesPrivileges = role.getIndicesPrivileges()[0];
assertTrue(indicesPrivileges.getFieldPermissions().grantsAccessTo("foo"));
assertTrue(indicesPrivileges.getFieldPermissions().grantsAccessTo("boo"));
}
}

View File

@ -0,0 +1,19 @@
{
"indices": [
{
"names": [
"test"
],
"privileges": [
"READ"
],
"query": {
"match_all": {}
},
"fields": [
"foo",
"boo"
]
}
]
}

View File

@ -45,9 +45,10 @@ role_fields:
- 'field_idx'
privileges:
- READ
fields:
- foo
- boo
field_security:
grant:
- foo
- boo
role_query:
indices:
@ -65,6 +66,7 @@ role_query_fields:
- READ
query:
match_all:
fields:
- foo
- boo
field_security:
grant:
- foo
- boo

View File

@ -0,0 +1,19 @@
{
"indices": [
{
"names": [
"test"
],
"privileges": [
"READ"
],
"query": {
"match_all": {}
},
"fields": [
"foo",
"boo"
]
}
]
}

View File

@ -0,0 +1,11 @@
role1:
indices:
- names:
- 'test'
privileges:
- READ
query:
match_all:
fields:
- foo
- boo
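
Written in the new syntax introduced by this commit (mirroring the field_security blocks added to roles.yml above), the same role would read:

role1:
  indices:
    - names:
        - 'test'
      privileges:
        - READ
      query:
        match_all:
      field_security:
        grant:
          - foo
          - boo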