Clean up field-level security. (elastic/elasticsearch#4784)

This commit reuses the automaton that defines the allowed fields in
`FieldSubsetReader` rather than resolving the list of all matching fields from
the mapping. As a side effect, this change fixes a bug where unmapped fields
could previously not be read from the source. Moreover, it avoids determinization
errors when the number of matching fields is high.
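
As an illustration of the approach (a standalone sketch using plain Lucene automaton APIs rather than the x-pack `Automatons` helper; the class name, patterns, and field names are hypothetical):

import java.util.Arrays;

import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;

public class FieldFilterSketch {
    public static void main(String[] args) {
        // Compile the allowed-field patterns into one automaton instead of
        // expanding them against the mapping into a concrete field list.
        Automaton allowed = Operations.union(Arrays.asList(
                new RegExp("field.*").toAutomaton(),
                new RegExp("_source").toAutomaton()));
        CharacterRunAutomaton filter = new CharacterRunAutomaton(allowed);

        // Unmapped fields work too: nothing here consults the mapping.
        for (String field : new String[] {"fieldA", "field_unmapped", "other"}) {
            System.out.println(field + " allowed? " + filter.run(field));
        }
    }
}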

It also uses `CharacterRunAutomaton` to evaluate automata against a given
string, which should be faster than naively stepping through the automaton since
`CharacterRunAutomaton` builds a lookup table of transitions.
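
For contrast, a minimal sketch of both evaluation styles (class name and strings hypothetical; `Operations.run` is the naive per-character walk the old code relied on):

import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.Operations;

public class RunAutomatonSketch {
    public static void main(String[] args) {
        Automaton a = Automata.makeString("fieldA");

        // Naive evaluation: searches each state's transition list for every
        // character of the input.
        boolean naive = Operations.run(a, "fieldA"); // true

        // Table-driven evaluation: transitions are flattened into a lookup
        // table at construction time, so each step is an array access.
        CharacterRunAutomaton cra = new CharacterRunAutomaton(a);
        boolean fast = cra.run("fieldA"); // true

        // Manual stepping, mirroring what FieldSubsetReader#step (added below)
        // does for nested source keys:
        int state = 0;
        String key = "fieldA";
        for (int i = 0; state != -1 && i < key.length(); ++i) {
            state = cra.step(state, key.charAt(i));
        }
        System.out.println(naive + " " + fast + " " + (state != -1 && cra.isAccept(state)));
    }
}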

Closes elastic/elasticsearch#4679

Original commit: elastic/x-pack-elasticsearch@a30913dbd5
Adrien Grand 2017-02-03 12:31:04 +01:00 committed by GitHub
parent 44618b5b87
commit 16be5ecc81
10 changed files with 414 additions and 441 deletions

View File

@ -464,7 +464,7 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin {
() -> {
throw new IllegalArgumentException("permission filters are not allowed to use the current timestamp");
}),
indexService.mapperService(), indexService.cache().bitsetFilterCache(),
indexService.cache().bitsetFilterCache(),
indexService.getThreadPool().getThreadContext(), licenseState,
indexService.getScriptService()));
}

View File

@ -24,6 +24,7 @@ import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FilterIterator;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
@ -31,15 +32,15 @@ import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* A {@link FilterLeafReader} that exposes only a subset
@ -55,35 +56,36 @@ public final class FieldSubsetReader extends FilterLeafReader {
* can be used normally (e.g. passed to {@link DirectoryReader#openIfChanged(DirectoryReader)})
* and so on.
* @param in reader to filter
* @param fieldNames fields to filter.
* @param filter fields to filter.
*/
public static DirectoryReader wrap(DirectoryReader in, Set<String> fieldNames) throws IOException {
return new FieldSubsetDirectoryReader(in, fieldNames);
public static DirectoryReader wrap(DirectoryReader in, CharacterRunAutomaton filter) throws IOException {
return new FieldSubsetDirectoryReader(in, filter);
}
// wraps subreaders with fieldsubsetreaders.
static class FieldSubsetDirectoryReader extends FilterDirectoryReader {
private final Set<String> fieldNames;
private final CharacterRunAutomaton filter;
FieldSubsetDirectoryReader(DirectoryReader in, final Set<String> fieldNames) throws IOException {
FieldSubsetDirectoryReader(DirectoryReader in, final CharacterRunAutomaton filter) throws IOException {
super(in, new FilterDirectoryReader.SubReaderWrapper() {
@Override
public LeafReader wrap(LeafReader reader) {
return new FieldSubsetReader(reader, fieldNames);
return new FieldSubsetReader(reader, filter);
}
});
this.fieldNames = fieldNames;
this.filter = filter;
verifyNoOtherFieldSubsetDirectoryReaderIsWrapped(in);
}
@Override
protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
return new FieldSubsetDirectoryReader(in, fieldNames);
return new FieldSubsetDirectoryReader(in, filter);
}
public Set<String> getFieldNames() {
return fieldNames;
/** Return the automaton that is used to filter fields. */
CharacterRunAutomaton getFilter() {
return filter;
}
private static void verifyNoOtherFieldSubsetDirectoryReaderIsWrapped(DirectoryReader reader) {
@ -106,22 +108,22 @@ public final class FieldSubsetReader extends FilterLeafReader {
/** List of filtered fields */
private final FieldInfos fieldInfos;
/** List of filtered fields. this is used for _source filtering */
private final String[] fieldNames;
/** An automaton that only accepts authorized fields. */
private final CharacterRunAutomaton filter;
/**
* Wrap a single segment, exposing a subset of its fields.
*/
FieldSubsetReader(LeafReader in, Set<String> fieldNames) {
FieldSubsetReader(LeafReader in, CharacterRunAutomaton filter) {
super(in);
ArrayList<FieldInfo> filteredInfos = new ArrayList<>();
for (FieldInfo fi : in.getFieldInfos()) {
if (fieldNames.contains(fi.name)) {
if (filter.run(fi.name)) {
filteredInfos.add(fi);
}
}
fieldInfos = new FieldInfos(filteredInfos.toArray(new FieldInfo[filteredInfos.size()]));
this.fieldNames = fieldNames.toArray(new String[fieldNames.size()]);
this.filter = filter;
}
/** returns true if this field is allowed. */
@ -145,6 +147,75 @@ public final class FieldSubsetReader extends FilterLeafReader {
return f.iterator().hasNext() ? f : null;
}
/** Filter a map by a {@link CharacterRunAutomaton} that defines the fields to retain. */
static Map<String, Object> filter(Map<String, ?> map, CharacterRunAutomaton includeAutomaton, int initialState) {
Map<String, Object> filtered = new HashMap<>();
for (Map.Entry<String, ?> entry : map.entrySet()) {
String key = entry.getKey();
int state = step(includeAutomaton, key, initialState);
if (state == -1) {
continue;
}
Object value = entry.getValue();
if (includeAutomaton.isAccept(state)) {
filtered.put(key, value);
} else if (value instanceof Map) {
state = includeAutomaton.step(state, '.');
if (state == -1) {
continue;
}
Map<String, ?> mapValue = (Map<String, ?>) value;
Map<String, Object> filteredValue = filter(mapValue, includeAutomaton, state);
if (filteredValue.isEmpty() == false) {
filtered.put(key, filteredValue);
}
} else if (value instanceof Iterable) {
Iterable<?> iterableValue = (Iterable<?>) value;
List<Object> filteredValue = filter(iterableValue, includeAutomaton, state);
if (filteredValue.isEmpty() == false) {
filtered.put(key, filteredValue);
}
}
}
return filtered;
}
/** Filter a list by a {@link CharacterRunAutomaton} that defines the fields to retain. */
private static List<Object> filter(Iterable<?> iterable, CharacterRunAutomaton includeAutomaton, int initialState) {
List<Object> filtered = new ArrayList<>();
for (Object value : iterable) {
if (value instanceof Map) {
int state = includeAutomaton.step(initialState, '.');
if (state == -1) {
continue;
}
Map<String, Object> filteredValue = filter((Map<String, ?>)value, includeAutomaton, state);
if (filteredValue.isEmpty() == false) {
filtered.add(filteredValue);
}
} else if (value instanceof Iterable) {
List<Object> filteredValue = filter((Iterable<?>) value, includeAutomaton, initialState);
if (filteredValue.isEmpty() == false) {
filtered.add(filteredValue);
}
}
}
return filtered;
}
/** Step through all characters of the provided string, and return the
* resulting state, or -1 if that did not lead to a valid state. */
private static int step(CharacterRunAutomaton automaton, String key, int state) {
for (int i = 0; state != -1 && i < key.length(); ++i) {
state = automaton.step(state, key.charAt(i));
}
return state;
}
@Override
public void document(final int docID, final StoredFieldVisitor visitor) throws IOException {
super.document(docID, new StoredFieldVisitor() {
@ -154,7 +225,7 @@ public final class FieldSubsetReader extends FilterLeafReader {
// for _source, parse, filter out the fields we care about, and serialize back downstream
BytesReference bytes = new BytesArray(value);
Tuple<XContentType, Map<String, Object>> result = XContentHelper.convertToMap(bytes, true);
Map<String, Object> transformedSource = XContentMapValues.filter(result.v2(), fieldNames, null);
Map<String, Object> transformedSource = filter(result.v2(), filter, 0);
XContentBuilder xContentBuilder = XContentBuilder.builder(result.v1().xContent()).map(transformedSource);
visitor.binaryField(fieldInfo, BytesReference.toBytes(xContentBuilder.bytes()));
} else {
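
To make the new `_source` filtering concrete, here is a small usage sketch of the package-private `filter` helper added above (it has to live in the same package; the field names are hypothetical, and the behavior mirrors what the `testSourceFiltering` unit test further down exercises):

package org.elasticsearch.xpack.security.authz.accesscontrol;

import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;

public class SourceFilterSketch {
    public static void main(String[] args) {
        // A nested source document: { "foo": { "bar": 42, "baz": 6 }, "qux": "quux" }
        Map<String, Object> inner = new HashMap<>();
        inner.put("bar", 42);
        inner.put("baz", 6);
        Map<String, Object> source = new HashMap<>();
        source.put("foo", inner);
        source.put("qux", "quux");

        // Allow exactly "foo.bar": filter() steps the automaton through "foo",
        // then '.', then recurses into the sub-map from the reached state.
        CharacterRunAutomaton include =
                new CharacterRunAutomaton(Automata.makeString("foo.bar"));
        Map<String, Object> filtered = FieldSubsetReader.filter(source, include, 0);
        System.out.println(filtered); // {foo={bar=42}}
    }
}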

View File

@ -42,10 +42,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.engine.EngineException;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParentFieldMapper;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.BoostingQueryBuilder;
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
@ -77,10 +73,8 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import static org.apache.lucene.search.BooleanClause.Occur.SHOULD;
@ -98,8 +92,6 @@ import static org.apache.lucene.search.BooleanClause.Occur.SHOULD;
*/
public class SecurityIndexSearcherWrapper extends IndexSearcherWrapper {
private final MapperService mapperService;
private final Set<String> allowedMetaFields;
private final Function<ShardId, QueryShardContext> queryShardContextProvider;
private final BitsetFilterCache bitsetFilterCache;
private final XPackLicenseState licenseState;
@ -108,26 +100,14 @@ public class SecurityIndexSearcherWrapper extends IndexSearcherWrapper {
private final ScriptService scriptService;
public SecurityIndexSearcherWrapper(IndexSettings indexSettings, Function<ShardId, QueryShardContext> queryShardContextProvider,
MapperService mapperService, BitsetFilterCache bitsetFilterCache,
ThreadContext threadContext, XPackLicenseState licenseState,
BitsetFilterCache bitsetFilterCache, ThreadContext threadContext, XPackLicenseState licenseState,
ScriptService scriptService) {
this.scriptService = scriptService;
this.logger = Loggers.getLogger(getClass(), indexSettings.getSettings());
this.mapperService = mapperService;
this.queryShardContextProvider = queryShardContextProvider;
this.bitsetFilterCache = bitsetFilterCache;
this.threadContext = threadContext;
this.licenseState = licenseState;
Set<String> allowedMetaFields = new HashSet<>();
allowedMetaFields.addAll(Arrays.asList(MapperService.getAllMetaFields()));
allowedMetaFields.add(FieldNamesFieldMapper.NAME); // TODO: add _field_names to MapperService#META_FIELDS?
allowedMetaFields.add("_source"); // TODO: add _source to MapperService#META_FIELDS?
allowedMetaFields.add("_version"); // TODO: add _version to MapperService#META_FIELDS?
allowedMetaFields.remove("_all"); // The _all field contains actual data and we can't include that by default.
allowedMetaFields.add("_seq_no"); // TODO: add _seq_no to MapperService#META_FIELDS?
this.allowedMetaFields = Collections.unmodifiableSet(allowedMetaFields);
}
@Override
@ -136,7 +116,6 @@ public class SecurityIndexSearcherWrapper extends IndexSearcherWrapper {
return reader;
}
final Set<String> allowedMetaFields = this.allowedMetaFields;
try {
final IndicesAccessControl indicesAccessControl = getIndicesAccessControl();
@ -170,14 +149,7 @@ public class SecurityIndexSearcherWrapper extends IndexSearcherWrapper {
reader = DocumentSubsetReader.wrap(reader, bitsetFilterCache, new ConstantScoreQuery(filter.build()));
}
if (permissions.getFieldPermissions().hasFieldLevelSecurity()) {
// now add the allowed fields based on the current granted permissions and :
Set<String> allowedFields = permissions.getFieldPermissions().resolveAllowedFields(allowedMetaFields, mapperService);
resolveParentChildJoinFields(allowedFields);
reader = FieldSubsetReader.wrap(reader, allowedFields);
}
return reader;
return permissions.getFieldPermissions().filter(reader);
} catch (IOException e) {
logger.error("Unable to apply field level security");
throw ExceptionsHelper.convertToElastic(e);
@ -259,20 +231,6 @@ public class SecurityIndexSearcherWrapper extends IndexSearcherWrapper {
}
}
public Set<String> getAllowedMetaFields() {
return allowedMetaFields;
}
private void resolveParentChildJoinFields(Set<String> allowedFields) {
for (DocumentMapper mapper : mapperService.docMappers(false)) {
ParentFieldMapper parentFieldMapper = mapper.parentFieldMapper();
if (parentFieldMapper.active()) {
String joinField = ParentFieldMapper.joinField(parentFieldMapper.type());
allowedFields.add(joinField);
}
}
}
static void intersectScorerAndRoleBits(Scorer scorer, SparseFixedBitSet roleBits, LeafCollector collector, Bits acceptDocs) throws
IOException {
// ConjunctionDISI uses the DocIdSetIterator#cost() to order the iterators, so if roleBits has the lowest cardinality it should

View File

@ -5,7 +5,14 @@
*/
package org.elasticsearch.xpack.security.authz.permission;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.MinimizationOperations;
import org.apache.lucene.util.automaton.Operations;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
@ -14,18 +21,13 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.index.mapper.AllFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.security.authz.accesscontrol.FieldSubsetReader;
import org.elasticsearch.xpack.security.support.Automatons;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import static org.apache.lucene.util.automaton.Operations.isTotal;
import static org.apache.lucene.util.automaton.Operations.run;
import static org.apache.lucene.util.automaton.Operations.subsetOf;
import static org.elasticsearch.xpack.security.support.Automatons.minusAndMinimize;
@ -37,7 +39,7 @@ import static org.elasticsearch.xpack.security.support.Automatons.minusAndMinimi
* 1. It has to match the patterns in grantedFieldsArray
* 2. it must not match the patterns in deniedFieldsArray
*/
public final class FieldPermissions implements Writeable {
public final class FieldPermissions implements Writeable, Accountable {
public static final FieldPermissions DEFAULT = new FieldPermissions();
@ -48,12 +50,12 @@ public final class FieldPermissions implements Writeable {
private final String[] deniedFieldsArray;
// an automaton that matches all strings that match the patterns in permittedFieldsArray but does not match those that also match a
// pattern in deniedFieldsArray. If permittedFieldsAutomaton is null we assume that all fields are granted access to.
private final Automaton permittedFieldsAutomaton;
private final CharacterRunAutomaton permittedFieldsAutomaton;
private final boolean permittedFieldsAutomatonIsTotal;
// we cannot easily determine if all fields are allowed and we can therefore also allow access to the _all field hence we deny access
// to _all unless this was explicitly configured.
private final boolean allFieldIsAllowed;
private final long ramBytesUsed;
/** Constructor that does not enable field-level security: all fields are accepted. */
public FieldPermissions() {
this(null, null);
}
@ -62,67 +64,96 @@ public final class FieldPermissions implements Writeable {
this(in.readOptionalStringArray(), in.readOptionalStringArray());
}
/** Constructor that enables field-level security based on include/exclude rules. Exclude rules
* have precedence over include rules. */
public FieldPermissions(@Nullable String[] grantedFieldsArray, @Nullable String[] deniedFieldsArray) {
this(grantedFieldsArray, deniedFieldsArray, initializePermittedFieldsAutomaton(grantedFieldsArray, deniedFieldsArray),
checkAllFieldIsAllowed(grantedFieldsArray, deniedFieldsArray));
this(grantedFieldsArray, deniedFieldsArray, initializePermittedFieldsAutomaton(grantedFieldsArray, deniedFieldsArray));
}
FieldPermissions(@Nullable String[] grantedFieldsArray, @Nullable String[] deniedFieldsArray,
Automaton permittedFieldsAutomaton, boolean allFieldIsAllowed) {
private FieldPermissions(@Nullable String[] grantedFieldsArray, @Nullable String[] deniedFieldsArray,
Automaton permittedFieldsAutomaton) {
if (permittedFieldsAutomaton.isDeterministic() == false && permittedFieldsAutomaton.getNumStates() > 1) {
// we only accept deterministic automata so that the CharacterRunAutomaton constructor
// directly wraps the provided automaton
throw new IllegalArgumentException("Only accepts deterministic automata");
}
this.grantedFieldsArray = grantedFieldsArray;
this.deniedFieldsArray = deniedFieldsArray;
this.permittedFieldsAutomaton = permittedFieldsAutomaton;
this.allFieldIsAllowed = allFieldIsAllowed;
this.permittedFieldsAutomaton = new CharacterRunAutomaton(permittedFieldsAutomaton);
// we cache the result of isTotal since this might be a costly operation
this.permittedFieldsAutomatonIsTotal = Operations.isTotal(permittedFieldsAutomaton);
long ramBytesUsed = ramBytesUsed(grantedFieldsArray);
ramBytesUsed += ramBytesUsed(deniedFieldsArray);
ramBytesUsed += permittedFieldsAutomaton.ramBytesUsed();
ramBytesUsed += runAutomatonRamBytesUsed(permittedFieldsAutomaton);
this.ramBytesUsed = ramBytesUsed;
}
private static boolean checkAllFieldIsAllowed(String[] grantedFieldsArray, String[] deniedFieldsArray) {
if (deniedFieldsArray != null) {
for (String fieldName : deniedFieldsArray) {
if (fieldName.equals(AllFieldMapper.NAME)) {
return false;
}
/**
* Return an estimation of the ram bytes used by the given {@link String}
* array.
*/
private static long ramBytesUsed(String[] array) {
long ramBytesUsed = 0;
if (array != null) {
ramBytesUsed += RamUsageEstimator.shallowSizeOf(array);
for (String s : array) {
// might be overestimated because of compact strings but it is better to overestimate here
ramBytesUsed += s.length() * Character.BYTES;
}
}
if (grantedFieldsArray != null) {
for (String fieldName : grantedFieldsArray) {
if (fieldName.equals(AllFieldMapper.NAME)) {
return true;
}
}
}
return false;
return ramBytesUsed;
}
/**
* Return an estimation of the ram bytes used by a {@link CharacterRunAutomaton}
* that wraps the given automaton.
*/
private static long runAutomatonRamBytesUsed(Automaton a) {
return a.getNumStates() * 5; // wild guess, better than 0
}
private static Automaton initializePermittedFieldsAutomaton(final String[] grantedFieldsArray, final String[] deniedFieldsArray) {
Automaton grantedFieldsAutomaton;
if (grantedFieldsArray == null || containsWildcard(grantedFieldsArray)) {
if (grantedFieldsArray == null || Arrays.stream(grantedFieldsArray).anyMatch(Regex::isMatchAllPattern)) {
grantedFieldsAutomaton = Automatons.MATCH_ALL;
} else {
grantedFieldsAutomaton = Automatons.patterns(grantedFieldsArray);
// an automaton that includes metadata fields, including join fields created by the _parent field such
// as _parent#type
Automaton metaFieldsAutomaton = Operations.concatenate(Automata.makeChar('_'), Automata.makeAnyString());
grantedFieldsAutomaton = Operations.union(Automatons.patterns(grantedFieldsArray), metaFieldsAutomaton);
}
Automaton deniedFieldsAutomaton;
if (deniedFieldsArray == null || deniedFieldsArray.length == 0) {
deniedFieldsAutomaton = Automatons.EMPTY;
} else {
deniedFieldsAutomaton = Automatons.patterns(deniedFieldsArray);
}
grantedFieldsAutomaton = MinimizationOperations.minimize(grantedFieldsAutomaton, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
deniedFieldsAutomaton = MinimizationOperations.minimize(deniedFieldsAutomaton, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
if (subsetOf(deniedFieldsAutomaton, grantedFieldsAutomaton) == false) {
throw new ElasticsearchSecurityException("Exceptions for field permissions must be a subset of the " +
"granted fields but " + Arrays.toString(deniedFieldsArray) + " is not a subset of " +
Arrays.toString(grantedFieldsArray));
}
grantedFieldsAutomaton = minusAndMinimize(grantedFieldsAutomaton, deniedFieldsAutomaton);
return grantedFieldsAutomaton;
}
private static boolean containsWildcard(String[] grantedFieldsArray) {
for (String fieldPattern : grantedFieldsArray) {
if (Regex.isMatchAllPattern(fieldPattern)) {
return true;
if ((grantedFieldsArray == null || Arrays.asList(grantedFieldsArray).contains(AllFieldMapper.NAME) == false) &&
(deniedFieldsArray == null || Arrays.asList(deniedFieldsArray).contains(AllFieldMapper.NAME) == false)) {
// It is not explicitly stated whether _all should be allowed
// In that case we automatically disable _all, unless all fields would match
if (Operations.isTotal(grantedFieldsAutomaton) && Operations.isEmpty(deniedFieldsAutomaton)) {
// all fields are accepted, so using _all is fine
} else {
deniedFieldsAutomaton = Operations.union(deniedFieldsAutomaton, Automata.makeString(AllFieldMapper.NAME));
}
}
return false;
grantedFieldsAutomaton = minusAndMinimize(grantedFieldsAutomaton, deniedFieldsAutomaton);
return grantedFieldsAutomaton;
}
@Override
@ -160,11 +191,7 @@ public final class FieldPermissions implements Writeable {
* fieldName can be a wildcard.
*/
public boolean grantsAccessTo(String fieldName) {
return isTotal(permittedFieldsAutomaton) || run(permittedFieldsAutomaton, fieldName);
}
Automaton getPermittedFieldsAutomaton() {
return permittedFieldsAutomaton;
return permittedFieldsAutomatonIsTotal || permittedFieldsAutomaton.run(fieldName);
}
@Nullable
@ -177,30 +204,22 @@ public final class FieldPermissions implements Writeable {
return deniedFieldsArray;
}
/** Return whether field-level security is enabled, ie. whether any field might be filtered out. */
public boolean hasFieldLevelSecurity() {
return isTotal(permittedFieldsAutomaton) == false;
return permittedFieldsAutomatonIsTotal == false;
}
boolean isAllFieldIsAllowed() {
return allFieldIsAllowed;
/** Return a wrapped reader that only exposes allowed fields. */
public DirectoryReader filter(DirectoryReader reader) throws IOException {
if (hasFieldLevelSecurity() == false) {
return reader;
}
return FieldSubsetReader.wrap(reader, permittedFieldsAutomaton);
}
public Set<String> resolveAllowedFields(Set<String> allowedMetaFields, MapperService mapperService) {
HashSet<String> finalAllowedFields = new HashSet<>();
// we always add the allowed meta fields because we must make sure access is not denied accidentally
finalAllowedFields.addAll(allowedMetaFields);
// now check all other fields if we allow them
Collection<String> allFields = mapperService.simpleMatchToIndexNames("*");
for (String fieldName : allFields) {
if (grantsAccessTo(fieldName)) {
finalAllowedFields.add(fieldName);
}
}
if (allFieldIsAllowed == false) {
// we probably added the _all field and now we have to remove it again
finalAllowedFields.remove("_all");
}
return finalAllowedFields;
// for testing only
CharacterRunAutomaton getIncludeAutomaton() {
return permittedFieldsAutomaton;
}
@Override
@ -210,7 +229,6 @@ public final class FieldPermissions implements Writeable {
FieldPermissions that = (FieldPermissions) o;
if (allFieldIsAllowed != that.allFieldIsAllowed) return false;
// Probably incorrect - comparing Object[] arrays with Arrays.equals
if (!Arrays.equals(grantedFieldsArray, that.grantedFieldsArray)) return false;
// Probably incorrect - comparing Object[] arrays with Arrays.equals
@ -221,7 +239,11 @@ public final class FieldPermissions implements Writeable {
public int hashCode() {
int result = Arrays.hashCode(grantedFieldsArray);
result = 31 * result + Arrays.hashCode(deniedFieldsArray);
result = 31 * result + (allFieldIsAllowed ? 1 : 0);
return result;
}
@Override
public long ramBytesUsed() {
return ramBytesUsed;
}
}
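
A short usage sketch of the reworked class (patterns hypothetical): the granted automaton is unioned with `_` followed by any string, so metadata fields, including `_parent#type` join fields, are implicitly allowed, while `_all` stays denied unless explicitly granted:

import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;

public class FieldPermissionsSketch {
    public static void main(String[] args) {
        // Grant "foo*" but deny "foobar".
        FieldPermissions permissions =
                new FieldPermissions(new String[] {"foo*"}, new String[] {"foobar"});

        System.out.println(permissions.grantsAccessTo("foo"));     // true: matches foo*
        System.out.println(permissions.grantsAccessTo("foobar"));  // false: explicitly denied
        System.out.println(permissions.grantsAccessTo("_source")); // true: implicit meta-field grant
        System.out.println(permissions.grantsAccessTo("_all"));    // false: denied unless granted

        // Field-level security is now applied by the permissions object itself,
        // e.g. DirectoryReader filtered = permissions.filter(reader);
    }
}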

View File

@ -5,7 +5,6 @@
*/
package org.elasticsearch.xpack.security.authz.permission;
import org.apache.lucene.util.automaton.Operations;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.cache.Cache;
@ -22,6 +21,7 @@ import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import static org.elasticsearch.xpack.security.Security.setting;
@ -39,8 +39,7 @@ public final class FieldPermissionsCache {
public FieldPermissionsCache(Settings settings) {
this.cache = CacheBuilder.<Key, FieldPermissions>builder()
.setMaximumWeight(CACHE_SIZE_SETTING.get(settings))
// this is not completely accurate but in most cases the automaton should be the most expensive aspect
.weigher((key, fieldPermissions) -> fieldPermissions.getPermittedFieldsAutomaton().ramBytesUsed())
.weigher((key, fieldPermissions) -> fieldPermissions.ramBytesUsed())
.build();
}
@ -90,7 +89,7 @@ public final class FieldPermissionsCache {
*/
FieldPermissions getFieldPermissions(Collection<FieldPermissions> fieldPermissionsCollection) {
Optional<FieldPermissions> allowAllFieldPermissions = fieldPermissionsCollection.stream()
.filter((fp) -> Operations.isTotal(fp.getPermittedFieldsAutomaton()))
.filter(((Predicate<FieldPermissions>) (FieldPermissions::hasFieldLevelSecurity)).negate())
.findFirst();
return allowAllFieldPermissions.orElseGet(() -> {
final Set<String> allowedFields;

View File

@ -16,6 +16,8 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
@ -37,8 +39,6 @@ import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;
import java.util.Collections;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@ -98,7 +98,7 @@ public class FieldDataCacheWithFieldSubsetReaderTests extends ESTestCase {
assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(numDocs));
assertThat(indexFieldDataCache.topLevelBuilds, equalTo(1));
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet());
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, new CharacterRunAutomaton(Automata.makeEmpty()));
global = sortedSetDVOrdinalsIndexFieldData.loadGlobal(ir);
atomic = global.load(ir.leaves().get(0));
assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(0L));
@ -111,7 +111,7 @@ public class FieldDataCacheWithFieldSubsetReaderTests extends ESTestCase {
assertThat(atomic.getOrdinalsValues().getValueCount(), greaterThanOrEqualTo(1L));
}
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet());
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, new CharacterRunAutomaton(Automata.makeEmpty()));
for (LeafReaderContext context : ir.leaves()) {
AtomicOrdinalsFieldData atomic = sortedSetDVOrdinalsIndexFieldData.load(context);
assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(0L));
@ -127,7 +127,7 @@ public class FieldDataCacheWithFieldSubsetReaderTests extends ESTestCase {
assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(numDocs));
assertThat(indexFieldDataCache.topLevelBuilds, equalTo(1));
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet());
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, new CharacterRunAutomaton(Automata.makeEmpty()));
global = pagedBytesIndexFieldData.loadGlobal(ir);
atomic = global.load(ir.leaves().get(0));
assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(0L));
@ -142,7 +142,7 @@ public class FieldDataCacheWithFieldSubsetReaderTests extends ESTestCase {
}
assertThat(indexFieldDataCache.leafLevelBuilds, equalTo(ir.leaves().size()));
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, Collections.<String>emptySet());
DirectoryReader ir = FieldSubsetReader.wrap(this.ir, new CharacterRunAutomaton(Automata.makeEmpty()));
for (LeafReaderContext context : ir.leaves()) {
AtomicOrdinalsFieldData atomic = pagedBytesIndexFieldData.load(context);
assertThat(atomic.getOrdinalsValues().getValueCount(), equalTo(0L));

View File

@ -38,14 +38,23 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.security.support.Automatons;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
@ -69,8 +78,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA")));
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -101,8 +109,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA")));
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -188,8 +195,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA")));
// see only one field
Document d2 = ir.document(0);
@ -215,8 +221,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA")));
// see only one field
Document d2 = ir.document(0);
@ -242,8 +247,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA")));
// see only one field
Document d2 = ir.document(0);
@ -269,8 +273,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA")));
// see only one field
Document d2 = ir.document(0);
@ -296,8 +299,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA")));
// see only one field
Document d2 = ir.document(0);
@ -323,8 +325,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA")));
// see only one field
Document d2 = ir.document(0);
@ -352,8 +353,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA")));
// see only one field
Fields vectors = ir.getTermVectors(0);
@ -382,8 +382,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA")));
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -409,8 +408,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA")));
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -441,8 +439,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA")));
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -473,8 +470,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA")));
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -505,8 +501,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA")));
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -541,8 +536,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA")));
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -576,8 +570,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA")));
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -593,7 +586,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
/**
* test special handling for _source field.
*/
public void testSourceFiltering() throws Exception {
public void testSourceFilteringIntegration() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(null);
IndexWriter iw = new IndexWriter(dir, iwc);
@ -607,10 +600,8 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = new HashSet<>();
fields.add("fieldA");
fields.add(SourceFieldMapper.NAME);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
Automaton automaton = Automatons.patterns(Arrays.asList("fieldA", SourceFieldMapper.NAME));
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton));
// see only one field
Document d2 = ir.document(0);
@ -620,7 +611,82 @@ public class FieldSubsetReaderTests extends ESTestCase {
TestUtil.checkReader(ir);
IOUtils.close(ir, iw, dir);
}
public void testSourceFiltering() {
// include on top-level value
Map<String, Object> map = new HashMap<>();
map.put("foo", 3);
map.put("bar", "baz");
CharacterRunAutomaton include = new CharacterRunAutomaton(Automata.makeString("foo"));
Map<String, Object> filtered = FieldSubsetReader.filter(map, include, 0);
Map<String, Object> expected = new HashMap<>();
expected.put("foo", 3);
assertEquals(expected, filtered);
// include on object
map = new HashMap<>();
Map<String, Object> subMap = new HashMap<>();
subMap.put("bar", 42);
subMap.put("baz", 6);
map.put("foo", subMap);
map.put("bar", "baz");
include = new CharacterRunAutomaton(Automata.makeString("foo"));
filtered = FieldSubsetReader.filter(map, include, 0);
expected = new HashMap<>();
expected.put("foo", subMap);
assertEquals(expected, filtered);
// include on inner wildcard
include = new CharacterRunAutomaton(Automatons.patterns("foo.*"));
filtered = FieldSubsetReader.filter(map, include, 0);
expected = new HashMap<>();
expected.put("foo", subMap);
assertEquals(expected, filtered);
// include on leading wildcard
include = new CharacterRunAutomaton(Automatons.patterns("*.bar"));
filtered = FieldSubsetReader.filter(map, include, 0);
expected = new HashMap<>();
subMap = new HashMap<>();
subMap.put("bar", 42);
expected.put("foo", subMap);
assertEquals(expected, filtered);
// include on inner value
include = new CharacterRunAutomaton(Automatons.patterns("foo.bar"));
filtered = FieldSubsetReader.filter(map, include, 0);
assertEquals(expected, filtered);
// include on inner array
map = new HashMap<>();
List<Object> subArray = new ArrayList<>();
subMap = new HashMap<>();
subMap.put("bar", 42);
subMap.put("baz", "foo");
subArray.add(subMap);
subArray.add(12);
map.put("foo", subMap);
include = new CharacterRunAutomaton(Automatons.patterns("foo.bar"));
filtered = FieldSubsetReader.filter(map, include, 0);
expected = new HashMap<>();
subArray = new ArrayList<>();
subMap = new HashMap<>();
subMap.put("bar", 42);
subArray.add(subMap);
expected.put("foo", subMap);
assertEquals(expected, filtered);
}
/**
* test special handling for _field_names field.
*/
@ -640,8 +706,8 @@ public class FieldSubsetReaderTests extends ESTestCase {
// open reader
Set<String> fields = new HashSet<>();
fields.add("fieldA");
fields.add(FieldNamesFieldMapper.NAME);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
Automaton automaton = Automatons.patterns(Arrays.asList("fieldA", FieldNamesFieldMapper.NAME));
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton));
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -686,11 +752,8 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = new HashSet<>();
fields.add("fieldA");
fields.add("fieldC");
fields.add(FieldNamesFieldMapper.NAME);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
Automaton automaton = Automatons.patterns(Arrays.asList("fieldA", "fieldC", FieldNamesFieldMapper.NAME));
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton));
// see only two fields
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -734,11 +797,8 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = new HashSet<>();
fields.add("fieldA");
fields.add("fieldC");
fields.add(FieldNamesFieldMapper.NAME);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
Automaton automaton = Automatons.patterns(Arrays.asList("fieldA", "fieldC", FieldNamesFieldMapper.NAME));
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton));
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -771,10 +831,8 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = new HashSet<>();
fields.add("fieldA");
fields.add(FieldNamesFieldMapper.NAME);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
Automaton automaton = Automatons.patterns(Arrays.asList("fieldA", SourceFieldMapper.NAME));
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton));
// see only one field
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -802,8 +860,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("id");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("id")));
assertEquals(2, ir.numDocs());
assertEquals(1, ir.leaves().size());
@ -837,8 +894,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
Set<String> fields = Collections.singleton("fieldB");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldB")));
// sees no fields
assertNull(ir.getTermVectors(0));
@ -857,8 +913,7 @@ public class FieldSubsetReaderTests extends ESTestCase {
iw.addDocument(new Document());
// open reader
Set<String> fields = Collections.singleton("fieldA");
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), fields);
DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA")));
// see no fields
LeafReader segmentReader = ir.leaves().get(0).reader();
@ -887,9 +942,10 @@ public class FieldSubsetReaderTests extends ESTestCase {
IndexWriter iw = new IndexWriter(dir, iwc);
iw.close();
final DirectoryReader directoryReader = FieldSubsetReader.wrap(DirectoryReader.open(dir), Collections.emptySet());
final DirectoryReader directoryReader = FieldSubsetReader.wrap(DirectoryReader.open(dir),
new CharacterRunAutomaton(Automata.makeString("fieldA")));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> FieldSubsetReader.wrap(directoryReader,
Collections.emptySet()));
new CharacterRunAutomaton(Automata.makeString("fieldA"))));
assertThat(e.getMessage(), equalTo("Can't wrap [class org.elasticsearch.xpack.security.authz.accesscontrol" +
".FieldSubsetReader$FieldSubsetDirectoryReader] twice"));
directoryReader.close();

View File

@ -89,7 +89,7 @@ public class SecurityIndexSearcherWrapperIntegrationTests extends ESTestCase {
});
XPackLicenseState licenseState = mock(XPackLicenseState.class);
when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true);
SecurityIndexSearcherWrapper wrapper = new SecurityIndexSearcherWrapper(indexSettings, s -> queryShardContext, mapperService,
SecurityIndexSearcherWrapper wrapper = new SecurityIndexSearcherWrapper(indexSettings, s -> queryShardContext,
bitsetFilterCache, threadContext, licenseState, scriptService) {
@Override

View File

@ -38,7 +38,6 @@ import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.SparseFixedBitSet;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
@ -46,12 +45,13 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.mapper.AllFieldMapper;
import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParentFieldMapper;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.BoostingQueryBuilder;
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
@ -66,8 +66,6 @@ import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.TermsLookup;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.script.ExecutableScript;
@ -94,11 +92,9 @@ import java.util.IdentityHashMap;
import java.util.Map;
import java.util.Set;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.xpack.security.authz.accesscontrol.SecurityIndexSearcherWrapper.intersectScorerAndRoleBits;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
@ -113,8 +109,17 @@ import static org.mockito.Mockito.when;
public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
private static final Set<String> META_FIELDS_WITHOUT_ALL;
static {
final Set<String> metaFieldsWithoutAll = new HashSet<>(Arrays.asList(MapperService.getAllMetaFields()));
metaFieldsWithoutAll.add(SourceFieldMapper.NAME);
metaFieldsWithoutAll.add(FieldNamesFieldMapper.NAME);
metaFieldsWithoutAll.add(SeqNoFieldMapper.NAME);
metaFieldsWithoutAll.remove(AllFieldMapper.NAME);
META_FIELDS_WITHOUT_ALL = Collections.unmodifiableSet(metaFieldsWithoutAll);
}
private ThreadContext threadContext;
private MapperService mapperService;
private ScriptService scriptService;
private SecurityIndexSearcherWrapper securityIndexSearcherWrapper;
private ElasticsearchDirectoryReader esIn;
@ -126,12 +131,6 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
Index index = new Index("_index", "testUUID");
scriptService = mock(ScriptService.class);
indexSettings = IndexSettingsModule.newIndexSettings(index, Settings.EMPTY);
NamedAnalyzer namedAnalyzer = new NamedAnalyzer("default", AnalyzerScope.INDEX, new StandardAnalyzer());
IndexAnalyzers indexAnalyzers = new IndexAnalyzers(indexSettings, namedAnalyzer, namedAnalyzer, namedAnalyzer,
Collections.emptyMap(), Collections.emptyMap());
SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap());
mapperService = new MapperService(indexSettings, indexAnalyzers, xContentRegistry(), similarityService,
new IndicesModule(emptyList()).getMapperRegistry(), () -> null);
ShardId shardId = new ShardId(index, 0);
licenseState = mock(XPackLicenseState.class);
@ -155,14 +154,8 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
}
public void testDefaultMetaFields() throws Exception {
XContentBuilder mappingSource = jsonBuilder().startObject().startObject("type")
.startObject("properties")
.endObject()
.endObject().endObject();
mapperService.merge("type", new CompressedXContent(mappingSource.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
securityIndexSearcherWrapper =
new SecurityIndexSearcherWrapper(indexSettings, null, mapperService, null, threadContext, licenseState, scriptService) {
new SecurityIndexSearcherWrapper(indexSettings, null, null, threadContext, licenseState, scriptService) {
@Override
protected IndicesAccessControl getIndicesAccessControl() {
IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true,
@ -173,129 +166,64 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
FieldSubsetReader.FieldSubsetDirectoryReader result =
(FieldSubsetReader.FieldSubsetDirectoryReader) securityIndexSearcherWrapper.wrap(esIn);
assertThat(result.getFieldNames().size(), equalTo(13));
assertThat(result.getFieldNames().contains("_uid"), is(true));
assertThat(result.getFieldNames().contains("_id"), is(true));
assertThat(result.getFieldNames().contains("_version"), is(true));
assertThat(result.getFieldNames().contains("_type"), is(true));
assertThat(result.getFieldNames().contains("_source"), is(true));
assertThat(result.getFieldNames().contains("_routing"), is(true));
assertThat(result.getFieldNames().contains("_parent"), is(true));
assertThat(result.getFieldNames().contains("_timestamp"), is(true));
assertThat(result.getFieldNames().contains("_ttl"), is(true));
assertThat(result.getFieldNames().contains("_size"), is(true));
assertThat(result.getFieldNames().contains("_index"), is(true));
assertThat(result.getFieldNames().contains("_field_names"), is(true));
assertThat(result.getFieldNames().contains("_seq_no"), is(true));
assertThat(result.getFilter().run("_uid"), is(true));
assertThat(result.getFilter().run("_id"), is(true));
assertThat(result.getFilter().run("_version"), is(true));
assertThat(result.getFilter().run("_type"), is(true));
assertThat(result.getFilter().run("_source"), is(true));
assertThat(result.getFilter().run("_routing"), is(true));
assertThat(result.getFilter().run("_parent"), is(true));
assertThat(result.getFilter().run("_timestamp"), is(true));
assertThat(result.getFilter().run("_ttl"), is(true));
assertThat(result.getFilter().run("_size"), is(true));
assertThat(result.getFilter().run("_index"), is(true));
assertThat(result.getFilter().run("_field_names"), is(true));
assertThat(result.getFilter().run("_seq_no"), is(true));
// _all contains actual user data and therefore can't be included by default
assertThat(result.getFieldNames().contains("_all"), is(false));
assertThat(result.getFilter().run("_all"), is(false));
assertThat(result.getFilter().run("_some_random_meta_field"), is(true));
assertThat(result.getFilter().run("some_random_regular_field"), is(false));
}
public void testWrapReaderWhenFeatureDisabled() throws Exception {
when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(false);
securityIndexSearcherWrapper =
new SecurityIndexSearcherWrapper(indexSettings, null, mapperService, null, threadContext, licenseState, scriptService);
new SecurityIndexSearcherWrapper(indexSettings, null, null, threadContext, licenseState, scriptService);
DirectoryReader reader = securityIndexSearcherWrapper.wrap(esIn);
assertThat(reader, sameInstance(esIn));
}
public void testWrapSearcherWhenFeatureDisabled() throws Exception {
securityIndexSearcherWrapper =
new SecurityIndexSearcherWrapper(indexSettings, null, mapperService, null, threadContext, licenseState, scriptService);
new SecurityIndexSearcherWrapper(indexSettings, null, null, threadContext, licenseState, scriptService);
IndexSearcher indexSearcher = new IndexSearcher(esIn);
IndexSearcher result = securityIndexSearcherWrapper.wrap(indexSearcher);
assertThat(result, sameInstance(indexSearcher));
}
public void testWildcards() throws Exception {
XContentBuilder mappingSource = jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("field1_a").field("type", "text").endObject()
.startObject("field1_b").field("type", "text").endObject()
.startObject("field1_c").field("type", "text").endObject()
.startObject("field2_a").field("type", "text").endObject()
.startObject("field2_b").field("type", "text").endObject()
.startObject("field2_c").field("type", "text").endObject()
.endObject().endObject().endObject();
mapperService.merge("type", new CompressedXContent(mappingSource.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
assertResolvedFields("field1*", "field1_a", "field1_b", "field1_c");
assertResolvedFields("field2*", "field2_a", "field2_b", "field2_c");
Set<String> expected = new HashSet<>(META_FIELDS_WITHOUT_ALL);
expected.add("field1_a");
expected.add("field1_b");
expected.add("field1_c");
assertResolved(new FieldPermissions(new String[] {"field1*"}, null), expected, "field", "field2");
}
public void testDotNotion() throws Exception {
XContentBuilder mappingSource = jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("foo")
.field("type", "object")
.startObject("properties")
.startObject("bar").field("type", "text").endObject()
.startObject("baz").field("type", "text").endObject()
.endObject()
.endObject()
.startObject("bar")
.field("type", "object")
.startObject("properties")
.startObject("foo").field("type", "text").endObject()
.startObject("baz").field("type", "text").endObject()
.endObject()
.endObject()
.startObject("baz")
.field("type", "object")
.startObject("properties")
.startObject("bar").field("type", "text").endObject()
.startObject("foo").field("type", "text").endObject()
.endObject()
.endObject()
.endObject().endObject().endObject();
mapperService.merge("type", new CompressedXContent(mappingSource.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
assertResolvedFields("foo.bar", "foo.bar");
assertResolvedFields("bar.baz", "bar.baz");
assertResolvedFields("foo.*", "foo.bar", "foo.baz");
assertResolvedFields("baz.*", "baz.bar", "baz.foo");
Set<String> expected = new HashSet<>(META_FIELDS_WITHOUT_ALL);
expected.add("foo.bar");
assertResolved(new FieldPermissions(new String[] {"foo.bar"}, null), expected, "foo", "foo.baz", "bar.foo");
expected = new HashSet<>(META_FIELDS_WITHOUT_ALL);
expected.add("foo.bar");
assertResolved(new FieldPermissions(new String[] {"foo.*"}, null), expected, "foo", "bar");
}
public void testParentChild() throws Exception {
XContentBuilder mappingSource = jsonBuilder().startObject().startObject("parent1")
.startObject("properties")
.startObject("field").field("type", "text").endObject()
.endObject()
.endObject().endObject();
mapperService.merge("parent1", new CompressedXContent(mappingSource.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
mappingSource = jsonBuilder().startObject().startObject("child1")
.startObject("properties")
.startObject("field").field("type", "text").endObject()
.endObject()
.startObject("_parent")
.field("type", "parent1")
.endObject()
.endObject().endObject();
mapperService.merge("child1", new CompressedXContent(mappingSource.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
mappingSource = jsonBuilder().startObject().startObject("child2")
.startObject("properties")
.startObject("field").field("type", "text").endObject()
.endObject()
.startObject("_parent")
.field("type", "parent1")
.endObject()
.endObject().endObject();
mapperService.merge("child2", new CompressedXContent(mappingSource.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
mappingSource = jsonBuilder().startObject().startObject("parent2")
.startObject("properties")
.startObject("field").field("type", "text").endObject()
.endObject()
.endObject().endObject();
mapperService.merge("parent2", new CompressedXContent(mappingSource.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
mappingSource = jsonBuilder().startObject().startObject("child3")
.startObject("properties")
.startObject("field").field("type", "text").endObject()
.endObject()
.startObject("_parent")
.field("type", "parent2")
.endObject()
.endObject().endObject();
mapperService.merge("child3", new CompressedXContent(mappingSource.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
assertResolvedFields("field", "field", ParentFieldMapper.joinField("parent1"), ParentFieldMapper.joinField("parent2"));
Set<String> expected = new HashSet<>(META_FIELDS_WITHOUT_ALL);
expected.add(ParentFieldMapper.joinField("parent1"));
expected.add("foo");
assertResolved(new FieldPermissions(new String[] {"foo"}, null), expected, "bar");
}
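The notable assertion above is that granting any field also grants the synthetic _parent#<type> join fields, because parent/child queries stop working without them. A sketch of what that effectively adds to the allowed-fields automaton (assuming ParentFieldMapper.joinField(type) resolves to "_parent#" + type, as it does in this codebase):

import java.util.Arrays;

import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.Operations;

public class JoinFieldGrantSketch {
    public static void main(String[] args) {
        // start from the single granted field, then union in one join field per parent type
        Automaton granted = Automata.makeString("field");
        for (String parentType : Arrays.asList("parent1", "parent2")) {
            granted = Operations.union(granted, Automata.makeString("_parent#" + parentType));
        }
        CharacterRunAutomaton matcher = new CharacterRunAutomaton(granted);
        System.out.println(matcher.run("_parent#parent1")); // true although only "field" was granted
        System.out.println(matcher.run("_parent#other"));   // false
    }
}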
public void testDelegateSimilarity() throws Exception {
@@ -313,7 +241,7 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
DirectoryReader directoryReader = DocumentSubsetReader.wrap(esIn, bitsetFilterCache, new MatchAllDocsQuery());
IndexSearcher indexSearcher = new IndexSearcher(directoryReader);
securityIndexSearcherWrapper =
new SecurityIndexSearcherWrapper(indexSettings, null, mapperService, null, threadContext, licenseState, scriptService);
new SecurityIndexSearcherWrapper(indexSettings, null, null, threadContext, licenseState, scriptService);
IndexSearcher result = securityIndexSearcherWrapper.wrap(indexSearcher);
assertThat(result, not(sameInstance(indexSearcher)));
assertThat(result.getSimilarity(true), sameInstance(indexSearcher.getSimilarity(true)));
@@ -322,7 +250,7 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
public void testIntersectScorerAndRoleBits() throws Exception {
securityIndexSearcherWrapper =
new SecurityIndexSearcherWrapper(indexSettings, null, mapperService, null, threadContext, licenseState, scriptService);
new SecurityIndexSearcherWrapper(indexSettings, null, null, threadContext, licenseState, scriptService);
final Directory directory = newDirectory();
IndexWriter iw = new IndexWriter(
directory,
@@ -399,135 +327,95 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
directory.close();
}
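// asserts that every field in expected is granted, and that each field in fieldsToTest is granted iff it also appears in expected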
private void assertResolved(FieldPermissions permissions, Set<String> expected, String... fieldsToTest) {
for (String field : expected) {
assertThat(field, permissions.grantsAccessTo(field), is(true));
}
for (String field : fieldsToTest) {
assertThat(field, permissions.grantsAccessTo(field), is(expected.contains(field)));
}
}
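A minimal usage sketch of the API these helpers exercise, assuming the FieldPermissions(String[] granted, String[] denied) constructor and grantsAccessTo shown in this diff (the package path is a guess and requires the x-pack classes on the classpath):

import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;

public class FieldPermissionsSketch {
    public static void main(String[] args) {
        // grant everything under field1*, then deny field1_b; denied patterns
        // are expected to be a subset of the granted ones
        FieldPermissions permissions =
                new FieldPermissions(new String[] { "field1*" }, new String[] { "field1_b" });
        System.out.println(permissions.grantsAccessTo("field1_a")); // true
        System.out.println(permissions.grantsAccessTo("field1_b")); // false, explicitly denied
        System.out.println(permissions.grantsAccessTo("field2"));   // false, never granted
    }
}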
public void testFieldPermissionsWithFieldExceptions() throws Exception {
XContentBuilder mappingSource = jsonBuilder().startObject().startObject("some_type")
.startObject("properties")
.startObject("field1").field("type", "text").endObject()
.startObject("field2").field("type", "text").endObject()
.startObject("xfield3").field("type", "text").endObject()
.endObject()
.endObject().endObject();
mapperService.merge("some_type", new CompressedXContent(mappingSource.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
securityIndexSearcherWrapper =
new SecurityIndexSearcherWrapper(indexSettings, null, mapperService, null, threadContext, licenseState, null);
Set<String> allowedMetaFields = securityIndexSearcherWrapper.getAllowedMetaFields();
new SecurityIndexSearcherWrapper(indexSettings, null, null, threadContext, licenseState, null);
String[] grantedFields = new String[]{};
String[] deniedFields;
Set<String> expected = new HashSet<>(META_FIELDS_WITHOUT_ALL);
// a role with an empty granted-fields array implies access to no fields except the meta fields
Set<String> resolvedAllowedFields = new FieldPermissions(grantedFields, randomBoolean() ? null : new String[]{})
.resolveAllowedFields(allowedMetaFields, mapperService);
Set<String> expectedResultSet = new HashSet<>(allowedMetaFields);
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
assertResolved(new FieldPermissions(grantedFields, randomBoolean() ? null : new String[]{}),
expected, "foo", "bar");
// make sure access to the meta fields cannot be denied
deniedFields = allowedMetaFields.toArray(new String[allowedMetaFields.size()]);
resolvedAllowedFields = new FieldPermissions(null, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("field1", "field2", "xfield3"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
deniedFields = META_FIELDS_WITHOUT_ALL.toArray(new String[0]);
assertResolved(new FieldPermissions(null, deniedFields),
new HashSet<>(Arrays.asList("foo", "bar", "_some_plugin_meta_field")));
// check we can add all fields with *
grantedFields = new String[]{"*"};
resolvedAllowedFields = new FieldPermissions(grantedFields, randomBoolean() ? null : new String[]{})
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("field1", "field2", "xfield3"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
expected = new HashSet<>(META_FIELDS_WITHOUT_ALL);
expected.add(AllFieldMapper.NAME);
expected.add("foo");
assertResolved(new FieldPermissions(grantedFields, randomBoolean() ? null : new String[]{}), expected);
// same with null
resolvedAllowedFields = new FieldPermissions(grantedFields, randomBoolean() ? null : new String[]{})
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("field1", "field2", "xfield3"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
grantedFields = null;
assertResolved(new FieldPermissions(grantedFields, randomBoolean() ? null : new String[]{}), expected);
// check we remove only excluded fields
grantedFields = new String[]{"*"};
deniedFields = new String[]{"xfield3"};
resolvedAllowedFields = new FieldPermissions(grantedFields, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("field1", "field2"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
deniedFields = new String[]{"xfield"};
expected = new HashSet<>(META_FIELDS_WITHOUT_ALL);
expected.add("foo");
assertResolved(new FieldPermissions(grantedFields, deniedFields), expected, "xfield", "_all");
// same with null
deniedFields = new String[]{"field1"};
resolvedAllowedFields = new FieldPermissions(null, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("field2", "xfield3"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
grantedFields = null;
assertResolved(new FieldPermissions(grantedFields, deniedFields), expected, "xfield", "_all");
// further combinations of granted and denied patterns
grantedFields = new String[]{"field*"};
deniedFields = new String[]{"field1", "field2"};
resolvedAllowedFields = new FieldPermissions(grantedFields, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
expected = new HashSet<>(META_FIELDS_WITHOUT_ALL);
expected.add("field3");
assertResolved(new FieldPermissions(grantedFields, deniedFields), expected, "field1", "field2", "_all");
grantedFields = new String[]{"field1", "field2"};
deniedFields = new String[]{"field2"};
resolvedAllowedFields = new FieldPermissions(grantedFields, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("field1"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
expected = new HashSet<>(META_FIELDS_WITHOUT_ALL);
expected.add("field1");
assertResolved(new FieldPermissions(grantedFields, deniedFields), expected, "field1", "field2", "_all");
grantedFields = new String[]{"field*"};
deniedFields = new String[]{"field2"};
resolvedAllowedFields = new FieldPermissions(grantedFields, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("field1"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
expected = new HashSet<>(META_FIELDS_WITHOUT_ALL);
expected.add("field1");
assertResolved(new FieldPermissions(grantedFields, deniedFields), expected, "field2", "_all");
deniedFields = new String[]{"field*"};
resolvedAllowedFields = new FieldPermissions(null, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("xfield3"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
assertResolved(new FieldPermissions(grantedFields, deniedFields),
META_FIELDS_WITHOUT_ALL, "field1", "field2");
// empty array for allowed fields always means no field is allowed
grantedFields = new String[]{};
deniedFields = new String[]{};
resolvedAllowedFields = new FieldPermissions(grantedFields, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
assertResolved(new FieldPermissions(grantedFields, deniedFields),
META_FIELDS_WITHOUT_ALL, "field1", "field2");
// make sure the _all field can be explicitly allowed alongside *
grantedFields = new String[]{"_all", "*"};
deniedFields = randomBoolean() ? null : new String[]{};
resolvedAllowedFields = new FieldPermissions(grantedFields, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("field1", "field2", "xfield3", "_all"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
expected = new HashSet<>(META_FIELDS_WITHOUT_ALL);
expected.add("_all");
expected.add("field1");
assertResolved(new FieldPermissions(grantedFields, deniedFields), expected);
// make sure the _all field can be explicitly allowed on its own
grantedFields = new String[]{"_all"};
deniedFields = randomBoolean() ? null : new String[]{};
resolvedAllowedFields = new FieldPermissions(grantedFields, deniedFields)
.resolveAllowedFields(allowedMetaFields, mapperService);
expectedResultSet = new HashSet<>(allowedMetaFields);
expectedResultSet.addAll(Arrays.asList("_all"));
assertThat(resolvedAllowedFields.size(), equalTo(expectedResultSet.size()));
assertThat(resolvedAllowedFields, containsInAnyOrder(expectedResultSet.toArray()));
expected = new HashSet<>(META_FIELDS_WITHOUT_ALL);
expected.add("_all");
assertResolved(new FieldPermissions(grantedFields, deniedFields), expected, "field1", "_source");
}
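Internally, a grant with exceptions like the cases above reduces to automaton subtraction: the denied patterns are subtracted from the granted ones before the run automaton is built. A hedged sketch in plain Lucene that mirrors (but does not reuse) the x-pack implementation:

import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.Operations;

public class GrantedMinusDeniedSketch {
    public static void main(String[] args) {
        // granted: field*, denied: field2
        Automaton granted = Operations.concatenate(
                Automata.makeString("field"), Automata.makeAnyString());
        Automaton denied = Automata.makeString("field2");
        Automaton allowed = Operations.minus(
                granted, denied, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
        CharacterRunAutomaton matcher = new CharacterRunAutomaton(allowed);
        System.out.println(matcher.run("field1")); // true
        System.out.println(matcher.run("field2")); // false, carved out by the exception
    }
}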
private SparseFixedBitSet query(LeafReaderContext leaf, String field, String value) throws IOException {
@@ -540,26 +428,6 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
return sparseFixedBitSet;
}
private void assertResolvedFields(String expression, String... expectedFields) {
securityIndexSearcherWrapper =
new SecurityIndexSearcherWrapper(indexSettings, null, mapperService, null, threadContext, licenseState, scriptService) {
@Override
protected IndicesAccessControl getIndicesAccessControl() {
IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true,
new FieldPermissions(new String[]{expression}, null), null);
return new IndicesAccessControl(true, singletonMap("_index", indexAccessControl));
}
};
FieldSubsetReader.FieldSubsetDirectoryReader result =
(FieldSubsetReader.FieldSubsetDirectoryReader) securityIndexSearcherWrapper.wrap(esIn);
assertThat(result.getFieldNames().size() - securityIndexSearcherWrapper.getAllowedMetaFields().size(),
equalTo(expectedFields.length));
for (String expectedField : expectedFields) {
assertThat(result.getFieldNames().contains(expectedField), is(true));
}
}
public void testIndexSearcherWrapperSparseNoDeletions() throws IOException {
doTestIndexSearcherWrapper(true, false);
}
@@ -580,7 +448,7 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
User user = new User("_username", new String[]{"role1", "role2"}, "_full_name", "_email",
Collections.singletonMap("key", "value"), true);
securityIndexSearcherWrapper =
new SecurityIndexSearcherWrapper(indexSettings, null, mapperService, null, threadContext, licenseState, scriptService) {
new SecurityIndexSearcherWrapper(indexSettings, null, null, threadContext, licenseState, scriptService) {
@Override
protected User getUser() {
@@ -621,7 +489,7 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
public void testSkipTemplating() throws Exception {
securityIndexSearcherWrapper =
new SecurityIndexSearcherWrapper(indexSettings, null, mapperService, null, threadContext, licenseState, scriptService);
new SecurityIndexSearcherWrapper(indexSettings, null, null, threadContext, licenseState, scriptService);
XContentBuilder builder = jsonBuilder();
BytesReference querySource = new TermQueryBuilder("field", "value").toXContent(builder, ToXContent.EMPTY_PARAMS).bytes();
BytesReference result = securityIndexSearcherWrapper.evaluateTemplate(querySource);
@@ -815,7 +683,7 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
Client client = mock(Client.class);
when(client.settings()).thenReturn(Settings.EMPTY);
final long nowInMillis = randomNonNegativeLong();
QueryRewriteContext context = new QueryRewriteContext(null, mapperService, scriptService, xContentRegistry(), client, null,
QueryRewriteContext context = new QueryRewriteContext(null, null, scriptService, xContentRegistry(), client, null,
() -> nowInMillis);
QueryBuilder queryBuilder1 = new TermsQueryBuilder("field", "val1", "val2");
SecurityIndexSearcherWrapper.failIfQueryUsesClient(scriptService, queryBuilder1, context);
View File
@@ -96,10 +96,9 @@ public class FieldPermissionsCacheTests extends ESTestCase {
new FieldPermissions(allowed2, denied2);
mergedFieldPermissions =
fieldPermissionsCache.getFieldPermissions(Arrays.asList(fieldPermissions1, fieldPermissions2));
assertTrue(fieldPermissions1.isAllFieldIsAllowed());
assertFalse(fieldPermissions2.isAllFieldIsAllowed());
assertTrue(fieldPermissions1.grantsAccessTo("_all"));
assertFalse(fieldPermissions2.grantsAccessTo("_all"));
assertTrue(mergedFieldPermissions.grantsAccessTo("_all"));
assertTrue(mergedFieldPermissions.isAllFieldIsAllowed());
allowed1 = new String[] { "a*" };
allowed2 = new String[] { "b*" };