Merge pull request elastic/elasticsearch#2736 from rmuir/fls_cache
support lucene query cache when using FLS
Original commit: elastic/x-pack-elasticsearch@5b7054b702
commit 0b52cedf60
@@ -0,0 +1,91 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.security.authz.accesscontrol;

import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.DocValuesNumbersQuery;
import org.apache.lucene.search.DocValuesRangeQuery;
import org.apache.lucene.search.FieldValueQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.PointInSetQuery;
import org.apache.lucene.search.PointRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SynonymQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.spans.SpanTermQuery;

import java.util.Set;

/**
 * Extracts fields from a query, or throws UnsupportedOperationException.
 * <p>
 * Lucene queries have {@link Weight#extractTerms}, but this is really geared at things
 * such as highlighting, not security. For example, terms in a Boolean {@code MUST_NOT} clause
 * are not included, TermsQuery doesn't implement the method as it could be terribly slow, etc.
 */
class FieldExtractor {

    /**
     * Populates {@code fields} with the set of fields used by the query, or throws
     * UnsupportedOperationException if it doesn't know how to do this.
     */
    static void extractFields(Query query, Set<String> fields) throws UnsupportedOperationException {
        // NOTE: we expect a rewritten query, so we only need logic for "atomic" queries here:
        if (query instanceof BooleanQuery) {
            // extract from all clauses
            BooleanQuery q = (BooleanQuery) query;
            for (BooleanClause clause : q.clauses()) {
                extractFields(clause.getQuery(), fields);
            }
        } else if (query instanceof DisjunctionMaxQuery) {
            // extract from all clauses
            DisjunctionMaxQuery q = (DisjunctionMaxQuery) query;
            for (Query clause : q.getDisjuncts()) {
                extractFields(clause, fields);
            }
        } else if (query instanceof SpanTermQuery) {
            // we just do SpanTerm; other spans are trickier, as they could contain
            // the evil FieldMaskingSpanQuery, so SpanQuery.getField cannot be trusted.
            fields.add(((SpanTermQuery) query).getField());
        } else if (query instanceof TermQuery) {
            fields.add(((TermQuery) query).getTerm().field());
        } else if (query instanceof SynonymQuery) {
            SynonymQuery q = (SynonymQuery) query;
            // all terms must have the same field
            fields.add(q.getTerms().get(0).field());
        } else if (query instanceof PhraseQuery) {
            PhraseQuery q = (PhraseQuery) query;
            // all terms must have the same field
            fields.add(q.getTerms()[0].field());
        } else if (query instanceof MultiPhraseQuery) {
            MultiPhraseQuery q = (MultiPhraseQuery) query;
            // all terms must have the same field
            fields.add(q.getTermArrays()[0][0].field());
        } else if (query instanceof PointRangeQuery) {
            fields.add(((PointRangeQuery) query).getField());
        } else if (query instanceof PointInSetQuery) {
            fields.add(((PointInSetQuery) query).getField());
        } else if (query instanceof FieldValueQuery) {
            fields.add(((FieldValueQuery) query).getField());
        } else if (query instanceof DocValuesNumbersQuery) {
            fields.add(((DocValuesNumbersQuery) query).getField());
        } else if (query instanceof DocValuesRangeQuery) {
            fields.add(((DocValuesRangeQuery) query).getField());
        } else if (query instanceof MatchAllDocsQuery) {
            // no field
        } else if (query instanceof MatchNoDocsQuery) {
            // no field
        } else {
            throw new UnsupportedOperationException(); // we don't know how to get the fields from it
        }
    }
}
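For context, here is a minimal usage sketch of the extractor above (not part of the commit; the package placement is required only because FieldExtractor is package-private, and the class name FieldExtractorUsageSketch is hypothetical). It mirrors the boolean-query test case further down: fields from MUST_NOT clauses are collected too, which is precisely what a highlighting-oriented extraction such as Weight#extractTerms would miss, and query types the extractor does not recognize fail closed with UnsupportedOperationException.

// Illustrative sketch only; FieldExtractorUsageSketch is a hypothetical name and not part of this commit.
package org.elasticsearch.xpack.security.authz.accesscontrol;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.WildcardQuery;

import java.util.HashSet;
import java.util.Set;

public class FieldExtractorUsageSketch {
    public static void main(String[] args) {
        Set<String> fields = new HashSet<>();

        // Fields are collected from every clause, including MUST_NOT,
        // so the security check sees "secret" as well as "title".
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        builder.add(new TermQuery(new Term("title", "elasticsearch")), BooleanClause.Occur.MUST);
        builder.add(new TermQuery(new Term("secret", "x")), BooleanClause.Occur.MUST_NOT);
        FieldExtractor.extractFields(builder.build(), fields);
        System.out.println(fields); // contains "title" and "secret" (set order not guaranteed)

        // An un-rewritten multi-term query is not one of the handled "atomic" cases, so it fails closed:
        try {
            FieldExtractor.extractFields(new WildcardQuery(new Term("title", "elastic*")), fields);
        } catch (UnsupportedOperationException e) {
            System.out.println("unknown query type, caller must not cache");
        }
    }
}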
@@ -16,8 +16,12 @@ import org.elasticsearch.indices.IndicesQueryCache;
 import org.elasticsearch.search.internal.ShardSearchRequest;
 import org.elasticsearch.xpack.security.authz.InternalAuthorizationService;
 
+import java.util.HashSet;
+import java.util.Set;
+
 /**
- * Opts out of the query cache if field level security is active for the current request.
+ * Opts out of the query cache if field level security is active for the current request,
+ * and it's unsafe to cache.
  */
 public final class OptOutQueryCache extends AbstractIndexComponent implements QueryCache {
 
@@ -64,13 +68,41 @@ public final class OptOutQueryCache extends AbstractIndexComponent implements QueryCache {
 
         IndicesAccessControl.IndexAccessControl indexAccessControl = indicesAccessControl.getIndexPermissions(indexName);
         if (indexAccessControl != null && indexAccessControl.getFields() != null) {
-            logger.debug("opting out of the query cache. request for index [{}] has field level security enabled", indexName);
-            // If in the future there is a Query#extractFields() then we can be smart on when to skip the query cache.
-            // (only cache if all fields in the query also are defined in the role)
-            return weight;
+            if (cachingIsSafe(weight, indexAccessControl)) {
+                logger.trace("not opting out of the query cache. request for index [{}] is safe to cache", indexName);
+                return indicesQueryCache.doCache(weight, policy);
+            } else {
+                logger.trace("opting out of the query cache. request for index [{}] is unsafe to cache", indexName);
+                return weight;
+            }
         } else {
             logger.trace("not opting out of the query cache. request for index [{}] has field level security disabled", indexName);
             return indicesQueryCache.doCache(weight, policy);
         }
     }
+
+    /**
+     * Returns true if it's safe to use the query cache for this query.
+     */
+    static boolean cachingIsSafe(Weight weight, IndicesAccessControl.IndexAccessControl permissions) {
+        // support caching for common queries, by inspecting the field
+        // TODO: If in the future there is a Query#extractFields() then we can do a better job
+        Set<String> fields = new HashSet<>();
+        try {
+            FieldExtractor.extractFields(weight.getQuery(), fields);
+        } catch (UnsupportedOperationException ok) {
+            // we don't know how to safely extract the fields of this query, don't cache.
+            return false;
+        }
+
+        // we successfully extracted the set of fields: check each one
+        for (String field : fields) {
+            // don't cache any internal fields (e.g. _field_names), these are complicated.
+            if (field.startsWith("_") || permissions.getFields().contains(field) == false) {
+                return false;
+            }
+        }
+        // we can cache, all fields are ok
+        return true;
+    }
 }
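To make the new caching rule concrete, here is a small self-contained sketch of the decision that cachingIsSafe encodes (assumptions: plain java.util sets stand in for the extracted query fields and for the role's field level security grant; the class and method names are hypothetical and not part of the commit). The query cache is used only when every extracted field is non-internal and covered by the grant; in the real code, any extraction failure additionally falls back to not caching.

// Hypothetical, self-contained sketch mirroring the cachingIsSafe rule above, without the
// X-Pack types (Weight, IndexAccessControl); all names here are illustrative only.
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class FlsCacheRuleSketch {
    // Cache only if every field the query touches is non-internal and granted by field level security.
    static boolean wouldUseQueryCache(Set<String> queryFields, Set<String> grantedFields) {
        for (String field : queryFields) {
            if (field.startsWith("_") || grantedFields.contains(field) == false) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        Set<String> granted = new HashSet<>(Arrays.asList("title", "body"));
        System.out.println(wouldUseQueryCache(new HashSet<>(Arrays.asList("title")), granted));        // true: field is granted
        System.out.println(wouldUseQueryCache(new HashSet<>(Arrays.asList("title", "ssn")), granted)); // false: "ssn" not granted
        System.out.println(wouldUseQueryCache(new HashSet<>(Arrays.asList("_field_names")), granted)); // false: internal field
    }
}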
@@ -0,0 +1,138 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.security.authz.accesscontrol;

import org.apache.lucene.document.IntPoint;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.AssertingQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.DocValuesNumbersQuery;
import org.apache.lucene.search.DocValuesRangeQuery;
import org.apache.lucene.search.FieldValueQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.SynonymQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.elasticsearch.test.ESTestCase;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

/** Simple tests for query field extraction */
public class FieldExtractorTests extends ESTestCase {

    public void testBoolean() {
        Set<String> fields = new HashSet<>();
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        builder.add(new TermQuery(new Term("foo", "bar")), BooleanClause.Occur.MUST);
        builder.add(new TermQuery(new Term("no", "baz")), BooleanClause.Occur.MUST_NOT);
        FieldExtractor.extractFields(builder.build(), fields);
        assertEquals(asSet("foo", "no"), fields);
    }

    public void testDisjunctionMax() {
        Set<String> fields = new HashSet<>();
        DisjunctionMaxQuery query = new DisjunctionMaxQuery(Arrays.asList(
            new TermQuery(new Term("one", "bar")),
            new TermQuery(new Term("two", "baz"))
        ), 1.0F);
        FieldExtractor.extractFields(query, fields);
        assertEquals(asSet("one", "two"), fields);
    }

    public void testSpanTerm() {
        Set<String> fields = new HashSet<>();
        FieldExtractor.extractFields(new SpanTermQuery(new Term("foo", "bar")), fields);
        assertEquals(asSet("foo"), fields);
    }

    public void testTerm() {
        Set<String> fields = new HashSet<>();
        FieldExtractor.extractFields(new TermQuery(new Term("foo", "bar")), fields);
        assertEquals(asSet("foo"), fields);
    }

    public void testSynonym() {
        Set<String> fields = new HashSet<>();
        SynonymQuery query = new SynonymQuery(new Term("foo", "bar"), new Term("foo", "baz"));
        FieldExtractor.extractFields(query, fields);
        assertEquals(asSet("foo"), fields);
    }

    public void testPhrase() {
        Set<String> fields = new HashSet<>();
        PhraseQuery.Builder builder = new PhraseQuery.Builder();
        builder.add(new Term("foo", "bar"));
        builder.add(new Term("foo", "baz"));
        FieldExtractor.extractFields(builder.build(), fields);
        assertEquals(asSet("foo"), fields);
    }

    public void testMultiPhrase() {
        Set<String> fields = new HashSet<>();
        MultiPhraseQuery.Builder builder = new MultiPhraseQuery.Builder();
        builder.add(new Term("foo", "bar"));
        builder.add(new Term[] { new Term("foo", "baz"), new Term("foo", "baz2") });
        FieldExtractor.extractFields(builder.build(), fields);
        assertEquals(asSet("foo"), fields);
    }

    public void testPointRange() {
        Set<String> fields = new HashSet<>();
        FieldExtractor.extractFields(IntPoint.newRangeQuery("foo", 3, 4), fields);
        assertEquals(asSet("foo"), fields);
    }

    public void testPointSet() {
        Set<String> fields = new HashSet<>();
        FieldExtractor.extractFields(IntPoint.newSetQuery("foo", 3, 4, 5), fields);
        assertEquals(asSet("foo"), fields);
    }

    public void testFieldValue() {
        Set<String> fields = new HashSet<>();
        FieldExtractor.extractFields(new FieldValueQuery("foo"), fields);
        assertEquals(asSet("foo"), fields);
    }

    public void testDocValuesNumbers() {
        Set<String> fields = new HashSet<>();
        FieldExtractor.extractFields(new DocValuesNumbersQuery("foo", 5L), fields);
        assertEquals(asSet("foo"), fields);
    }

    public void testDocValuesRange() {
        Set<String> fields = new HashSet<>();
        FieldExtractor.extractFields(DocValuesRangeQuery.newLongRange("foo", 1L, 2L, true, true), fields);
        assertEquals(asSet("foo"), fields);
    }

    public void testMatchAllDocs() {
        Set<String> fields = new HashSet<>();
        FieldExtractor.extractFields(new MatchAllDocsQuery(), fields);
        assertEquals(Collections.emptySet(), fields);
    }

    public void testMatchNoDocs() {
        Set<String> fields = new HashSet<>();
        FieldExtractor.extractFields(new MatchNoDocsQuery(), fields);
        assertEquals(Collections.emptySet(), fields);
    }

    public void testUnsupported() {
        Set<String> fields = new HashSet<>();
        expectThrows(UnsupportedOperationException.class, () -> {
            FieldExtractor.extractFields(new AssertingQuery(random(), new MatchAllDocsQuery()), fields);
        });
    }
}