Upgrade to a snapshot of Lucene 7. (elastic/x-pack-elasticsearch#1102)
Original commit: elastic/x-pack-elasticsearch@2c145e4160
This commit is contained in:
parent 51de15dcf1
commit 98c4dc6a92
@@ -72,8 +72,8 @@ public final class DocumentSubsetReader extends FilterLeafReader {
         }
 
         @Override
-        public Object getCoreCacheKey() {
-            return in.getCoreCacheKey();
+        public CacheHelper getReaderCacheHelper() {
+            return in.getReaderCacheHelper();
         }
     }
 
@@ -155,8 +155,13 @@ public final class DocumentSubsetReader extends FilterLeafReader {
     // Don't delegate getCombinedCoreAndDeletesKey(), because we change the live docs here.
 
     @Override
-    public Object getCoreCacheKey() {
-        return in.getCoreCacheKey();
+    public CacheHelper getCoreCacheHelper() {
+        return in.getCoreCacheHelper();
+    }
+
+    @Override
+    public CacheHelper getReaderCacheHelper() {
+        return in.getReaderCacheHelper();
     }
 
     BitSet getRoleQueryBits() {
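
Both DocumentSubsetReader hunks above swap Lucene 6's bare Object cache keys for Lucene 7's CacheHelper API. A minimal sketch of the new pattern, assuming Lucene 7.x signatures (the class and method names below are illustrative, not x-pack code):

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReader;

class CacheKeySketch {
    // Lucene 6.x equivalent: Object key = reader.getCoreCacheKey();
    static IndexReader.CacheKey coreKey(LeafReader reader) {
        IndexReader.CacheHelper helper = reader.getCoreCacheHelper();
        if (helper == null) {
            return null; // reader is not suited for caching
        }
        // Unlike a bare key, the helper lets a cache evict its entries
        // when the underlying core data is closed.
        helper.addClosedListener(key -> { /* evict entries for key */ });
        return helper.getKey();
    }
}
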
@@ -9,7 +9,6 @@ import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.DisjunctionMaxQuery;
 import org.apache.lucene.search.DocValuesNumbersQuery;
-import org.apache.lucene.search.DocValuesRangeQuery;
 import org.apache.lucene.search.FieldValueQuery;
 import org.apache.lucene.search.IndexOrDocValuesQuery;
 import org.apache.lucene.search.MatchAllDocsQuery;
@@ -24,6 +23,9 @@ import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.spans.SpanTermQuery;
 
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
 import java.util.Set;
 
 /**
@@ -80,10 +82,14 @@ class FieldExtractor {
         } else if (query instanceof DocValuesNumbersQuery) {
             fields.add(((DocValuesNumbersQuery)query).getField());
         } else if (query instanceof IndexOrDocValuesQuery) {
-            extractFields(((IndexOrDocValuesQuery) query).getIndexQuery(), fields);
-            extractFields(((IndexOrDocValuesQuery) query).getRandomAccessQuery(), fields);
-        } else if (query instanceof DocValuesRangeQuery) {
-            fields.add(((DocValuesRangeQuery)query).getField());
+            // Both queries are supposed to be equivalent, so if any of them can be extracted, we are good
+            try {
+                Set<String> dvQueryFields = new HashSet<>(1);
+                extractFields(((IndexOrDocValuesQuery) query).getRandomAccessQuery(), dvQueryFields);
+                fields.addAll(dvQueryFields);
+            } catch (UnsupportedOperationException e) {
+                extractFields(((IndexOrDocValuesQuery) query).getIndexQuery(), fields);
+            }
         } else if (query instanceof MatchAllDocsQuery) {
             // no field
         } else if (query instanceof MatchNoDocsQuery) {
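
The rewritten branch tries the doc-values ("random access") side first and falls back to the index side when extraction is unsupported. A hedged usage sketch, mirroring the updated FieldExtractorTests further down (it assumes the same package as the package-private FieldExtractor; the field name is illustrative):

import java.util.HashSet;
import java.util.Set;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.search.IndexOrDocValuesQuery;
import org.apache.lucene.search.Query;

class ExtractionFallbackSketch {
    static Set<String> extract() {
        Query supported = IntPoint.newExactQuery("foo", 42);                // extractable
        Query unsupported = NumericDocValuesField.newExactQuery("foo", 42); // not extractable
        Set<String> fields = new HashSet<>();
        // The random-access side throws UnsupportedOperationException into the
        // catch block, which falls back to the index-side query, so "foo" is
        // extracted either way.
        FieldExtractor.extractFields(new IndexOrDocValuesQuery(supported, unsupported), fields);
        return fields; // ["foo"]
    }
}
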
@@ -21,7 +21,6 @@ import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.index.StoredFieldVisitor;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.FilterIterator;
 import org.apache.lucene.util.automaton.CharacterRunAutomaton;
@@ -101,8 +100,8 @@ public final class FieldSubsetReader extends FilterLeafReader {
         }
 
         @Override
-        public Object getCoreCacheKey() {
-            return in.getCoreCacheKey();
+        public CacheHelper getReaderCacheHelper() {
+            return in.getReaderCacheHelper();
         }
     }
 
@@ -302,16 +301,16 @@ public final class FieldSubsetReader extends FilterLeafReader {
         return hasField(field) ? super.getNormValues(field) : null;
     }
 
-    @Override
-    public Bits getDocsWithField(String field) throws IOException {
-        return hasField(field) ? super.getDocsWithField(field) : null;
-    }
-
     // we share core cache keys (for e.g. fielddata)
     @Override
-    public Object getCoreCacheKey() {
-        return in.getCoreCacheKey();
+    public CacheHelper getCoreCacheHelper() {
+        return in.getCoreCacheHelper();
+    }
+
+    @Override
+    public CacheHelper getReaderCacheHelper() {
+        return in.getReaderCacheHelper();
     }
 
     /**
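
The getDocsWithField override disappears because Lucene 7 removed Bits getDocsWithField from LeafReader: doc values became iterators, and per-document presence is now reported by advanceExact. A minimal sketch of the replacement pattern (reader and field names are illustrative):

import java.io.IOException;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.NumericDocValues;

class DocValuesPresenceSketch {
    // Lucene 6.x used a separate presence bitset (getDocsWithField) plus
    // random-access values.get(docId); Lucene 7.x merges both into one iterator.
    static Long valueOrNull(LeafReader reader, String field, int docId) throws IOException {
        NumericDocValues values = reader.getNumericDocValues(field);
        if (values != null && values.advanceExact(docId)) {
            return values.longValue(); // only valid once advanceExact returns true
        }
        return null; // field has no value for this document
    }
}
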
@@ -458,100 +457,12 @@ public final class FieldSubsetReader extends FilterLeafReader {
         }
     }
 
-    /**
-     * Filters the PointValues instance.
-     * <p>
-     * Like other parts of the index, fields without access will not exist.
-     */
-    // TODO: push up a similar class into lucene
-    // TODO: fix the FieldFilterReader we use in lucene tests!
-    final class FieldFilterPointValues extends PointValues {
-        private final PointValues in;
-
-        FieldFilterPointValues(PointValues in) {
-            this.in = in;
-        }
-
-        @Override
-        public void intersect(String fieldName, IntersectVisitor visitor) throws IOException {
-            if (hasField(fieldName)) {
-                in.intersect(fieldName, visitor);
-            } else {
-                return; // behave as field does not exist
-            }
-        }
-
-        @Override
-        public long estimatePointCount(String fieldName, IntersectVisitor visitor) {
-            if (hasField(fieldName)) {
-                return in.estimatePointCount(fieldName, visitor);
-            } else {
-                return 0L; // behave as field does not exist
-            }
-        }
-
-        @Override
-        public byte[] getMinPackedValue(String fieldName) throws IOException {
-            if (hasField(fieldName)) {
-                return in.getMinPackedValue(fieldName);
-            } else {
-                return null; // behave as field does not exist
-            }
-        }
-
-        @Override
-        public byte[] getMaxPackedValue(String fieldName) throws IOException {
-            if (hasField(fieldName)) {
-                return in.getMaxPackedValue(fieldName);
-            } else {
-                return null; // behave as field does not exist
-            }
-        }
-
-        @Override
-        public int getNumDimensions(String fieldName) throws IOException {
-            if (hasField(fieldName)) {
-                return in.getNumDimensions(fieldName);
-            } else {
-                return 0; // behave as field does not exist
-            }
-        }
-
-        @Override
-        public int getBytesPerDimension(String fieldName) throws IOException {
-            if (hasField(fieldName)) {
-                return in.getBytesPerDimension(fieldName);
-            } else {
-                return 0; // behave as field does not exist
-            }
-        }
-
-        @Override
-        public long size(String fieldName) {
-            if (hasField(fieldName)) {
-                return in.size(fieldName);
-            } else {
-                return 0; // behave as field does not exist
-            }
-        }
-
-        @Override
-        public int getDocCount(String fieldName) {
-            if (hasField(fieldName)) {
-                return in.getDocCount(fieldName);
-            } else {
-                return 0; // behave as field does not exist
-            }
-        }
-    }
-
     @Override
-    public PointValues getPointValues() {
-        PointValues points = super.getPointValues();
-        if (points == null) {
-            return null;
+    public PointValues getPointValues(String fieldName) throws IOException {
+        if (hasField(fieldName)) {
+            return super.getPointValues(fieldName);
         } else {
-            return new FieldFilterPointValues(points);
+            return null;
         }
     }
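
The entire FieldFilterPointValues wrapper can go because Lucene 7 made PointValues per-field instead of per-segment, so hiding a field reduces to returning null from getPointValues(field). A small sketch of the API shift, assuming Lucene 7.x signatures (names are illustrative):

import java.io.IOException;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PointValues;

class PointValuesApiSketch {
    // Lucene 6.x equivalent: reader.getPointValues().size("fieldA"),
    // with one PointValues instance serving every field in the segment.
    static long pointCount(LeafReader reader, String field) throws IOException {
        PointValues points = reader.getPointValues(field);
        return points == null ? 0 : points.size(); // null: field indexed no points
    }
}
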
@@ -218,7 +218,8 @@ public class DocumentSubsetReaderTests extends ESTestCase {
         // we should have the same cache key as before
         assertEquals(1, ir2.numDocs());
         assertEquals(1, ir2.leaves().size());
-        assertSame(ir.leaves().get(0).reader().getCoreCacheKey(), ir2.leaves().get(0).reader().getCoreCacheKey());
+        assertSame(ir.leaves().get(0).reader().getCoreCacheHelper().getKey(),
+                ir2.leaves().get(0).reader().getCoreCacheHelper().getKey());
 
         TestUtil.checkReader(ir);
         IOUtils.close(ir, ir2, iw, dir);
@@ -6,19 +6,20 @@
 package org.elasticsearch.xpack.security.authz.accesscontrol;
 
+import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.AssertingQuery;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.DisjunctionMaxQuery;
 import org.apache.lucene.search.DocValuesNumbersQuery;
-import org.apache.lucene.search.DocValuesRangeQuery;
 import org.apache.lucene.search.FieldValueQuery;
 import org.apache.lucene.search.IndexOrDocValuesQuery;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.MultiPhraseQuery;
 import org.apache.lucene.search.PhraseQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.SynonymQuery;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.spans.SpanTermQuery;
@@ -112,12 +113,6 @@ public class FieldExtractorTests extends ESTestCase {
         assertEquals(asSet("foo"), fields);
     }
 
-    public void testDocValuesRange() {
-        Set<String> fields = new HashSet<>();
-        FieldExtractor.extractFields(DocValuesRangeQuery.newLongRange("foo", 1L, 2L, true, true), fields);
-        assertEquals(asSet("foo"), fields);
-    }
-
     public void testMatchAllDocs() {
         Set<String> fields = new HashSet<>();
         FieldExtractor.extractFields(new MatchAllDocsQuery(), fields);
@@ -139,16 +134,24 @@ public class FieldExtractorTests extends ESTestCase {
 
     public void testIndexOrDocValuesQuery() {
         Set<String> fields = new HashSet<>();
-        IndexOrDocValuesQuery query = new IndexOrDocValuesQuery(new FieldValueQuery("foo"),
-                new DocValuesNumbersQuery("foo", 5L));
+        Query supported = IntPoint.newExactQuery("foo", 42);
+        Query unsupported = NumericDocValuesField.newExactQuery("bar", 3);
+
+        IndexOrDocValuesQuery query = new IndexOrDocValuesQuery(supported, supported);
         FieldExtractor.extractFields(query, fields);
         assertEquals(asSet("foo"), fields);
 
-        // what if they have different fields - some programming error
-        fields.clear();
-        query = new IndexOrDocValuesQuery(new FieldValueQuery("foo1"),
-                new DocValuesNumbersQuery("bar", 5L));
-        FieldExtractor.extractFields(query, fields);
-        assertEquals(asSet("foo1", "bar"), fields);
+        IndexOrDocValuesQuery query2 = new IndexOrDocValuesQuery(unsupported, unsupported);
+        expectThrows(UnsupportedOperationException.class, () -> FieldExtractor.extractFields(query2, new HashSet<>()));
+
+        fields = new HashSet<>();
+        IndexOrDocValuesQuery query3 = new IndexOrDocValuesQuery(supported, unsupported);
+        FieldExtractor.extractFields(query3, fields);
+        assertEquals(asSet("foo"), fields);
+
+        fields = new HashSet<>();
+        IndexOrDocValuesQuery query4 = new IndexOrDocValuesQuery(unsupported, supported);
+        FieldExtractor.extractFields(query4, fields);
+        assertEquals(asSet("foo"), fields);
     }
 }
@@ -18,6 +18,7 @@ import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.document.StoredField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.Fields;
@@ -25,9 +26,11 @@ import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.NoMergePolicy;
+import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.PointValues;
+import org.apache.lucene.index.PointValues.IntersectVisitor;
+import org.apache.lucene.index.PointValues.Relation;
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.index.SortedNumericDocValues;
 import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.index.Term;
@@ -114,35 +117,30 @@ public class FieldSubsetReaderTests extends ESTestCase {
 
         // see only one field
         LeafReader segmentReader = ir.leaves().get(0).reader();
-        PointValues points = segmentReader.getPointValues();
+        PointValues points = segmentReader.getPointValues("fieldA");
+        assertNull(segmentReader.getPointValues("fieldB"));
 
         // size statistic
-        assertEquals(1, points.size("fieldA"));
-        assertEquals(0, points.size("fieldB"));
+        assertEquals(1, points.size());
 
         // doccount statistic
-        assertEquals(1, points.getDocCount("fieldA"));
-        assertEquals(0, points.getDocCount("fieldB"));
+        assertEquals(1, points.getDocCount());
 
         // min statistic
-        assertNotNull(points.getMinPackedValue("fieldA"));
-        assertNull(points.getMinPackedValue("fieldB"));
+        assertNotNull(points.getMinPackedValue());
 
         // max statistic
-        assertNotNull(points.getMaxPackedValue("fieldA"));
-        assertNull(points.getMaxPackedValue("fieldB"));
+        assertNotNull(points.getMaxPackedValue());
 
         // bytes per dimension
-        assertEquals(Integer.BYTES, points.getBytesPerDimension("fieldA"));
-        assertEquals(0, points.getBytesPerDimension("fieldB"));
+        assertEquals(Integer.BYTES, points.getBytesPerDimension());
 
         // number of dimensions
-        assertEquals(1, points.getNumDimensions("fieldA"));
-        assertEquals(0, points.getNumDimensions("fieldB"));
+        assertEquals(1, points.getNumDimensions());
 
         // walk the trees: we should see stuff in fieldA
         AtomicBoolean sawDoc = new AtomicBoolean(false);
-        points.intersect("fieldA", new IntersectVisitor() {
+        points.intersect(new IntersectVisitor() {
             @Override
             public void visit(int docID) throws IOException {
                 throw new IllegalStateException("should not get here");
@@ -159,23 +157,6 @@ public class FieldSubsetReaderTests extends ESTestCase {
             }
         });
         assertTrue(sawDoc.get());
-        // not in fieldB
-        points.intersect("fieldB", new IntersectVisitor() {
-            @Override
-            public void visit(int docID) throws IOException {
-                throw new IllegalStateException("should not get here");
-            }
-
-            @Override
-            public void visit(int docID, byte[] packedValue) throws IOException {
-                throw new IllegalStateException("should not get here");
-            }
-
-            @Override
-            public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
-                throw new IllegalStateException("should not get here");
-            }
-        });
 
         TestUtil.checkReader(ir);
         IOUtils.close(ir, iw, dir);
@@ -413,14 +394,12 @@ public class FieldSubsetReaderTests extends ESTestCase {
 
         // see only one field
         LeafReader segmentReader = ir.leaves().get(0).reader();
-        assertNotNull(segmentReader.getNumericDocValues("fieldA"));
-        assertEquals(1, segmentReader.getNumericDocValues("fieldA").get(0));
+        NumericDocValues values = segmentReader.getNumericDocValues("fieldA");
+        assertNotNull(values);
+        assertTrue(values.advanceExact(0));
+        assertEquals(1, values.longValue());
         assertNull(segmentReader.getNumericDocValues("fieldB"));
 
-        // check docs with field
-        assertNotNull(segmentReader.getDocsWithField("fieldA"));
-        assertNull(segmentReader.getDocsWithField("fieldB"));
-
         TestUtil.checkReader(ir);
         IOUtils.close(ir, iw, dir);
     }
@@ -444,13 +423,11 @@ public class FieldSubsetReaderTests extends ESTestCase {
 
         // see only one field
         LeafReader segmentReader = ir.leaves().get(0).reader();
-        assertNotNull(segmentReader.getBinaryDocValues("fieldA"));
-        assertEquals(new BytesRef("testA"), segmentReader.getBinaryDocValues("fieldA").get(0));
+        BinaryDocValues values = segmentReader.getBinaryDocValues("fieldA");
+        assertNotNull(values);
+        assertTrue(values.advanceExact(0));
+        assertEquals(new BytesRef("testA"), values.binaryValue());
         assertNull(segmentReader.getBinaryDocValues("fieldB"));
 
-        // check docs with field
-        assertNotNull(segmentReader.getDocsWithField("fieldA"));
-        assertNull(segmentReader.getDocsWithField("fieldB"));
-
         TestUtil.checkReader(ir);
         IOUtils.close(ir, iw, dir);
@@ -475,14 +452,12 @@ public class FieldSubsetReaderTests extends ESTestCase {
 
         // see only one field
         LeafReader segmentReader = ir.leaves().get(0).reader();
-        assertNotNull(segmentReader.getSortedDocValues("fieldA"));
-        assertEquals(new BytesRef("testA"), segmentReader.getSortedDocValues("fieldA").get(0));
+        SortedDocValues values = segmentReader.getSortedDocValues("fieldA");
+        assertNotNull(values);
+        assertTrue(values.advanceExact(0));
+        assertEquals(new BytesRef("testA"), values.binaryValue());
         assertNull(segmentReader.getSortedDocValues("fieldB"));
 
-        // check docs with field
-        assertNotNull(segmentReader.getDocsWithField("fieldA"));
-        assertNull(segmentReader.getDocsWithField("fieldB"));
-
         TestUtil.checkReader(ir);
         IOUtils.close(ir, iw, dir);
     }
@@ -508,16 +483,12 @@ public class FieldSubsetReaderTests extends ESTestCase {
         LeafReader segmentReader = ir.leaves().get(0).reader();
         SortedSetDocValues dv = segmentReader.getSortedSetDocValues("fieldA");
         assertNotNull(dv);
-        dv.setDocument(0);
+        assertTrue(dv.advanceExact(0));
         assertEquals(0, dv.nextOrd());
         assertEquals(SortedSetDocValues.NO_MORE_ORDS, dv.nextOrd());
         assertEquals(new BytesRef("testA"), dv.lookupOrd(0));
         assertNull(segmentReader.getSortedSetDocValues("fieldB"));
 
-        // check docs with field
-        assertNotNull(segmentReader.getDocsWithField("fieldA"));
-        assertNull(segmentReader.getDocsWithField("fieldB"));
-
         TestUtil.checkReader(ir);
         IOUtils.close(ir, iw, dir);
     }
@@ -543,15 +514,11 @@ public class FieldSubsetReaderTests extends ESTestCase {
         LeafReader segmentReader = ir.leaves().get(0).reader();
         SortedNumericDocValues dv = segmentReader.getSortedNumericDocValues("fieldA");
         assertNotNull(dv);
-        dv.setDocument(0);
-        assertEquals(1, dv.count());
-        assertEquals(1, dv.valueAt(0));
+        assertTrue(dv.advanceExact(0));
+        assertEquals(1, dv.docValueCount());
+        assertEquals(1, dv.nextValue());
         assertNull(segmentReader.getSortedNumericDocValues("fieldB"));
 
-        // check docs with field
-        assertNotNull(segmentReader.getDocsWithField("fieldA"));
-        assertNull(segmentReader.getDocsWithField("fieldB"));
-
         TestUtil.checkReader(ir);
         IOUtils.close(ir, iw, dir);
     }
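
The multi-valued doc values assertions above follow the same iterator migration: setDocument/count/valueAt and setDocument/nextOrd become advanceExact followed by docValueCount/nextValue or nextOrd. A hedged sketch of both Lucene 7 access patterns (class and method names are illustrative):

import java.io.IOException;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;

class MultiValuedDocValuesSketch {
    // Lucene 6.x: dv.setDocument(doc); then dv.count() and dv.valueAt(i).
    static long sumValues(SortedNumericDocValues dv, int doc) throws IOException {
        long sum = 0;
        if (dv.advanceExact(doc)) {
            for (int i = 0; i < dv.docValueCount(); i++) {
                sum += dv.nextValue(); // values come back in ascending order
            }
        }
        return sum;
    }

    // Lucene 6.x: dv.setDocument(doc); then loop on nextOrd() until NO_MORE_ORDS.
    static long countOrds(SortedSetDocValues dv, int doc) throws IOException {
        long count = 0;
        if (dv.advanceExact(doc)) {
            for (long ord = dv.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = dv.nextOrd()) {
                count++; // dv.lookupOrd(ord) would resolve the term bytes
            }
        }
        return count;
    }
}
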
@@ -923,7 +890,8 @@ public class FieldSubsetReaderTests extends ESTestCase {
         // we should have the same cache key as before
         assertEquals(1, ir2.numDocs());
         assertEquals(1, ir2.leaves().size());
-        assertSame(ir.leaves().get(0).reader().getCoreCacheKey(), ir2.leaves().get(0).reader().getCoreCacheKey());
+        assertSame(ir.leaves().get(0).reader().getCoreCacheHelper().getKey(),
+                ir2.leaves().get(0).reader().getCoreCacheHelper().getKey());
 
         TestUtil.checkReader(ir);
         IOUtils.close(ir, ir2, iw, dir);
@@ -49,7 +49,7 @@ public class OptOutQueryCacheTests extends ESTestCase {
         BooleanQuery.Builder builder = new BooleanQuery.Builder();
         builder.add(new TermQuery(new Term("foo", "bar")), BooleanClause.Occur.MUST);
         builder.add(new TermQuery(new Term("no", "baz")), BooleanClause.Occur.MUST_NOT);
-        Weight weight = builder.build().createWeight(searcher, false);
+        Weight weight = builder.build().createWeight(searcher, false, 1f);
 
         // whenever the allowed fields match the fields in the query and we do not deny access to any fields we allow caching.
         IndicesAccessControl.IndexAccessControl permissions = new IndicesAccessControl.IndexAccessControl(true,
@@ -518,26 +518,16 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
             return weight.explain(context, doc);
         }
 
-        @Override
-        public float getValueForNormalization() throws IOException {
-            return weight.getValueForNormalization();
-        }
-
-        @Override
-        public void normalize(float norm, float boost) {
-            weight.normalize(norm, boost);
-        }
-
         @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
-            assertTrue(seenLeaves.add(context.reader().getCoreCacheKey()));
+            assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey()));
             return weight.scorer(context);
         }
 
         @Override
         public BulkScorer bulkScorer(LeafReaderContext context)
                 throws IOException {
-            assertTrue(seenLeaves.add(context.reader().getCoreCacheKey()));
+            assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey()));
             return weight.bulkScorer(context);
         }
     }
@@ -565,8 +555,8 @@ public class SecurityIndexSearcherWrapperUnitTests extends ESTestCase {
         }
 
         @Override
-        public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-            return new CreateScorerOnceWeight(query.createWeight(searcher, needsScores));
+        public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+            return new CreateScorerOnceWeight(query.createWeight(searcher, needsScores, boost));
         }
 
         @Override
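
The Weight changes above track the Lucene 7 scoring cleanup: query normalization is gone, so Weight no longer has getValueForNormalization()/normalize(), and the boost is instead passed directly into createWeight. A minimal sketch, assuming Lucene 7.x signatures:

import java.io.IOException;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Weight;

class WeightCreationSketch {
    // Lucene 6.x: createWeight(searcher, needsScores), after which the searcher
    // drove getValueForNormalization()/normalize(norm, boost) on the Weight.
    static Weight createWeight(Query query, IndexSearcher searcher) throws IOException {
        return query.createWeight(searcher, /* needsScores= */ false, /* boost= */ 1f);
    }
}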