SOLR-14520: Fixed server errors from the json.facet allBuckets:true option when combined with refine:true

This commit is contained in:
Chris Hostetter 2020-06-03 09:09:46 -07:00
parent 2af82c83d9
commit fb58f433fb
4 changed files with 76 additions and 6 deletions

View File

@@ -209,6 +209,8 @@ Bug Fixes
* SOLR-14525: SolrCoreAware, ResourceLoaderAware should be honored for plugin loaded from packages (noble)
* SOLR-14520: Fixed server errors from the json.facet allBuckets:true option when combined with refine:true
(Michael Gibney, hossman)
Other Changes
---------------------

View File

@@ -85,7 +85,11 @@ abstract class FacetFieldProcessorByArray extends FacetFieldProcessor {
if (refineResult != null) {
if (freq.allBuckets) {
// count is irrelevant, but hardcoded in collect(...), so intercept/mask normal counts.
// Set here to prevent createAccs(...) from creating a 1-slot countAcc that will fail with AIOOBE
countAcc = SlotAcc.DEV_NULL_SLOT_ACC;
createAccs(nDocs, 1);
otherAccs = accs; // accs is created above and set on allBucketsAcc; but during collection, setNextReader is called on otherAccs.
allBucketsAcc = new SpecialSlotAcc(fcontext, null, -1, accs, 0);
collectDocs();

View File

@@ -595,6 +595,63 @@ public abstract class SlotAcc implements Closeable {
public abstract long getCount(int slot);
}
/**
 * This CountSlotAcc exists as a /dev/null sink for callers of collect(...) and other "write"-type
 * methods. It should be used in contexts where "read"-type access methods will never be called.
 * <p>
 * NOTE(review): installed as {@code countAcc} (e.g. during allBuckets+refine processing) so that
 * hardcoded count collection is harmlessly swallowed instead of indexing into a too-small
 * count array -- presumably preventing the AIOOBE mentioned at the install site; confirm
 * against FacetFieldProcessorByArray.
 */
static final CountSlotAcc DEV_NULL_SLOT_ACC = new CountSlotAcc(null) {
@Override
public void resize(Resizer resizer) {
// No-op: there is no backing storage to resize.
}
@Override
public void reset() throws IOException {
// No-op: no accumulated state to clear.
}
@Override
public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
// No-op: per-doc counts are intentionally discarded.
}
@Override
public void incrementCount(int slot, long count) {
// No-op: explicit count increments are intentionally discarded.
}
@Override
public void setNextReader(LeafReaderContext readerContext) throws IOException {
// No-op: no per-segment state to (re)initialize.
}
@Override
public int collect(DocSet docs, int slot, IntFunction<SlotContext> slotContext) throws IOException {
return docs.size(); // dressed up no-op: report the expected count without recording anything
}
// "Read"-type accessors below must never be reached when this sink is in use;
// failing fast makes any misuse immediately visible.
@Override
public Object getValue(int slotNum) throws IOException {
throw new UnsupportedOperationException("not supported");
}
@Override
public int compare(int slotA, int slotB) {
throw new UnsupportedOperationException("not supported");
}
@Override
public void setValues(SimpleOrderedMap<Object> bucket, int slotNum) throws IOException {
throw new UnsupportedOperationException("not supported");
}
@Override
public long getCount(int slot) {
throw new UnsupportedOperationException("not supported");
}
};
static class CountSlotArrAcc extends CountSlotAcc {
long[] result;

View File

@@ -1082,7 +1082,7 @@ public class TestJsonFacetRefinement extends SolrTestCaseHS {
, "facets=={foo:555}"
);
****/
for (String method : new String[]{"","dvhash","stream","uif","enum","stream","smart"}) {
for (String method : new String[]{"","dv", "dvhash","stream","uif","enum","stream","smart"}) {
if (method.equals("")) {
p.remove("terms");
} else {
@@ -1364,15 +1364,22 @@ public class TestJsonFacetRefinement extends SolrTestCaseHS {
// test filling in missing "allBuckets"
client.testJQ(params(p, "q", "*:*",
"json.facet", "{" +
" cat :{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:false, allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:false} } }" +
" cat0:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:false, allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:false} } }" +
", cat1:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true, allBuckets:true, sort:'min asc', facet:{ min:'min(${num_d})' } }" +
", cat2:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true } } }" +
", cat3:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true , facet:{f:'sum(${num_d})'} } } }" +
", cat3:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true , facet:{sum:'sum(${num_d})'} } } }" +
", cat4:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true , sort:'sum asc', facet:{sum:'sum(${num_d})'} } } }" +
// using overrefine only so we aren't fooled by 'local maximum' and ask all shards for 'B'
", cat5:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true, overrefine:2, allBuckets:true, sort:'min desc' facet:{ min:'min(${num_d})', xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true, facet:{sum:'sum(${num_d})'} } } }" +
"}"
)
, "facets=={ count:8" +
", cat:{ allBuckets:{count:8}, buckets:[ {val:A, count:3, xy:{buckets:[{count:2, val:X}], allBuckets:{count:3}}}] }" +
",cat0:{ allBuckets:{count:8}, buckets:[ {val:A, count:3, xy:{buckets:[{count:2, val:X}], allBuckets:{count:3}}}] }" +
",cat1:{ allBuckets:{count:8, min:-19.0 }, buckets:[ {val:A, count:4, min:-19.0 }] }" +
",cat2:{ allBuckets:{count:8}, buckets:[ {val:A, count:4, xy:{buckets:[{count:3, val:X}], allBuckets:{count:4}}}] }" +
",cat3:{ allBuckets:{count:8}, buckets:[ {val:A, count:4, xy:{buckets:[{count:3, val:X, f:23.0}], allBuckets:{count:4, f:4.0}}}] }" +
",cat3:{ allBuckets:{count:8}, buckets:[ {val:A, count:4, xy:{buckets:[{count:3, val:X, sum:23.0}], allBuckets:{count:4, sum:4.0}}}] }" +
",cat4:{ allBuckets:{count:8}, buckets:[ {val:A, count:4, xy:{buckets:[{count:1, val:Y, sum:-19.0}], allBuckets:{count:4, sum:4.0}}}] }" +
",cat5:{ allBuckets:{count:8, min:-19.0 }, buckets:[ {val:B, count:4, min:-11.0, xy:{buckets:[{count:2, val:X, sum:6.0}], allBuckets:{count:4, sum:-2.0}}}] }" +
"}"
);