SOLR-12064: resize reused accs to fix bugs with limit:-1 and missing:true

yonik 2018-03-12 21:56:02 -04:00
parent b82ce515f0
commit 68d8eb4504
3 changed files with 42 additions and 6 deletions


@@ -310,6 +310,10 @@ Bug Fixes
* SOLR-12072: Invalid path string using ZkConfigManager.copyConfigDir(String fromConfig, String toConfig)
(Alessandro Hoss via Erick Erickson)
* SOLR-12064: JSON Facet API: fix bug where a limit of -1 in conjunction with multiple facets or
missing=true caused an NPE or AIOOBE. (Karthik Ramachandran via yonik)
Optimizations
----------------------
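For context, the failure mode described in the entry above involves a request of roughly the following shape, combining a terms facet that uses limit:-1 with missing:true across more than one facet. This is an illustrative sketch only; the field and facet names are placeholders and are not taken from this commit:

    json.facet = {
      f1 : { type:terms, field:cat_s, limit:-1, missing:true, facet:{ n1:'sum(num_d)' } },
      f2 : { type:terms, field:cat_s, limit:-1, missing:true, facet:{ n1:'sum(num_d)' } }
    }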


@@ -83,9 +83,20 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
}
if (accs != null) {
// reuse these accs, but reset them first
// reuse these accs, but reset them first and resize since size could be different
for (SlotAcc acc : accs) {
acc.reset();
acc.resize(new SlotAcc.Resizer() {
@Override
public int getNewSize() {
return slotCount;
}
@Override
public int getNewSlot(int oldSlot) {
return 0;
}
});
}
return;
} else {
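A note on the Resizer added above: getNewSize() returns the slotCount of the facet currently being set up, so each reused accumulator can re-allocate its per-slot storage, and getNewSlot() mapping every old slot to 0 effectively discards the old per-slot values (they were already cleared by reset()). As a hedged sketch, not Solr source, this is how an array-backed accumulator might consume such a Resizer; only getNewSize()/getNewSlot() come from the diff, while the values field and method body are hypothetical:

    // Hypothetical array-backed accumulator applying a SlotAcc.Resizer like the one above.
    void resize(SlotAcc.Resizer resizer) {
      double[] old = values;
      values = new double[resizer.getNewSize()];      // storage re-allocated to the new slotCount
      for (int oldSlot = 0; oldSlot < old.length; oldSlot++) {
        // the Resizer above maps every old slot to 0, and the acc was already reset(),
        // so no meaningful per-slot state survives the resize
        values[resizer.getNewSlot(oldSlot)] = old[oldSlot];
      }
    }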
@@ -339,11 +350,10 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
res.add("allBuckets", allBuckets);
}
SimpleOrderedMap<Object> missingBucket = new SimpleOrderedMap<>();
if (freq.missing) {
// TODO: it would be more efficient to build up a missing DocSet if we need it here anyway.
SimpleOrderedMap<Object> missingBucket = new SimpleOrderedMap<>();
fillBucket(missingBucket, getFieldMissingQuery(fcontext.searcher, freq.field), null, false, null);
res.add("missing", missingBucket);
// moved missing fillBucket after we fill facet since it will reset all the accumulators.
}
// if we are deep paging, we don't have to order the highest "offset" counts.
@@ -371,6 +381,11 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
bucketList.add(bucket);
}
if (freq.missing) {
// TODO: it would be more efficient to build up a missing DocSet if we need it here anyway.
fillBucket(missingBucket, getFieldMissingQuery(fcontext.searcher, freq.field), null, false, null);
}
return res;
}
@@ -477,6 +492,13 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
this.subAccs = subAccs;
}
@Override
public void setNextReader(LeafReaderContext ctx) throws IOException {
for (SlotAcc acc : subAccs) {
acc.setNextReader(ctx);
}
}
@Override
public void collect(int doc, int slot) throws IOException {
for (SlotAcc acc : subAccs) {


@@ -524,8 +524,8 @@ public class TestJsonFacets extends SolrTestCaseHS {
if (terms == null) terms="";
int limit=0;
switch (random().nextInt(4)) {
case 0: limit=-1;
case 1: limit=1000000;
case 0: limit=-1; break;
case 1: limit=1000000; break;
case 2: // fallthrough
case 3: // fallthrough
}
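Worth noting why the added break statements matter: without them, case 0 falls through into case 1, so limit is immediately overwritten and the randomized test never actually ran with limit=-1, which is the very case this commit fixes. A minimal standalone illustration (plain Java, not part of the test):

    int limit = 0;
    // old version: case 0 falls through, so limit ends up as 1000000 and -1 is never used
    switch (0) { case 0: limit = -1; case 1: limit = 1000000; }
    assert limit == 1000000;
    // fixed version: break keeps the selected value
    switch (0) { case 0: limit = -1; break; case 1: limit = 1000000; break; }
    assert limit == -1;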
@@ -686,6 +686,16 @@ public class TestJsonFacets extends SolrTestCaseHS {
", f2:{ 'buckets':[{ val:'B', count:3, n1:-3.0}, { val:'A', count:2, n1:6.0 }]} }"
);
// test sorting by other stats and more than one facet
client.testJQ(params(p, "q", "*:*"
, "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'sum(${num_d})', n2:'avg(${num_d})'} }}" +
" , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc' , facet:{n1:'sum(${num_d})', n2:'avg(${num_d})'} }} }"
)
, "facets=={ 'count':6, " +
" f1:{ 'buckets':[{ val:'A', count:2, n1:6.0 , n2:3.0 }, { val:'B', count:3, n1:-3.0, n2:-1.0}]}" +
", f2:{ 'buckets':[{ val:'B', count:3, n1:-3.0, n2:-1.0}, { val:'A', count:2, n1:6.0 , n2:3.0 }]} }"
);
// test sorting by other stats
client.testJQ(params(p, "q", "*:*"
, "json.facet", "{f1:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'min(${num_d})'} }" +