mirror of https://github.com/apache/lucene.git
SOLR-9917: fix NPE in distrib percentiles when no values for field in bucket
parent f9e3554838
commit 7ef6a81846
@@ -329,6 +329,9 @@ Bug Fixes

 * SOLR-7495: Support Facet.field on a non-DocValued, single-value, int field (Varun Thacker, Scott Stults)

+* SOLR-9917: JSON Facet API percentile function caused a NullPointerException in distributed mode when
+  there were no values in a bucket from a shard. (yonik)
+
 Other Changes
 ----------------------
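Editor's note: for context, the failure mode. A percentile aggregation inside a terms facet builds a t-digest per bucket; in distributed mode, a shard that has no values for the field in some bucket has no digest to send back. A sketch of the kind of request involved (field and facet names are illustrative, echoing the test templates further down):

    // Hypothetical request body for the json.facet parameter; 'sparse_num_d'
    // is a field with no values in some buckets (names mirror the tests below).
    String jsonFacet =
        "{ f3 : { terms : { field : 'cat_s', sort : 'n1 desc'," +
        "                   facet : { n1 : 'percentile(sparse_num_d,50)' } } } }";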
@@ -149,7 +149,7 @@ public class PercentileAgg extends SimpleAggValueSource {
       }
       if (sortvals != null && percentiles.size()==1) {
         // we've already calculated everything we need
-        return sortvals[slotNum];
+        return digests[slotNum] != null ? sortvals[slotNum] : null;
       }
       return getValueFromDigest( digests[slotNum] );
     }
@@ -192,6 +192,7 @@ public class PercentileAgg extends SimpleAggValueSource {
     @Override
     public void merge(Object facetResult, Context mcontext) {
       byte[] arr = (byte[])facetResult;
+      if (arr == null) return; // an explicit null can mean no values in the field
       AVLTreeDigest subDigest = AVLTreeDigest.fromBytes(ByteBuffer.wrap(arr));
       if (digest == null) {
         digest = subDigest;
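Editor's note: each shard ships its per-bucket digest as serialized bytes, and an empty bucket now ships an explicit null, which merge() must skip. A minimal sketch of the whole merge step (one assumption: the else branch, cut off in this hunk, folds sub-digests in via digest.add(subDigest), the t-digest merge call):

    byte[] arr = (byte[]) facetResult;   // serialized t-digest from one shard
    if (arr == null) return;             // shard had no values for this bucket: skip it
    AVLTreeDigest subDigest = AVLTreeDigest.fromBytes(ByteBuffer.wrap(arr));
    if (digest == null) {
      digest = subDigest;                // first shard with data seeds the digest
    } else {
      digest.add(subDigest);             // assumed else branch: merge later shards in
    }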
@@ -202,7 +203,7 @@ public class PercentileAgg extends SimpleAggValueSource {

     @Override
     public Object getMergedResult() {
-      if (percentiles.size() == 1) return getSortVal();
+      if (percentiles.size() == 1 && digest != null) return getSortVal();
       return getValueFromDigest(digest);
     }

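Editor's note: this guard handles the bucket where every shard reported null, so merge() above never created a digest. With digest still null, the call now falls through to getValueFromDigest(digest); the fix relies on that method tolerating a null digest (yielding a null result, which SlotAcc.setValues below then drops). An annotated reading of the two paths:

    // Annotated reading (editor's comments; behavior as implied by this diff):
    if (percentiles.size() == 1 && digest != null) return getSortVal(); // digest exists: safe
    return getValueFromDigest(digest); // digest may be null here => null result, omitted later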
@@ -213,7 +214,7 @@ public class PercentileAgg extends SimpleAggValueSource {

     private Double getSortVal() {
       if (sortVal == null) {
-        sortVal = digest.quantile( percentiles.get(0) * 0.01 );
+        sortVal = digest==null ? Double.NEGATIVE_INFINITY : digest.quantile( percentiles.get(0) * 0.01 );
       }
       return sortVal;
     }
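Editor's note: the sort value is what bucket ordering uses, so it cannot be null; Double.NEGATIVE_INFINITY makes an empty bucket compare below every real percentile instead of throwing. A tiny self-contained illustration:

    double withValues = 3.0;                       // a bucket's real 50th percentile
    double empty = Double.NEGATIVE_INFINITY;       // bucket with no values
    assert Double.compare(withValues, empty) > 0;  // empty bucket sorts last under 'desc'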
@@ -90,7 +90,10 @@ public abstract class SlotAcc implements Closeable {

   public void setValues(SimpleOrderedMap<Object> bucket, int slotNum) throws IOException {
     if (key == null) return;
-    bucket.add(key, getValue(slotNum));
+    Object val = getValue(slotNum);
+    if (val != null) {
+      bucket.add(key, val);
+    }
   }

   public abstract void reset();
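Editor's note: with this change a null aggregate is omitted from the bucket instead of being serialized, which is why the f3 assertion in the test below expects bucket 'B' with no n1 key at all. The resulting response shape (values from that assertion):

    // f3:{ buckets:[ { val:'A', count:2, n1:1.0 },   // bucket has sparse_num_d values
    //                { val:'B', count:3 } ] }        // no values: 'n1' key omitted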
@@ -381,6 +381,7 @@ public class TestJsonFacets extends SolrTestCaseHS {

   public static void doStatsTemplated(Client client, ModifiableSolrParams p) throws Exception {
     p.set("Z_num_i", "Z_" + p.get("num_i") );
+    p.set("sparse_num_d", "sparse_" + p.get("num_d") );
     if (p.get("num_is") == null) p.add("num_is","num_is");
     if (p.get("num_fs") == null) p.add("num_fs","num_fs");
@@ -398,21 +399,34 @@ public class TestJsonFacets extends SolrTestCaseHS {
     String super_s = m.expand("${super_s}");
     String sparse_s = m.expand("${sparse_s}");
     String multi_ss = m.expand("${multi_ss}");
+    String sparse_num_d = m.expand("${sparse_num_d}");

     client.deleteByQuery("*:*", null);

+    Client iclient = client;
+
+    /*** This code was not needed yet, but may be needed if we want to force empty shard results more often.
+    // create a new indexing client that doesn't use one shard to better test for empty or non-existent results
+    if (!client.local()) {
+      List<SolrClient> shards = client.getClientProvider().all();
+      iclient = new Client(shards.subList(0, shards.size()-1), client.getClientProvider().getSeed());
+    }
+    ***/
+
     SolrInputDocument doc =
-        sdoc("id", "1", cat_s, "A", where_s, "NY", num_d, "4", num_i, "2", num_is,"2",num_is,"-5", num_fs,"2",num_fs,"-5", super_s, "zodiac", date, "2001-01-01T01:01:01Z", val_b, "true", sparse_s, "one");
-    client.add(doc, null);
-    client.add(doc, null);
-    client.add(doc, null); // a couple of deleted docs
-    client.add(sdoc("id", "2", cat_s, "B", where_s, "NJ", num_d, "-9", num_i, "-5", num_is,"3",num_is,"-1", num_fs,"3",num_fs,"-1.5", super_s,"superman", date,"2002-02-02T02:02:02Z", val_b, "false" , multi_ss,"a", multi_ss,"b" , Z_num_i, "0"), null);
-    client.add(sdoc("id", "3"), null);
-    client.commit();
-    client.add(sdoc("id", "4", cat_s, "A", where_s, "NJ", num_d, "2", num_i, "3", num_is,"0",num_is,"3", num_fs,"0", num_fs,"3", super_s,"spiderman", date,"2003-03-03T03:03:03Z" , multi_ss, "b", Z_num_i, ""+Integer.MIN_VALUE), null);
-    client.add(sdoc("id", "5", cat_s, "B", where_s, "NJ", num_d, "11", num_i, "7", num_is,"0", num_fs,"0", super_s,"batman" , date,"2001-02-03T01:02:03Z" ,sparse_s,"two", multi_ss, "a"), null);
-    client.commit();
-    client.add(sdoc("id", "6", cat_s, "B", where_s, "NY", num_d, "-5", num_i, "-5", num_is,"-1", num_fs,"-1.5", super_s,"hulk" , date,"2002-03-01T03:02:01Z" , multi_ss, "b", multi_ss, "a", Z_num_i, ""+Integer.MAX_VALUE), null);
+        sdoc("id", "1", cat_s, "A", where_s, "NY", num_d, "4", sparse_num_d, "6", num_i, "2", num_is,"2",num_is,"-5", num_fs,"2",num_fs,"-5", super_s, "zodiac", date, "2001-01-01T01:01:01Z", val_b, "true", sparse_s, "one");
+    iclient.add(doc, null);
+    iclient.add(doc, null);
+    iclient.add(doc, null); // a couple of deleted docs
+    iclient.add(sdoc("id", "2", cat_s, "B", where_s, "NJ", num_d, "-9", num_i, "-5", num_is,"3",num_is,"-1", num_fs,"3",num_fs,"-1.5", super_s,"superman", date,"2002-02-02T02:02:02Z", val_b, "false" , multi_ss,"a", multi_ss,"b" , Z_num_i, "0"), null);
+    iclient.add(sdoc("id", "3"), null);
+    iclient.commit();
+    iclient.add(sdoc("id", "4", cat_s, "A", where_s, "NJ", num_d, "2", sparse_num_d,"-4",num_i, "3", num_is,"0",num_is,"3", num_fs,"0", num_fs,"3", super_s,"spiderman", date,"2003-03-03T03:03:03Z" , multi_ss, "b", Z_num_i, ""+Integer.MIN_VALUE), null);
+    iclient.add(sdoc("id", "5", cat_s, "B", where_s, "NJ", num_d, "11", num_i, "7", num_is,"0", num_fs,"0", super_s,"batman" , date,"2001-02-03T01:02:03Z" ,sparse_s,"two", multi_ss, "a"), null);
+    iclient.commit();
+    iclient.add(sdoc("id", "6", cat_s, "B", where_s, "NY", num_d, "-5", num_i, "-5", num_is,"-1", num_fs,"-1.5", super_s,"hulk" , date,"2002-03-01T03:02:01Z" , multi_ss, "b", multi_ss, "a", Z_num_i, ""+Integer.MAX_VALUE), null);
+    iclient.commit();
     client.commit();

     // test for presence of debugging info
@@ -542,11 +556,15 @@ public class TestJsonFacets extends SolrTestCaseHS {
     // test sorting by single percentile
     client.testJQ(params(p, "q", "*:*"
         , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'percentile(${num_d},50)'} }}" +
-            " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'percentile(${num_d},50)'} }} }"
+            " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'percentile(${num_d},50)'} }} " +
+            " , f3:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'percentile(${sparse_num_d},50)'} }} " +
+            "}"
        )
        , "facets=={ 'count':6, " +
            " f1:{ 'buckets':[{ val:'A', count:2, n1:3.0 }, { val:'B', count:3, n1:-5.0}]}" +
-            ", f2:{ 'buckets':[{ val:'B', count:3, n1:-5.0}, { val:'A', count:2, n1:3.0 }]} }"
+            ", f2:{ 'buckets':[{ val:'B', count:3, n1:-5.0}, { val:'A', count:2, n1:3.0 }]}" +
+            ", f3:{ 'buckets':[{ val:'A', count:2, n1:1.0}, { val:'B', count:3}]}" +
+            "}"
        );

     // test sorting by multiple percentiles (sort is by first)
@@ -364,6 +364,10 @@ public class SolrTestCaseHS extends SolrTestCaseJ4 {
     public List<SolrClient> all() {
       return clients;
     }
+
+    public int getSeed() {
+      return hashSeed;
+    }
   }
