diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 76a5bdfd81c..8ce244ef6d1 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -62,6 +62,8 @@ Bug Fixes
 * SOLR-12393: Compute score if requested even when expanded docs not sorted by score in ExpandComponent.
   (David Smiley, Munendra S N)
 
+* SOLR-13877: Fix NPE in expand component when matched docs have fewer unique values. (Munendra S N)
+
 Other Changes
 ---------------------
 
diff --git a/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java b/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
index 5bf8a3a34ec..2a58248bfa9 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
@@ -327,11 +327,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
       }
 
       if(count > 0 && count < 200) {
-        try {
-          groupQuery = getGroupQuery(field, count, ordBytes);
-        } catch(Exception e) {
-          throw new IOException(e);
-        }
+        groupQuery = getGroupQuery(field, count, ordBytes);
       }
     } else {
       groupSet = new LongHashSet(docList.size());
@@ -689,16 +685,15 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
                               int size,
                               LongHashSet groupSet) {
 
-    BytesRef[] bytesRefs = new BytesRef[size];
+    List<BytesRef> bytesRefs = new ArrayList<>(size);
     BytesRefBuilder term = new BytesRefBuilder();
     Iterator<LongCursor> it = groupSet.iterator();
-    int index = -1;
 
     while (it.hasNext()) {
       LongCursor cursor = it.next();
       String stringVal = numericToString(ft, cursor.value);
       ft.readableToIndexed(stringVal, term);
-      bytesRefs[++index] = term.toBytesRef();
+      bytesRefs.add(term.toBytesRef());
     }
 
     return new TermInSetQuery(fname, bytesRefs);
@@ -738,13 +733,12 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
 
   private Query getGroupQuery(String fname,
                               int size,
-                              IntObjectHashMap<BytesRef> ordBytes) throws Exception {
-    BytesRef[] bytesRefs = new BytesRef[size];
-    int index = -1;
+                              IntObjectHashMap<BytesRef> ordBytes) {
+    List<BytesRef> bytesRefs = new ArrayList<>(size);
     Iterator<IntObjectCursor<BytesRef>>it = ordBytes.iterator();
     while (it.hasNext()) {
       IntObjectCursor<BytesRef> cursor = it.next();
-      bytesRefs[++index] = cursor.value;
+      bytesRefs.add(cursor.value);
     }
     return new TermInSetQuery(fname, bytesRefs);
   }
diff --git a/solr/core/src/test/org/apache/solr/handler/component/TestExpandComponent.java b/solr/core/src/test/org/apache/solr/handler/component/TestExpandComponent.java
index 4ba2bd470cd..336d608a32c 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/TestExpandComponent.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/TestExpandComponent.java
@@ -91,17 +91,7 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
         {"id","7", "term_s", "YYYY", group, "1"+floatAppend, "test_i", "1", "test_l", "100000", "test_f", "2000", "type_s", "child"},
         {"id","8", "term_s","YYYY", group, "2"+floatAppend, "test_i", "2", "test_l", "100000", "test_f", "200", "type_s", "child"}
     };
-    // randomize addition of docs into bunch of segments
-    // TODO there ought to be a test utility to do this; even add in batches
-    Collections.shuffle(Arrays.asList(docs), random());
-    for (String[] doc : docs) {
-      assertU(adoc(doc));
-      if (random().nextBoolean()) {
-        assertU(commit());
-      }
-    }
-
-    assertU(commit());
+    createIndex(docs);
 
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.add("q", "*:*");
@@ -165,7 +155,6 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
 
 
     //Test override expand.q
-
     params = new ModifiableSolrParams();
     params.add("q", "type_s:parent");
     params.add("defType", "edismax");
@@ -186,7 +175,6 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
 
 
     //Test override expand.fq
-
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
     params.add("fq", "type_s:parent");
@@ -207,7 +195,6 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
     );
 
     //Test override expand.fq and expand.q
-
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
     params.add("fq", "type_s:parent");
@@ -229,7 +216,6 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
     );
 
     //Test expand.rows
-
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
     params.add("fq", "{!collapse field="+group+hint+"}");
@@ -250,7 +236,6 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
 
 
     //Test no group results
-
     params = new ModifiableSolrParams();
     params.add("q", "test_i:5");
     params.add("fq", "{!collapse field="+group+hint+"}");
@@ -264,7 +249,6 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
     );
 
     //Test zero results
-
     params = new ModifiableSolrParams();
     params.add("q", "test_i:5532535");
     params.add("fq", "{!collapse field="+group+hint+"}");
@@ -278,7 +262,6 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
     );
 
     //Test key-only fl
-
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
     params.add("fq", "{!collapse field="+group+hint+"}");
@@ -299,7 +282,6 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
     );
     //Test key-only fl with score but no sorting
-
     assertQ(req(params, "fl", "id,score"),
         "*[count(/response/result/doc)=2]",
         "*[count(/response/lst[@name='expanded']/result)=2]",
         "/response/result/doc[1]/str[@name='id'][.='2']",
@@ -326,7 +308,6 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
     );
 
     //Test fl with score, sort by non-score
-
     assertQ(req(params, "expand.sort", "test_l desc", "fl", "id,test_i,score"),
         "*[count(/response/result/doc)=2]",
         "count(/response/lst[@name='expanded']/result)=2",
@@ -342,7 +323,6 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
     );
 
     //Test fl with score with multi-sort
-
     assertQ(req(params, "expand.sort", "test_l desc, score asc", "fl", "id,test_i,score"),
         "*[count(/response/result/doc)=2]",
         "count(/response/lst[@name='expanded']/result)=2",
@@ -356,6 +336,22 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
         "count(//*[@name='score' and .='NaN'])=0",
         "count(/response/lst[@name='expanded']/result/doc[number(*/@name='score')!=number(*/@name='test_i')])=0"
     );
+
+    // Test for expand with collapse
+    // when matched docs have fewer unique values
+    params = params("q", "*:*", "sort", "id asc", "fl", "id", "rows", "6", "expand", "true", "expand.sort", "id asc");
+    assertQ(req(params, "expand.field", "term_s"),
+        "*[count(/response/result/doc)=6]",
+        "/response/lst[@name='expanded']/result[@name='YYYY']/doc[1]/str[@name='id'][.='7']",
+        "/response/lst[@name='expanded']/result[@name='YYYY']/doc[2]/str[@name='id'][.='8']",
+        "count(//*[@name='score'])=0"
+    );
+    assertQ(req(params, "expand.field", "test_f"),
+        "*[count(/response/result/doc)=6]",
+        "/response/lst[@name='expanded']/result[@name='200.0']/doc[1]/str[@name='id'][.='8']",
+        "/response/lst[@name='expanded']/result[@name='2000.0']/doc[1]/str[@name='id'][.='7']",
+        "count(//*[@name='score'])=0"
+    );
   }
 
   @Test
@@ -416,4 +412,19 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
     resetExceptionIgnores();
   }
+
+  /**
+   * randomize addition of docs into bunch of segments
+   * TODO: there ought to be a test utility to do this; even add in batches
+   */
+  private void createIndex(String[][] docs) {
+    Collections.shuffle(Arrays.asList(docs), random());
+    for (String[] doc : docs) {
+      assertU(adoc(doc));
+      if (random().nextBoolean()) {
+        assertU(commit());
+      }
+    }
+    assertU(commit());
+  }
 
 }
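
Note (reviewer illustration, not part of the patch): the ExpandComponent hunks above replace fixed-size BytesRef[] buffers with ArrayList<BytesRef>, so that the terms handed to TermInSetQuery never contain null slots when the matched docs produce fewer unique group values than the requested capacity. The sketch below shows that before/after shape in isolation. It assumes only the Lucene classes already named in the diff (BytesRef, TermInSetQuery); the class name GroupQuerySketch, the method names arrayVersion/listVersion, and the Iterable<BytesRef> parameter (a stand-in for the HPPC iterators used in the real code) are hypothetical.

import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.util.BytesRef;

public class GroupQuerySketch {

  // Pre-patch shape: "size" is only an upper bound on the number of unique
  // group values, so when fewer values are actually collected the trailing
  // array slots stay null, which is the situation the SOLR-13877 fix removes.
  static Query arrayVersion(String field, int size, Iterable<BytesRef> uniqueValues) {
    BytesRef[] terms = new BytesRef[size];
    int index = -1;
    for (BytesRef value : uniqueValues) {
      terms[++index] = value;
    }
    return new TermInSetQuery(field, terms); // null elements whenever index + 1 < size
  }

  // Post-patch shape: the list only ever holds the values that were actually
  // collected, so "size" becomes a plain capacity hint and no null term can
  // reach the query.
  static Query listVersion(String field, int size, Iterable<BytesRef> uniqueValues) {
    List<BytesRef> terms = new ArrayList<>(size);
    for (BytesRef value : uniqueValues) {
      terms.add(value);
    }
    return new TermInSetQuery(field, terms);
  }
}

With nothing left in the rewritten ordBytes variant that throws a checked exception, the patch can also drop its throws Exception declaration and the caller's try/catch-to-IOException wrapper, as shown in the first ExpandComponent hunk.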