mirror of https://github.com/apache/lucene.git
SOLR-7452: convert bucket values in FacetStream from Integer to Long for Calcite; make bucket labels in the JSON Facet API consistent for facet refinement
parent d1db5f7af9
commit a18a4ce245
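For context (not part of the diff below): both halves of this change address the same underlying problem. java.lang.Integer and java.lang.Long are distinct boxed types that neither compare equal nor cast to one another, and a bucket value can change numeric type along the way (refinement info is round-tripped through JSON, and the Calcite-backed SQL layer expects Long). A minimal plain-Java sketch of that root cause, using no Solr APIs:

// Plain-Java illustration of the Integer/Long mismatch; nothing here is Solr API.
public class IntegerLongMismatch {
  public static void main(String[] args) {
    Object asInt = Integer.valueOf(2);   // e.g. the native value of an "int" field
    Object asLong = Long.valueOf(2L);    // e.g. the same value after a JSON round trip

    System.out.println(asInt.equals(asLong)); // false -- boxed numerics only equal their own class

    try {
      Long l = (Long) asInt;                  // no widening conversion between boxed types
      System.out.println(l);
    } catch (ClassCastException e) {
      System.out.println("ClassCastException casting Integer to Long");
    }

    // The fix is to normalize explicitly at the boundary, in whichever direction is needed:
    long widened = ((Integer) asInt).longValue();
    System.out.println(widened == (Long) asLong); // true once both sides agree on a type
  }
}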
@@ -680,6 +680,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
   private SimpleOrderedMap<Object> refineBucket(Object bucketVal, boolean skip, Map<String,Object> facetInfo) throws IOException {
     SimpleOrderedMap<Object> bucket = new SimpleOrderedMap<>();
     FieldType ft = sf.getType();
+    bucketVal = ft.toNativeType(bucketVal);  // refinement info passed in as JSON will cause int->long and float->double
     bucket.add("val", bucketVal);
     // String internal = ft.toInternal( tobj.toString() ); // TODO - we need a better way to get from object to query...
 
@@ -533,6 +533,10 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
 
     public IntCalc(final SchemaField f) { super(f); }
     @Override
+    public Comparable bitsToValue(long bits) {
+      return (int)bits;
+    }
+    @Override
     protected Integer parseStr(String rawval) {
       return Integer.valueOf(rawval);
     }
@@ -700,7 +704,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
 
     SimpleOrderedMap<Object> bucket = new SimpleOrderedMap<>();
     FieldType ft = sf.getType();
-    bucket.add("val", bucketVal);
+    bucket.add("val", range.low); // use "low" instead of bucketVal because it will be the right type (we may have been passed back long instead of int for example)
     // String internal = ft.toInternal( tobj.toString() ); // TODO - we need a better way to get from object to query...
 
     Query domainQ = sf.getType().getRangeQuery(null, sf, range.low == null ? null : calc.formatValue(range.low), range.high==null ? null : calc.formatValue(range.high), range.includeLower, range.includeUpper);
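A hedged sketch (plain Java, no Solr classes) of the merge problem the refineBucket changes above guard against: if a refined bucket comes back labeled with a different numeric type than the label computed locally, a label-keyed merge treats them as two different buckets instead of combining their counts. The values below are made up for illustration.

import java.util.LinkedHashMap;
import java.util.Map;

// Illustration only: merging shard buckets keyed by label, with mismatched label types.
public class BucketMergeSketch {
  public static void main(String[] args) {
    Map<Object, Long> merged = new LinkedHashMap<>();
    merged.merge(Integer.valueOf(2), 3L, Long::sum);   // label computed locally for an "int" field
    merged.merge(Long.valueOf(2L), 1L, Long::sum);     // same label, but parsed from refinement JSON as Long
    System.out.println(merged);                        // {2=3, 2=1} -- the counts never combine

    // Normalizing the refined label back to the field's native type fixes the merge.
    Map<Object, Long> fixed = new LinkedHashMap<>();
    fixed.merge(Integer.valueOf(2), 3L, Long::sum);
    fixed.merge(Integer.valueOf((int) 2L), 1L, Long::sum);
    System.out.println(fixed);                         // {2=4}
  }
}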
@@ -236,18 +236,18 @@ public class TestJsonFacetRefinement extends SolrTestCaseHS {
 
   @Test
   public void testBasicRefinement() throws Exception {
-    ModifiableSolrParams p = params("cat_s", "cat_s", "xy_s", "xy_s", "num_d", "num_d", "qw_s", "qw_s", "er_s","er_s");
+    ModifiableSolrParams p = params("cat_s", "cat_s", "cat_i", "cat_i", "xy_s", "xy_s", "num_d", "num_d", "qw_s", "qw_s", "er_s","er_s");
     doBasicRefinement( p );
 
     p.set("terms","method:dvhash,");
     doBasicRefinement( p );
 
-    // multi-valued strings
-    p = params("cat_s", "cat_ss", "xy_s", "xy_ss", "num_d", "num_d", "qw_s", "qw_ss", "er_s","er_ss");
+    // multi-valued
+    p = params("cat_s", "cat_ss", "cat_i", "cat_is", "xy_s", "xy_ss", "num_d", "num_d", "qw_s", "qw_ss", "er_s","er_ss");
     doBasicRefinement( p );
 
     // single valued docvalues
-    p = params("cat_s", "cat_sd", "xy_s", "xy_sd", "num_d", "num_dd", "qw_s", "qw_sd", "er_s","er_sd");
+    p = params("cat_s", "cat_sd", "cat_i", "cat_id", "xy_s", "xy_sd", "num_d", "num_dd", "qw_s", "qw_sd", "er_s","er_sd");
     doBasicRefinement( p );
   }
 
@@ -262,21 +262,22 @@ public class TestJsonFacetRefinement extends SolrTestCaseHS {
     client.deleteByQuery("*:*", null);
 
     String cat_s = p.get("cat_s");
+    String cat_i = p.get("cat_i"); // just like cat_s, but a number
     String xy_s = p.get("xy_s");
     String qw_s = p.get("qw_s");
     String er_s = p.get("er_s"); // this field is designed to test numBuckets refinement... the first phase will only have a single bucket returned for the top count bucket of cat_s
     String num_d = p.get("num_d");
 
-    clients.get(0).add( sdoc("id", "01", "all_s","all", cat_s, "A", xy_s, "X" ,num_d, -1, qw_s, "Q", er_s,"E") ); // A wins count tie
-    clients.get(0).add( sdoc("id", "02", "all_s","all", cat_s, "B", xy_s, "Y", num_d, 3 ) );
+    clients.get(0).add( sdoc("id", "01", "all_s","all", cat_s, "A", cat_i,1, xy_s, "X" ,num_d, -1, qw_s, "Q", er_s,"E") ); // A wins count tie
+    clients.get(0).add( sdoc("id", "02", "all_s","all", cat_s, "B", cat_i,2, xy_s, "Y", num_d, 3 ) );
 
-    clients.get(1).add( sdoc("id", "11", "all_s","all", cat_s, "B", xy_s, "X", num_d, -5 , er_s,"E") ); // B highest count
-    clients.get(1).add( sdoc("id", "12", "all_s","all", cat_s, "B", xy_s, "Y", num_d, -11, qw_s, "W" ) );
-    clients.get(1).add( sdoc("id", "13", "all_s","all", cat_s, "A", xy_s, "X", num_d, 7 , er_s,"R") ); // "R" will only be picked up via refinement when parent facet is cat_s
+    clients.get(1).add( sdoc("id", "11", "all_s","all", cat_s, "B", cat_i,2, xy_s, "X", num_d, -5 , er_s,"E") ); // B highest count
+    clients.get(1).add( sdoc("id", "12", "all_s","all", cat_s, "B", cat_i,2, xy_s, "Y", num_d, -11, qw_s, "W" ) );
+    clients.get(1).add( sdoc("id", "13", "all_s","all", cat_s, "A", cat_i,1, xy_s, "X", num_d, 7 , er_s,"R") ); // "R" will only be picked up via refinement when parent facet is cat_s
 
-    clients.get(2).add( sdoc("id", "21", "all_s","all", cat_s, "A", xy_s, "X", num_d, 17, qw_s, "W", er_s,"E") ); // A highest count
-    clients.get(2).add( sdoc("id", "22", "all_s","all", cat_s, "A", xy_s, "Y", num_d, -19 ) );
-    clients.get(2).add( sdoc("id", "23", "all_s","all", cat_s, "B", xy_s, "X", num_d, 11 ) );
+    clients.get(2).add( sdoc("id", "21", "all_s","all", cat_s, "A", cat_i,1, xy_s, "X", num_d, 17, qw_s, "W", er_s,"E") ); // A highest count
+    clients.get(2).add( sdoc("id", "22", "all_s","all", cat_s, "A", cat_i,1, xy_s, "Y", num_d, -19 ) );
+    clients.get(2).add( sdoc("id", "23", "all_s","all", cat_s, "B", cat_i,2, xy_s, "X", num_d, 11 ) );
 
     client.commit();
 
@@ -315,6 +316,17 @@ public class TestJsonFacetRefinement extends SolrTestCaseHS {
             "}"
     );
 
+    // same as above, but with an integer field instead of a string
+    client.testJQ(params(p, "q", "*:*",
+        "json.facet", "{" +
+            "cat0:{${terms} type:terms, field:${cat_i}, sort:'count desc', limit:1, overrequest:0, refine:true}" +
+            "}"
+        )
+        , "facets=={ count:8" +
+            ", cat0:{ buckets:[ {val:1,count:4} ] }" + // w/o overrequest, we need refining to get the correct count.
+            "}"
+    );
+
     // basic refining test through/under a query facet
     client.testJQ(params(p, "q", "*:*",
         "json.facet", "{" +
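Worked out (not part of the test), here is why the new assertion above expects count:4 only with refinement. From the docs indexed earlier in this test, the per-shard counts for cat_i=1 are 1, 1 and 2 (shards 0, 1, 2). With limit:1 and overrequest:0, phase one returns only each shard's single top bucket: shard 0 returns val:1 (the tie-winner), shard 1 returns val:2, shard 2 returns val:1, so shard 1's contribution to val:1 is missed until refinement asks for it. A tiny sketch of that arithmetic:

// Worked example (not Solr code): why refine:true is needed to reach count:4 for val:1.
public class RefineCountSketch {
  public static void main(String[] args) {
    int[] shardCountsVal1 = {1, 1, 2};  // docs with cat_i=1 on shards 0, 1, 2
    int unrefined = shardCountsVal1[0] + shardCountsVal1[2];                       // 3
    int refined   = shardCountsVal1[0] + shardCountsVal1[1] + shardCountsVal1[2];  // 4
    System.out.println("val:1 without refinement = " + unrefined);
    System.out.println("val:1 with refinement    = " + refined);
  }
}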
@@ -507,6 +507,12 @@ public class TestJsonFacets extends SolrTestCaseHS {
     if (terms_method != null) {
       terms=terms+terms_method;
     }
+    String refine_method = p.get("refine_method");
+    if (refine_method == null && random().nextBoolean()) {
+      refine_method = "refine:true,";
+    }
+    if (refine_method != null) terms = terms + refine_method;
+
     p.set("terms", terms);
     // "${terms}" should be put at the beginning of generic terms facets.
     // It may specify "method=..." or "limit:-1", so should not be used if the facet explicitly specifies.
@@ -477,6 +477,9 @@ public class FacetStream extends TupleStream implements Expressible {
       for(int b=0; b<allBuckets.size(); b++) {
         NamedList bucket = (NamedList)allBuckets.get(b);
         Object val = bucket.get("val");
+        if (val instanceof Integer) {
+          val=((Integer)val).longValue(); // calcite currently expects Long values here
+        }
         Tuple t = currentTuple.clone();
         t.put(bucketName, val);
         int nextLevel = level+1;