SOLR-12020: fix refinement of terms facet on date field (don't use Date.toString)

yonik 2018-03-05 11:03:48 -05:00
parent e0d6465af9
commit fc2fd1dc25
4 changed files with 40 additions and 3 deletions

solr/CHANGES.txt

@@ -247,6 +247,9 @@ Bug Fixes
* SOLR-12011: Consistence problem when in-sync replicas are DOWN. (Cao Manh Dat)
+* SOLR-12020: JSON Facet API: terms facet on date field fails in refinement phase with
+  "Invalid Date String" error. (yonik)
Optimizations
----------------------
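
The underlying problem: during the refinement phase the coordinator sends each bucket value back to the shards, and a java.util.Date bucket value was rendered with Date.toString(), a locale- and time-zone-dependent form that Solr's date parsing rejects, hence the "Invalid Date String" error. Solr date fields expect the ISO-8601 form that java.time.Instant.toString() produces. A minimal illustration of the difference (not part of the commit; class name and sample value are made up):

  import java.time.Instant;
  import java.util.Date;

  public class DateStringDemo {
    public static void main(String[] args) {
      Date d = Date.from(Instant.parse("2001-02-03T01:02:03Z"));
      // Locale/zone dependent, e.g. "Sat Feb 03 01:02:03 UTC 2001"; Solr cannot parse this
      System.out.println(d.toString());
      // ISO-8601 form that Solr date fields accept
      System.out.println(Instant.ofEpochMilli(d.getTime()).toString());  // 2001-02-03T01:02:03Z
    }
  }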

solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java

@@ -18,8 +18,10 @@
package org.apache.solr.search.facet;
import java.io.IOException;
+import java.time.Instant;
import java.util.ArrayList;
import java.util.Collections;
+import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
@@ -682,9 +684,10 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
FieldType ft = sf.getType();
bucketVal = ft.toNativeType(bucketVal); // refinement info passed in as JSON will cause int->long and float->double
bucket.add("val", bucketVal);
-// String internal = ft.toInternal( tobj.toString() ); // TODO - we need a better way to get from object to query...
-Query domainQ = ft.getFieldQuery(null, sf, bucketVal.toString());
+// fieldQuery currently relies on a string input of the value...
+String bucketStr = bucketVal instanceof Date ? Instant.ofEpochMilli(((Date)bucketVal).getTime()).toString() : bucketVal.toString();
+Query domainQ = ft.getFieldQuery(null, sf, bucketStr);
fillBucket(bucket, domainQ, null, skip, facetInfo);
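
The fix above renders the refined bucket value in its external string form before building the field query, since getFieldQuery() takes the value as a string: ISO-8601 for a Date, plain toString() for everything else. The same guard, pulled out as a standalone sketch (hypothetical helper name, not in the commit):

  import java.time.Instant;
  import java.util.Date;

  final class BucketValues {
    // Render a refined bucket value the way getFieldQuery() expects it.
    static String toQueryString(Object bucketVal) {
      return bucketVal instanceof Date
          ? Instant.ofEpochMilli(((Date) bucketVal).getTime()).toString()
          : bucketVal.toString();
    }
  }

  // toQueryString(new Date(0)) -> "1970-01-01T00:00:00Z"
  // toQueryString(42L)         -> "42"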

solr/core/src/java/org/apache/solr/search/facet/FacetModule.java

@@ -17,7 +17,9 @@
package org.apache.solr.search.facet;
import java.io.IOException;
+import java.time.Instant;
import java.util.Collection;
+import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -38,7 +40,9 @@ import org.apache.solr.handler.component.ShardResponse;
import org.apache.solr.search.QueryContext;
import org.apache.solr.search.SyntaxError;
import org.apache.solr.util.RTimer;
+import org.noggit.CharArr;
import org.noggit.JSONUtil;
+import org.noggit.JSONWriter;
import org.noggit.ObjectBuilder;
public class FacetModule extends SearchComponent {
@@ -234,7 +238,23 @@ public class FacetModule extends SearchComponent {
Map<String,Object> finfo = new HashMap<>(1);
finfo.put(FACET_REFINE, refinement);
-String finfoStr = JSONUtil.toJSON(finfo, -1);
+// String finfoStr = JSONUtil.toJSON(finfo, -1); // this doesn't handle formatting of Date objects the way we want
+CharArr out = new CharArr();
+JSONWriter jsonWriter = new JSONWriter(out, -1) {
+  @Override
+  public void handleUnknownClass(Object o) {
+    // handle date formatting correctly
+    if (o instanceof Date) {
+      String s = Instant.ofEpochMilli(((Date)o).getTime()).toString();
+      writeString(s);
+      return;
+    }
+    super.handleUnknownClass(o);
+  }
+};
+jsonWriter.write(finfo);
+String finfoStr = out.toString();
// System.err.println("##################### REFINE=" + finfoStr);
shardsRefineRequest.params.add(FACET_INFO, finfoStr);
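
For context, the same noggit technique in a self-contained form: the JSONWriter subclass overrides handleUnknownClass() so that a java.util.Date inside the refinement map is written as an ISO-8601 JSON string instead of the default rendering the comment above calls out. An illustrative sketch, not code from the commit:

  import java.time.Instant;
  import java.util.Date;
  import java.util.HashMap;
  import java.util.Map;
  import org.noggit.CharArr;
  import org.noggit.JSONWriter;

  public class RefineJsonDemo {
    public static void main(String[] args) {
      Map<String, Object> finfo = new HashMap<>();
      finfo.put("val", Date.from(Instant.parse("2001-02-03T01:02:03Z")));

      CharArr out = new CharArr();
      JSONWriter writer = new JSONWriter(out, -1) {
        @Override
        public void handleUnknownClass(Object o) {
          if (o instanceof Date) {
            // write the Date as ISO-8601 rather than Date.toString()
            writeString(Instant.ofEpochMilli(((Date) o).getTime()).toString());
            return;
          }
          super.handleUnknownClass(o);
        }
      };
      writer.write(finfo);
      System.out.println(out.toString());  // prints something like {"val":"2001-02-03T01:02:03Z"}
    }
  }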

solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java

@@ -746,6 +746,17 @@ public class TestJsonFacets extends SolrTestCaseHS {
);
}
+// test field faceting on date field
+client.testJQ(params(p, "q", "*:*"
+    , "json.facet", "{" +
+        " f1:{${terms} type:field, field:${date}}" +
+        "}"
+    )
+    , "facets=={count:6 " +
+        ",f1:{ buckets:[ {val:'2001-01-01T01:01:01Z', count:1},{val:'2001-02-03T01:02:03Z', count:1},{val:'2002-02-02T02:02:02Z', count:1},{val:'2002-03-01T03:02:01Z', count:1},{val:'2003-03-03T03:03:03Z', count:1} ] }" +
+        "}"
+);
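
The ${terms} and ${date} macros appear to be filled in from the test's parameter set p. Outside the test harness, the same facet corresponds roughly to a JSON Facet API request like the following (collection and field names are placeholders):

  curl 'http://localhost:8983/solr/mycollection/query' -d '
  {
    "query": "*:*",
    "facet": {
      "f1": { "type": "terms", "field": "date_dt" }
    }
  }'
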
// percentiles 0,10,50,90,100
// catA: 2.0 2.2 3.0 3.8 4.0