mirror of https://github.com/apache/lucene.git
SOLR-13403: fix NPE in terms for DatePointField
* This fixes an NPE and adds support for DatePointField in the terms component
This commit is contained in:
parent
441af3e7aa
commit
c65451115c
|
@ -51,6 +51,8 @@ Bug Fixes
|
|||
|
||||
* SOLR-13827: Fail on unknown operation in Request Parameters API (Munendra S N, noble)
|
||||
|
||||
* SOLR-13403: Fix NPE in TermsComponent for DatePointField (yonik, Munendra S N)
|
||||
|
||||
Other Changes
|
||||
---------------------
|
||||
|
||||
|
|
|
@ -50,6 +50,7 @@ import org.apache.solr.common.params.TermsParams;
|
|||
import org.apache.solr.common.util.NamedList;
|
||||
import org.apache.solr.common.util.SimpleOrderedMap;
|
||||
import org.apache.solr.common.util.StrUtils;
|
||||
import org.apache.solr.common.util.Utils;
|
||||
import org.apache.solr.handler.component.HttpShardHandlerFactory.WhitelistHostChecker;
|
||||
import org.apache.solr.request.SimpleFacets.CountPair;
|
||||
import org.apache.solr.schema.FieldType;
|
||||
|
@ -201,7 +202,7 @@ public class TermsComponent extends SearchComponent {
|
|||
SchemaField sf = rb.req.getSchema().getFieldOrNull(field);
|
||||
if (sf != null && sf.getType().isPointField()) {
|
||||
// FIXME: terms.ttf=true is not supported for pointFields
|
||||
if (lowerStr!=null || upperStr!=null || prefix!=null || regexp!=null) {
|
||||
if (lowerStr != null || upperStr != null || prefix != null || regexp != null) {
|
||||
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
|
||||
String.format(Locale.ROOT, "The terms component does not support Points-based fields with sorting or with parameters %s,%s,%s,%s ", TermsParams.TERMS_LOWER, TermsParams.TERMS_UPPER, TermsParams.TERMS_PREFIX_STR, TermsParams.TERMS_REGEXP_STR));
|
||||
}
|
||||
|
@ -221,7 +222,7 @@ public class TermsComponent extends SearchComponent {
|
|||
}
|
||||
|
||||
for (CountPair<MutableValue, Integer> item : queue) {
|
||||
fieldTerms.add(item.key.toString(), item.val);
|
||||
fieldTerms.add(Utils.OBJECT_TO_STRING.apply(item.key.toObject()), item.val);
|
||||
}
|
||||
continue;
|
||||
} else {
|
||||
|
@ -237,7 +238,7 @@ public class TermsComponent extends SearchComponent {
|
|||
if (count < 0) break;
|
||||
if (count < freqmin || count > freqmax) continue;
|
||||
if (++num > limit) break;
|
||||
ew.put(mv.toString(), (int)count); // match the numeric type of terms
|
||||
ew.put(Utils.OBJECT_TO_STRING.apply(mv.toObject()), (int)count); // match the numeric type of terms
|
||||
}
|
||||
});
|
||||
***/
|
||||
|
@ -250,7 +251,7 @@ public class TermsComponent extends SearchComponent {
|
|||
if (count < 0) break;
|
||||
if (count < freqmin || count > freqmax) continue;
|
||||
if (++num > limit) break;
|
||||
fieldTerms.add(mv.toString(), (int)count); // match the numeric type of terms
|
||||
fieldTerms.add(Utils.OBJECT_TO_STRING.apply(mv.toObject()), (int)count); // match the numeric type of terms
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
|
|
@ -80,6 +80,7 @@ public class PointMerger {
|
|||
seg = new DoubleSeg(pv, capacity);
|
||||
break;
|
||||
case DATE:
|
||||
seg = new DateSeg(pv, capacity);
|
||||
break;
|
||||
}
|
||||
int count = seg.setNextValue();
|
||||
|
|
|
@ -841,6 +841,11 @@
|
|||
<dynamicField name="*_ds_ni_p" type="pdouble" indexed="false" stored="true" docValues="true" multiValued="true"/>
|
||||
<dynamicField name="*_sortable" type="sortable_text" indexed="true" multiValued="false" stored="true" />
|
||||
|
||||
<dynamicField name="*_date_p" type="pdate" indexed="true" stored="true" docValues="true" multiValued="false"/>
|
||||
<dynamicField name="*_dates_p" type="pdate" indexed="true" stored="true" docValues="true" multiValued="true"/>
|
||||
<dynamicField name="*_date_ni_p" type="pdate" indexed="false" stored="true" docValues="true" multiValued="false"/>
|
||||
<dynamicField name="*_dates_ni_p" type="pdate" indexed="false" stored="true" docValues="true" multiValued="true"/>
|
||||
|
||||
<copyField source="single_i_dvn" dest="copy_single_i_dvn"/>
|
||||
<copyField source="single_d_dvn" dest="copy_single_d_dvn"/>
|
||||
<copyField source="single_s_dvn" dest="copy_single_s_dvn"/>
|
||||
|
|
|
@ -40,13 +40,13 @@ public class DistributedTermsComponentTest extends BaseDistributedSearchTestCase
|
|||
Random random = random();
|
||||
del("*:*");
|
||||
index(id, random.nextInt(), "b_t", "snake a,b spider shark snail slug seal", "foo_i", "1");
|
||||
index(id, random.nextInt(), "b_t", "snake spider shark snail slug", "foo_i", "2");
|
||||
index(id, random.nextInt(), "b_t", "snake spider shark snail slug", "foo_i", "2", "foo_date_p", "2015-01-03T14:30:00Z");
|
||||
index(id, random.nextInt(), "b_t", "snake spider shark snail", "foo_i", "3");
|
||||
index(id, random.nextInt(), "b_t", "snake spider shark", "foo_i", "2");
|
||||
index(id, random.nextInt(), "b_t", "snake spider", "c_t", "snake spider");
|
||||
index(id, random.nextInt(), "b_t", "snake", "c_t", "snake");
|
||||
index(id, random.nextInt(), "b_t", "ant zebra", "c_t", "ant zebra");
|
||||
index(id, random.nextInt(), "b_t", "zebra", "c_t", "zebra");
|
||||
index(id, random.nextInt(), "b_t", "snake spider shark", "foo_i", "2", "foo_date_p", "2014-03-15T12:00:00Z");
|
||||
index(id, random.nextInt(), "b_t", "snake spider", "c_t", "snake spider", "foo_date_p", "2014-03-15T12:00:00Z");
|
||||
index(id, random.nextInt(), "b_t", "snake", "c_t", "snake", "foo_date_p", "2014-03-15T12:00:00Z");
|
||||
index(id, random.nextInt(), "b_t", "ant zebra", "c_t", "ant zebra", "foo_date_p", "2015-01-03T14:30:00Z");
|
||||
index(id, random.nextInt(), "b_t", "zebra", "c_t", "zebra", "foo_date_p", "2015-01-03T14:30:00Z");
|
||||
commit();
|
||||
|
||||
handle.clear();
|
||||
|
@ -66,6 +66,11 @@ public class DistributedTermsComponentTest extends BaseDistributedSearchTestCase
|
|||
query("qt", "/terms", "shards.qt", "/terms", "terms", "true", "terms.fl", "foo_i", "terms.stats", "true","terms.list", "2,3,1");
|
||||
query("qt", "/terms", "shards.qt", "/terms", "terms", "true", "terms.fl", "b_t", "terms.list", "snake,zebra", "terms.ttf", "true");
|
||||
query("qt", "/terms", "shards.qt", "/terms", "terms", "true", "terms.fl", "b_t", "terms.fl", "c_t", "terms.list", "snake,ant,zebra", "terms.ttf", "true");
|
||||
|
||||
// for date point field
|
||||
query("qt", "/terms", "shards.qt", "/terms", "terms", "true", "terms.fl", "foo_date_p");
|
||||
// terms.ttf=true doesn't work for point fields
|
||||
//query("qt", "/terms", "shards.qt", "/terms", "terms", "true", "terms.fl", "foo_date_p", "terms.ttf", "true");
|
||||
}
|
||||
|
||||
protected QueryResponse query(Object... q) throws Exception {
|
||||
|
|
|
@ -582,4 +582,28 @@ public class TermsComponentTest extends SolrTestCaseJ4 {
|
|||
private static String createShardQueryParamsString(ModifiableSolrParams params) {
|
||||
return TermsComponent.createShardQuery(params).params.toString();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDatePointField() throws Exception {
|
||||
String[] dates = new String[]{"2015-01-03T14:30:00Z", "2014-03-15T12:00:00Z"};
|
||||
for (int i = 0; i < 100; i++) {
|
||||
assertU(adoc("id", Integer.toString(100000+i), "foo_pdt", dates[i % 2]) );
|
||||
if (random().nextInt(10) == 0) assertU(commit()); // make multiple segments
|
||||
}
|
||||
assertU(commit());
|
||||
assertU(adoc("id", Integer.toString(100102), "foo_pdt", dates[1]));
|
||||
assertU(commit());
|
||||
|
||||
try {
|
||||
assertQ(req("indent","true", "qt","/terms", "terms","true",
|
||||
"terms.fl","foo_pdt", "terms.sort","count"),
|
||||
"count(//lst[@name='foo_pdt']/*)=2",
|
||||
"//lst[@name='foo_pdt']/int[1][@name='" + dates[1] + "'][.='51']",
|
||||
"//lst[@name='foo_pdt']/int[2][@name='" + dates[0] + "'][.='50']"
|
||||
);
|
||||
} finally {
|
||||
assertU(delQ("foo_pdt:[* TO *]"));
|
||||
assertU(commit());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -36,12 +36,14 @@ import java.util.ArrayList;
|
|||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
import java.util.TreeSet;
|
||||
|
@ -324,6 +326,13 @@ public class Utils {
|
|||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Util function to convert {@link Object} to {@link String}
|
||||
* Specially handles {@link Date} to string conversion
|
||||
*/
|
||||
public static final Function<Object, String> OBJECT_TO_STRING =
|
||||
obj -> ((obj instanceof Date) ? Objects.toString(((Date) obj).toInstant()) : Objects.toString(obj));
|
||||
|
||||
public static Object fromJSON(InputStream is, Function<JSONParser, ObjectBuilder> objBuilderProvider) {
|
||||
try {
|
||||
return objBuilderProvider.apply(getJSONParser((new InputStreamReader(is, StandardCharsets.UTF_8)))).getVal();
|
||||
|
@ -646,7 +655,6 @@ public class Utils {
|
|||
* @param input the json with new values
|
||||
* @return whether there was any change made to sink or not.
|
||||
*/
|
||||
|
||||
public static boolean mergeJson(Map<String, Object> sink, Map<String, Object> input) {
|
||||
boolean isModified = false;
|
||||
for (Map.Entry<String, Object> e : input.entrySet()) {
|
||||
|
@ -728,7 +736,7 @@ public class Utils {
|
|||
}
|
||||
|
||||
public static final InputStreamConsumer<?> JAVABINCONSUMER = is -> new JavaBinCodec().unmarshal(is);
|
||||
public static final InputStreamConsumer<?> JSONCONSUMER = is -> Utils.fromJSON(is);
|
||||
public static final InputStreamConsumer<?> JSONCONSUMER = Utils::fromJSON;
|
||||
|
||||
public static InputStreamConsumer<ByteBuffer> newBytesConsumer(int maxSize) {
|
||||
return is -> {
|
||||
|
|
Loading…
Reference in New Issue