mirror of https://github.com/apache/lucene.git
SOLR-12535: index time boosts in JSON are no longer accepted
parent 1d0a086217
commit 106d300052
@@ -65,6 +65,10 @@ Upgrade Notes
   SchemaSimilarityFactory, then LegacyBM25Similarity is automatically selected for 'luceneMatchVersion' < 8.0.0.
   See also explanation in Reference Guide chapter "Other Schema Elements".
 
+* SOLR-12535: Solr no longer accepts index time boosts in JSON provided to Solr. This used to be provided like so:
+  {'id':'1', 'val_s':{'value':'foo', 'boost':2.0}} but will now produce an error. An object/map structure will now only
+  be interpreted as a child document or an atomic update; nothing else. A uniqueKey is currently required on all child
+  documents to be interpreted as such, though this may change in the future. (David Smiley)
 
 New Features
 ----------------------
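For illustration only (field names other than id/val_s are hypothetical), the request bodies below show what this upgrade note means in practice: the old boost form is rejected, while an object value is now read only as an atomic update or, when it carries the schema's uniqueKey, as a child document.

  Rejected after this change (index-time boost):
    {"id":"1", "val_s":{"value":"foo", "boost":2.0}}
  Still accepted, atomic update (keys are update operations):
    {"id":"1", "val_s":{"add":"foo"}}
  Still accepted, child document (nested object carries the uniqueKey, here assumed to be "id"):
    {"id":"1", "child_doc":{"id":"1.1", "val_s":"foo"}}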
@@ -137,6 +141,8 @@ Other Changes
 
 * SOLR-13036: Fix retry logic in JettySolrRunner (Gus Heck)
 
+* SOLR-12535: Solr no longer accepts index time boosts in JSON provided to Solr. (David Smiley)
+
 ================== 7.7.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
@@ -558,94 +558,18 @@ public class JsonLoader extends ContentStreamLoader {
             sdoc.addChildDocument(parseDoc(ev));
           }
         } else {
-          SolrInputField sif = new SolrInputField(fieldName);
-          parseFieldValue(sif);
-          // pulling out the pieces may seem weird, but it's because
+          ev = parser.nextEvent();
+          Object val = parseFieldValue(ev, fieldName);
           // SolrInputDocument.addField will do the right thing
           // if the doc already has another value for this field
           // (ie: repeating fieldname keys)
-          sdoc.addField(sif.getName(), sif.getValue());
+          sdoc.addField(fieldName, val);
         }
 
       }
     }
 
-    private void parseFieldValue(SolrInputField sif) throws IOException {
-      int ev = parser.nextEvent();
-      if (ev == JSONParser.OBJECT_START) {
-        parseExtendedFieldValue(ev, sif);
-      } else {
-        Object val = parseNormalFieldValue(ev, sif);
-        sif.setValue(val);
-      }
-    }
-
-    /**
-     * A method to either extract an index time boost (deprecated), a map for atomic update, or a child document.
-     * firstly, a solr document SolrInputDocument constructed. It is then determined whether the document is indeed a childDocument(if it has a unique field).
-     * If so, it is added.
-     * Otherwise the document is looped over as a map, and is then parsed as an Atomic Update if that is the case.
-     * @param ev json parser event
-     * @param sif input field to add value to.
-     * @throws IOException in case of parsing exception.
-     */
-    private void parseExtendedFieldValue(int ev, SolrInputField sif) throws IOException {
-      assert ev == JSONParser.OBJECT_START;
-
-      SolrInputDocument extendedSolrDocument = parseDoc(ev);
-
-      if (isChildDoc(extendedSolrDocument)) {
-        sif.addValue(extendedSolrDocument);
-        return;
-      }
-
-      Object normalFieldValue = null;
-      Map<String, Object> extendedInfo = null;
-
-      for (SolrInputField entry: extendedSolrDocument) {
-        Object val = entry.getValue();
-        String label = entry.getName();
-        if ("boost".equals(label)) {
-          Object boostVal = val;
-          if (!(boostVal instanceof Double)) {
-            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Boost should have number. "
-                + "Unexpected value: " + boostVal.toString() + "field=" + label);
-          }
-
-          String message = "Ignoring field boost: " + boostVal.toString() + " as index-time boosts are not supported anymore";
-          if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) {
-            log.warn(message);
-          } else {
-            log.debug(message);
-          }
-        } else if ("value".equals(label)) {
-          normalFieldValue = val;
-        } else {
-          // If we encounter other unknown map keys, then use a map
-          if (extendedInfo == null) {
-            extendedInfo = new HashMap<>(2);
-          }
-          // for now, the only extended info will be field values
-          // we could either store this as an Object or a SolrInputField
-          extendedInfo.put(label, val);
-        }
-        if (extendedInfo != null) {
-          if (normalFieldValue != null) {
-            extendedInfo.put("value", normalFieldValue);
-          }
-          sif.setValue(extendedInfo);
-        } else {
-          sif.setValue(normalFieldValue);
-        }
-      }
-    }
-
-
-    private Object parseNormalFieldValue(int ev, SolrInputField sif) throws IOException {
-      return ev == JSONParser.ARRAY_START ? parseArrayFieldValue(ev, sif): parseSingleFieldValue(ev, sif);
-    }
-
-    private Object parseSingleFieldValue(int ev, SolrInputField sif) throws IOException {
+    private Object parseFieldValue(int ev, String fieldName) throws IOException {
       switch (ev) {
         case JSONParser.STRING:
          return parser.getString();
@@ -661,18 +585,16 @@ public class JsonLoader extends ContentStreamLoader {
           parser.getNull();
           return null;
         case JSONParser.ARRAY_START:
-          return parseArrayFieldValue(ev, sif);
+          return parseArrayFieldValue(ev, fieldName);
         case JSONParser.OBJECT_START:
-          parseExtendedFieldValue(ev, sif);
-          return sif.getValue();
+          return parseObjectFieldValue(ev, fieldName);
         default:
           throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error parsing JSON field value. "
-              + "Unexpected " + JSONParser.getEventString(ev) + " at [" + parser.getPosition() + "], field=" + sif.getName());
+              + "Unexpected " + JSONParser.getEventString(ev) + " at [" + parser.getPosition() + "], field=" + fieldName);
       }
     }
 
 
-    private List<Object> parseArrayFieldValue(int ev, SolrInputField sif) throws IOException {
+    private List<Object> parseArrayFieldValue(int ev, String fieldName) throws IOException {
       assert ev == JSONParser.ARRAY_START;
 
      ArrayList lst = new ArrayList(2);
@@ -681,9 +603,27 @@ public class JsonLoader extends ContentStreamLoader {
         if (ev == JSONParser.ARRAY_END) {
           return lst;
         }
-        Object val = parseSingleFieldValue(ev, sif);
-        lst.add(val);
-        sif.setValue(null);
+        lst.add(parseFieldValue(ev, fieldName));
       }
     }
 
+    /**
+     * Parses this object as either a map for atomic update, or a child document.
+     */
+    private Object parseObjectFieldValue(int ev, String fieldName) throws IOException {
+      assert ev == JSONParser.OBJECT_START;
+
+      SolrInputDocument extendedSolrDocument = parseDoc(ev);
+      // is this a partial update or a child doc?
+      if (isChildDoc(extendedSolrDocument)) {
+        return extendedSolrDocument;
+      } else {
+        //return extendedSolrDocument.toMap(new HashMap<>(extendedSolrDocument.size())); not quite right
+        Map<String, Object> map = new HashMap<>(extendedSolrDocument.size());
+        for (SolrInputField inputField : extendedSolrDocument) {
+          map.put(inputField.getName(), inputField.getValue());
+        }
+        return map;
+      }
+    }
+
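A minimal sketch of the dispatch the new parseObjectFieldValue introduces, not the Solr code itself: an object that carries the schema's uniqueKey stays a child SolrInputDocument, anything else collapses to a plain map for atomic update. It assumes SolrJ on the classpath; "dispatch" and the uniqueKeyField parameter are hypothetical names standing in for parseObjectFieldValue/isChildDoc.

import java.util.HashMap;
import java.util.Map;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.SolrInputField;

class ObjectValueDispatchSketch {
  // Returns either the child document itself or a field-name -> value map.
  static Object dispatch(SolrInputDocument obj, String uniqueKeyField) {
    if (obj.containsKey(uniqueKeyField)) {
      return obj;                                // nested child document, kept as-is
    }
    Map<String, Object> map = new HashMap<>(obj.size());
    for (SolrInputField field : obj) {           // e.g. {"add":"foo"} becomes an atomic-update map
      map.put(field.getName(), field.getValue());
    }
    return map;
  }
}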
@@ -52,15 +52,7 @@ public class JsonLoaderTest extends SolrTestCaseJ4 {
       "  'doc': {\n" +
       "    'bool': true,\n" +
       "    'f0': 'v0',\n" +
-      "    'f2': {\n" +
-      "      'boost': 2.3,\n" +
-      "      'value': 'test'\n" +
-      "    },\n" +
-      "    'array': [ 'aaa', 'bbb' ],\n" +
-      "    'boosted': {\n" +
-      "      'boost': 6.7,\n" + // make sure we still accept boosts
-      "      'value': [ 'aaa', 'bbb' ]\n" +
-      "    }\n" +
+      "    'array': [ 'aaa', 'bbb' ]\n" +
       "  }\n" +
       "},\n" +
       "'add': {\n" +
@@ -98,19 +90,13 @@ public class JsonLoaderTest extends SolrTestCaseJ4 {
     assertEquals( 2, p.addCommands.size() );
 
     AddUpdateCommand add = p.addCommands.get(0);
-    SolrInputDocument d = add.solrDoc;
-    SolrInputField f = d.getField( "boosted" );
-    assertEquals(2, f.getValues().size());
+    assertEquals("SolrInputDocument(fields: [bool=true, f0=v0, array=[aaa, bbb]])", add.solrDoc.toString());
 
     //
     add = p.addCommands.get(1);
-    d = add.solrDoc;
-    f = d.getField( "f1" );
-    assertEquals(2, f.getValues().size());
+    assertEquals("SolrInputDocument(fields: [f1=[v1, v2], f2=null])", add.solrDoc.toString());
     assertEquals(false, add.overwrite);
-
-    assertEquals(0, d.getField("f2").getValueCount());
 
     // parse the commit commands
     assertEquals( 2, p.commitCommands.size() );
     CommitUpdateCommand commit = p.commitCommands.get( 0 );
@@ -235,26 +221,14 @@ public class JsonLoaderTest extends SolrTestCaseJ4 {
 
     // list
     checkFieldValueOrdering((pre+ "'f':[45,67,89]" +post)
-        .replace('\'', '"'),
-        1.0F);
+        .replace('\'', '"')
+    );
     // dup fieldname keys
     checkFieldValueOrdering((pre+ "'f':45,'f':67,'f':89" +post)
-        .replace('\'', '"'),
-        1.0F);
-    // extended w/boost
-    checkFieldValueOrdering((pre+ "'f':{'boost':4.0,'value':[45,67,89]}" +post)
-        .replace('\'', '"'),
-        4.0F);
-    // dup keys extended w/ multiplicitive boost
-    checkFieldValueOrdering((pre+
-        "'f':{'boost':2.0,'value':[45,67]}," +
-        "'f':{'boost':2.0,'value':89}"
-        +post)
-        .replace('\'', '"'),
-        4.0F);
-
+        .replace('\'', '"')
+    );
   }
-  private void checkFieldValueOrdering(String rawJson, float fBoost) throws Exception {
+  private void checkFieldValueOrdering(String rawJson) throws Exception {
     SolrQueryRequest req = req();
     SolrQueryResponse rsp = new SolrQueryResponse();
     BufferingRequestProcessor p = new BufferingRequestProcessor(null);
@@ -265,7 +239,7 @@ public class JsonLoaderTest extends SolrTestCaseJ4 {
     SolrInputDocument d = p.addCommands.get(0).solrDoc;
     assertEquals(2, d.getFieldNames().size());
     assertEquals("1", d.getFieldValue("id"));
-    assertEquals(new Object[] {45L, 67L, 89L} , d.getFieldValues("f").toArray());
+    assertArrayEquals(new Object[] {45L, 67L, 89L} , d.getFieldValues("f").toArray());
 
     d = p.addCommands.get(1).solrDoc;
     assertEquals(1, d.getFieldNames().size());
@@ -520,7 +494,7 @@ public class JsonLoaderTest extends SolrTestCaseJ4 {
     assertEquals(Collections.singletonList(expected), doc.getFieldValues(field));
   }
 
-  public void testExtendedFieldValues() throws Exception {
+  public void testAtomicUpdateFieldValue() throws Exception {
     String str = "[{'id':'1', 'val_s':{'add':'foo'}}]".replace('\'', '"');
     SolrQueryRequest req = req();
     SolrQueryResponse rsp = new SolrQueryResponse();
@@ -533,14 +507,7 @@ public class JsonLoaderTest extends SolrTestCaseJ4 {
     AddUpdateCommand add = p.addCommands.get(0);
     assertEquals(add.commitWithin, -1);
     assertEquals(add.overwrite, true);
-    SolrInputDocument d = add.solrDoc;
-
-    SolrInputField f = d.getField( "id" );
-    assertEquals("1", f.getValue());
-
-    f = d.getField( "val_s" );
-    Map<String,Object> map = (Map<String,Object>)f.getValue();
-    assertEquals("foo",map.get("add"));
+    assertEquals("SolrInputDocument(fields: [id=1, val_s={add=foo}])", add.solrDoc.toString());
 
     req.close();
   }
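The renamed test above covers the atomic-update path. As a hedged illustration in the same style, and not part of the commit, the three object shapes now behave as follows when fed to JsonLoader; the 'id'/'val_s' field names come from the test, while the child-document form under a 'child' field is an assumption based on the upgrade note.

// Accepted: atomic update, parsed into a Map value ({add=foo}), as asserted above.
String atomicUpdate = "[{'id':'1', 'val_s':{'add':'foo'}}]".replace('\'', '"');
// Accepted (per the upgrade note): a nested object carrying the uniqueKey is a child document.
String childDoc = "[{'id':'1', 'child':{'id':'1.1', 'val_s':'foo'}}]".replace('\'', '"');
// Rejected: the old index-time boost form now produces an error instead of a boosted value.
String boosted = "[{'id':'1', 'val_s':{'value':'foo', 'boost':2.0}}]".replace('\'', '"');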