SOLR-5285: Added a new [child ...] DocTransformer for optionally including Block-Join descendant documents inline in the results of a search

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1601028 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Chris M. Hostetter 2014-06-06 22:44:02 +00:00
parent 87c8a344a5
commit db538d864c
15 changed files with 933 additions and 64 deletions

View File

@ -126,6 +126,13 @@ New Features
* SOLR-6088: Add query re-ranking with the ReRankingQParserPlugin
(Joel Bernstein)
* SOLR-5285: Added a new [child ...] DocTransformer for optionally including
Block-Join descendant documents inline in the results of a search. This works
independent of whether the search itself is a block-join related query and is
supported by the xml, json, and javabin response formats.
(Varun Thacker via hossman)
Bug Fixes
----------------------

View File

@ -35,6 +35,7 @@ import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.ReturnFields;
import org.apache.solr.search.SolrReturnFields;
/**
*
@ -355,6 +356,21 @@ class JSONWriter extends TextResponseWriter {
writeVal(fname, val);
}
}
if(doc.hasChildDocuments()) {
if(first == false) {
writeMapSeparator();
indent();
}
writeKey("_childDocuments_", true);
writeArrayOpener(doc.getChildDocumentCount());
List<SolrDocument> childDocs = doc.getChildDocuments();
ReturnFields rf = new SolrReturnFields();
for(int i=0; i<childDocs.size(); i++) {
writeSolrDocument(null, childDocs.get(i), rf, i);
}
writeArrayCloser();
}
decLevel();
writeMapCloser();

View File

@ -0,0 +1,57 @@
package org.apache.solr.response;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.index.StorableField;
import org.apache.lucene.index.StoredDocument;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import java.util.ArrayList;
import java.util.List;
public class ResponseWriterUtil {

  /**
   * Utility method for converting a {@link StoredDocument} from the index into a
   * {@link SolrDocument} suitable for inclusion in a query response.
   */
  public static final SolrDocument toSolrDocument( StoredDocument doc, final IndexSchema schema ) {
    final SolrDocument result = new SolrDocument();
    for (StorableField field : doc.getFields()) {
      final String fieldName = field.name();
      if (result.get(fieldName) != null) {
        // Field seen before: append so repeated stored values accumulate.
        result.addField(fieldName, field);
      } else {
        final SchemaField schemaField = schema.getFieldOrNull(fieldName);
        if (schemaField != null && schemaField.multiValued()) {
          // First value of a multivalued field: wrap it in a list so that
          // even a single value is represented as a list in the response.
          final List<Object> values = new ArrayList<>();
          values.add(field);
          result.setField(fieldName, values);
        } else {
          result.setField(fieldName, field);
        }
      }
    }
    return result;
  }
}

View File

@ -80,8 +80,6 @@ public abstract class TextResponseWriter {
returnFields = rsp.getReturnFields();
}
/** done with this ResponseWriter... make sure any buffers are flushed to writer */
public void close() throws IOException {
writer.flushBuffer();
@ -226,28 +224,9 @@ public abstract class TextResponseWriter {
writeEndDocumentList();
}
public final SolrDocument toSolrDocument( StoredDocument doc )
public final SolrDocument toSolrDocument( StoredDocument doc )
{
SolrDocument out = new SolrDocument();
for( StorableField f : doc.getFields()) {
// Make sure multivalued fields are represented as lists
Object existing = out.get(f.name());
if (existing == null) {
SchemaField sf = schema.getFieldOrNull(f.name());
if (sf != null && sf.multiValued()) {
List<Object> vals = new ArrayList<>();
vals.add( f );
out.setField( f.name(), vals );
}
else{
out.setField( f.name(), f );
}
}
else {
out.addField( f.name(), f );
}
}
return out;
return ResponseWriterUtil.toSolrDocument(doc, schema);
}
public final void writeDocuments(String name, ResultContext res, ReturnFields fields ) throws IOException {

View File

@ -31,6 +31,7 @@ import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.XML;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.ReturnFields;
import org.apache.solr.search.SolrReturnFields;
/**
@ -198,6 +199,12 @@ public class XMLWriter extends TextResponseWriter {
}
writeVal(fname, val);
}
if(doc.hasChildDocuments()) {
for(SolrDocument childDoc : doc.getChildDocuments()) {
writeSolrDocument(null, childDoc, new SolrReturnFields(), idx);
}
}
decLevel();
writer.write("</doc>");

View File

@ -0,0 +1,172 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.response.transform;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.LazyDocument;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.index.StoredDocument;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.search.join.FixedBitSetCachingWrapperFilter;
import org.apache.lucene.search.join.ToChildBlockJoinQuery;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.UnicodeUtil;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.ResponseWriterUtil;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.DocIterator;
import org.apache.solr.search.DocList;
import org.apache.solr.search.QParser;
import org.apache.solr.search.SyntaxError;
/**
*
* @since solr 4.9
*
* This transformer returns all descendants of each parent document in a flat list nested inside the parent document.
*
*
* The "parentFilter" parameter is mandatory.
* Optionally you can provide a "childFilter" param to filter out which child documents should be returned and a
* "limit" param which provides an option to specify the number of child documents
* to be returned per parent document. By default it's set to 10.
*
* Examples -
* [child parentFilter="fieldName:fieldValue"]
* [child parentFilter="fieldName:fieldValue" childFilter="fieldName:fieldValue"]
* [child parentFilter="fieldName:fieldValue" childFilter="fieldName:fieldValue" limit=20]
*/
public class ChildDocTransformerFactory extends TransformerFactory {

  /**
   * Creates a {@link ChildDocTransformer} from the local params of the
   * <code>[child ...]</code> transformer in the fl.
   *
   * @throws SolrException (BAD_REQUEST) if the schema has no uniqueKeyField,
   *         the mandatory "parentFilter" param is missing, or either filter
   *         query fails to parse.
   */
  @Override
  public DocTransformer create(String field, SolrParams params, SolrQueryRequest req) {
    SchemaField uniqueKeyField = req.getSchema().getUniqueKeyField();
    if(uniqueKeyField == null) {
      throw new SolrException( ErrorCode.BAD_REQUEST,
          " ChildDocTransformer requires the schema to have a uniqueKeyField." );
    }
    String idField = uniqueKeyField.getName();

    // "parentFilter" is mandatory: it identifies which docs are block parents.
    String parentFilter = params.get( "parentFilter" );
    if( parentFilter == null ) {
      throw new SolrException( ErrorCode.BAD_REQUEST, "Parent filter should be sent as parentFilter=filterCondition" );
    }

    String childFilter = params.get( "childFilter" );
    int limit = params.getInt( "limit", 10 ); // max children returned per parent

    Filter parentsFilter = null;
    try {
      Query parentFilterQuery = QParser.getParser( parentFilter, null, req).getQuery();
      // Cache the parent bitset so it is only computed once per segment.
      parentsFilter = new FixedBitSetCachingWrapperFilter(new QueryWrapperFilter(parentFilterQuery));
    } catch (SyntaxError syntaxError) {
      // propagate the cause so the underlying parse failure is not lost
      throw new SolrException( ErrorCode.BAD_REQUEST, "Failed to create correct parent filter query", syntaxError );
    }

    Query childFilterQuery = null;
    if(childFilter != null) {
      try {
        childFilterQuery = QParser.getParser( childFilter, null, req).getQuery();
      } catch (SyntaxError syntaxError) {
        throw new SolrException( ErrorCode.BAD_REQUEST, "Failed to create correct child filter query", syntaxError );
      }
    }

    return new ChildDocTransformer( field, parentsFilter, idField, req.getSchema(), childFilterQuery, limit);
  }
}
/**
 * Transformer that, for each parent document in the results, runs a
 * {@link ToChildBlockJoinQuery} to fetch that parent's descendant documents
 * and attaches them via {@link SolrDocument#addChildDocument}.
 */
class ChildDocTransformer extends TransformerWithContext {
  private final String name;       // display name of the transformer ([child ...])
  private final String idField;    // uniqueKey field name used to locate the parent
  private final IndexSchema schema;
  private Filter parentsFilter;    // marks which docs in a block are parents
  private Query childFilterQuery;  // optional restriction on returned children; may be null
  private int limit;               // max number of child docs returned per parent

  public ChildDocTransformer( String name, final Filter parentsFilter, String idField, IndexSchema schema,
                              final Query childFilterQuery, int limit) {
    this.name = name;
    this.idField = idField;
    this.schema = schema;
    this.parentsFilter = parentsFilter;
    this.childFilterQuery = childFilterQuery;
    this.limit = limit;
  }

  @Override
  public String getName() {
    return name;
  }

  @Override
  public void transform(SolrDocument doc, int docid) {
    // Extract the parent's uniqueKey value as a String; the stored value may be
    // a StoredField, a (possibly binary) Field, or already a String.
    String parentId;
    Object parentIdField = doc.get(idField);
    if (parentIdField instanceof StoredField) {
      parentId = ((StoredField) parentIdField).stringValue();
    } else if (parentIdField instanceof Field){
      parentId = ((Field) parentIdField).stringValue();
      if(parentId == null) {
        // no string value: fall back to decoding the binary value as UTF-8
        parentId = ((Field) parentIdField).binaryValue().utf8ToString();
      }
    } else {
      parentId = (String) parentIdField;
    }

    try {
      // Look up this exact parent by uniqueKey, then walk down to its children
      // using the block-join query; childFilterQuery (if any) prunes the set.
      Query parentQuery = new TermQuery(new Term(idField, schema.getFieldType(idField).readableToIndexed(parentId)));
      Query query = new ToChildBlockJoinQuery(parentQuery, parentsFilter, false);
      DocList children = context.searcher.getDocList(query, childFilterQuery, new Sort(), 0, limit);
      if(children.matches() > 0) {
        DocIterator i = children.iterator();
        while(i.hasNext()) {
          Integer childDocNum = i.next();
          StoredDocument childDoc = context.searcher.doc(childDocNum);
          SolrDocument solrChildDoc = ResponseWriterUtil.toSolrDocument(childDoc, schema);

          // TODO: future enhancement...
          // support an fl local param in the transformer, which is used to build
          // a private ReturnFields instance that we use to prune unwanted field
          // names from solrChildDoc
          doc.addChildDocument(solrChildDoc);
        }
      }
    } catch (IOException e) {
      // best-effort: report the failure inside the doc rather than failing the request
      doc.put(name, "Could not fetch child Documents");
    }
  }
}

View File

@ -47,5 +47,6 @@ public abstract class TransformerFactory implements NamedListInitializedPlugin
defaultFactories.put( "value", new ValueAugmenterFactory() );
defaultFactories.put( "docid", new DocIdAugmenterFactory() );
defaultFactories.put( "shard", new ShardAugmenterFactory() );
defaultFactories.put( "child", new ChildDocTransformerFactory() );
}
}

View File

@ -0,0 +1,233 @@
package org.apache.solr.response;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.util.TestUtil;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.SolrInputDocument;
import org.junit.After;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Tests the [child ...] DocTransformer: verifies that block-join child
 * documents are attached to their parents in both JSON and XML responses,
 * and that the parentFilter, childFilter and limit params are honored.
 */
public class TestChildDocTransformer extends SolrTestCaseJ4 {

  private static String ID_FIELD = "id";
  // random titles shared by each parent/child pair in testParentFilter
  private String[] titleVals = new String[2];

  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig.xml","schema.xml");
  }

  @After
  public void cleanup() throws Exception {
    // wipe the index between tests so doc counts stay predictable
    assertU(delQ("*:*"));
    assertU(commit());
  }

  @Test
  public void testParentFilter() throws Exception {
    for(int i=0; i<titleVals.length; i++) {
      titleVals[i] = TestUtil.randomSimpleString(random(), 1, 20);
    }
    createIndex(titleVals);
    testParentFilterJSON();
    testParentFilterXML();
  }

  @Test
  public void testAllParams() throws Exception {
    createSimpleIndex();
    testChildDoctransformerJSON();
    testChildDoctransformerXML();
  }

  // Exercises parentFilter alone, with childFilter, and with limit against
  // the XML response writer (children appear as nested <doc> elements).
  private void testChildDoctransformerXML() {
    // all six children of the single parent doc
    String test1[] = new String[] {
        "//*[@numFound='1']",
        "/response/result/doc[1]/doc[1]/int[@name='id']='2'" ,
        "/response/result/doc[1]/doc[2]/int[@name='id']='3'" ,
        "/response/result/doc[1]/doc[3]/int[@name='id']='4'" ,
        "/response/result/doc[1]/doc[4]/int[@name='id']='5'" ,
        "/response/result/doc[1]/doc[5]/int[@name='id']='6'" ,
        "/response/result/doc[1]/doc[6]/int[@name='id']='7'"};

    // only the children matching childFilter title:foo (even ids)
    String test2[] = new String[] {
        "//*[@numFound='1']",
        "/response/result/doc[1]/doc[1]/int[@name='id']='2'" ,
        "/response/result/doc[1]/doc[2]/int[@name='id']='4'" ,
        "/response/result/doc[1]/doc[3]/int[@name='id']='6'" };

    // childFilter title:bar plus limit=2 caps the returned children
    String test3[] = new String[] {
        "//*[@numFound='1']",
        "/response/result/doc[1]/doc[1]/int[@name='id']='3'" ,
        "/response/result/doc[1]/doc[2]/int[@name='id']='5'" };

    assertQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ",
        "fl", "*,[child parentFilter=\"subject:parentDocument\"]"), test1);

    assertQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ",
        "fl", "subject,[child parentFilter=\"subject:parentDocument\" childFilter=\"title:foo\"]"), test2);

    assertQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ",
        "fl", "subject,[child parentFilter=\"subject:parentDocument\" childFilter=\"title:bar\" limit=2]"), test3);
  }

  // Same three scenarios as the XML variant, but validated against the JSON
  // response writer (children appear under _childDocuments_).
  private void testChildDoctransformerJSON() throws Exception {
    String[] test1 = new String[] {
        "/response/docs/[0]/_childDocuments_/[0]/id==2",
        "/response/docs/[0]/_childDocuments_/[1]/id==3",
        "/response/docs/[0]/_childDocuments_/[2]/id==4",
        "/response/docs/[0]/_childDocuments_/[3]/id==5",
        "/response/docs/[0]/_childDocuments_/[4]/id==6",
        "/response/docs/[0]/_childDocuments_/[5]/id==7"
    };

    String[] test2 = new String[] {
        "/response/docs/[0]/_childDocuments_/[0]/id==2",
        "/response/docs/[0]/_childDocuments_/[1]/id==4",
        "/response/docs/[0]/_childDocuments_/[2]/id==6"
    };

    String[] test3 = new String[] {
        "/response/docs/[0]/_childDocuments_/[0]/id==3",
        "/response/docs/[0]/_childDocuments_/[1]/id==5"
    };

    assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ",
        "fl", "*,[child parentFilter=\"subject:parentDocument\"]"), test1);

    assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ",
        "fl", "subject,[child parentFilter=\"subject:parentDocument\" childFilter=\"title:foo\"]"), test2);

    assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ",
        "fl", "subject,[child parentFilter=\"subject:parentDocument\" childFilter=\"title:bar\" limit=2]"), test3);
  }

  // One parent (id=1) with six children (ids 2-7); even ids get title:foo,
  // odd ids get title:bar.
  private void createSimpleIndex() {
    SolrInputDocument parentDocument = new SolrInputDocument();
    parentDocument.addField(ID_FIELD, "1");
    parentDocument.addField("subject", "parentDocument");
    for(int i=0; i< 6; i++) {
      SolrInputDocument childDocument = new SolrInputDocument();
      childDocument.addField(ID_FIELD, Integer.toString(i+2));
      if(i%2==0) {
        childDocument.addField("title", "foo");
      } else {
        childDocument.addField("title", "bar");
      }
      parentDocument.addChildDocument(childDocument);
    }
    try {
      Long version = addAndGetVersion(parentDocument, null);
      assertNotNull(version);
    } catch (Exception e) {
      fail("Failed to add document to the index");
    }
    assertU(commit());
    assertQ(req("q", "*:*"), "//*[@numFound='" + 7 + "']");
  }

  // Two blocks, each parent -> child -> grandchild (ids 1/2/3 and 4/5/6),
  // with random commits interleaved to vary segment layout.
  private static void createIndex(String[] titleVals) {
    String[] parentIDS = new String[] {"1", "4"};
    String[] childDocIDS = new String[] {"2", "5"};
    String[] grandChildIDS = new String[] {"3", "6"};

    for(int i=0; i< parentIDS.length; i++) {
      SolrInputDocument parentDocument = new SolrInputDocument();
      parentDocument.addField(ID_FIELD, parentIDS[i]);
      parentDocument.addField("subject", "parentDocument");
      parentDocument.addField("title", titleVals[i]);

      SolrInputDocument childDocument = new SolrInputDocument();
      childDocument.addField(ID_FIELD, childDocIDS[i]);
      childDocument.addField("cat", "childDocument");
      childDocument.addField("title", titleVals[i]);

      SolrInputDocument grandChildDocument = new SolrInputDocument();
      grandChildDocument.addField(ID_FIELD, grandChildIDS[i]);

      childDocument.addChildDocument(grandChildDocument);
      parentDocument.addChildDocument(childDocument);

      try {
        Long version = addAndGetVersion(parentDocument, null);
        assertNotNull(version);
      } catch (Exception e) {
        fail("Failed to add document to the index");
      }
      if (random().nextBoolean()) {
        assertU(commit());
      }
    }
    assertU(commit());
    assertQ(req("q", "*:*"), "//*[@numFound='" + (parentIDS.length + childDocIDS.length + grandChildIDS.length) + "']");
  }

  // All descendants (child + grandchild) are returned flat under each parent.
  private void testParentFilterJSON() throws Exception {
    String[] tests = new String[] {
        "/response/docs/[0]/_childDocuments_/[0]/id==3",
        "/response/docs/[0]/_childDocuments_/[1]/id==2",
        "/response/docs/[0]/_childDocuments_/[1]/cat/[0]/=='childDocument'",
        "/response/docs/[0]/_childDocuments_/[1]/title/[0]/=='" + titleVals[0] + "'",
        "/response/docs/[1]/_childDocuments_/[0]/id==6",
        "/response/docs/[1]/_childDocuments_/[1]/id==5",
        "/response/docs/[1]/_childDocuments_/[1]/cat/[0]/=='childDocument'",
        "/response/docs/[1]/_childDocuments_/[1]/title/[0]/=='" + titleVals[1] + "'"
    };

    assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ",
        "fl", "*,[child parentFilter=\"subject:parentDocument\"]"), tests);

    assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ",
        "fl", "subject,[child parentFilter=\"subject:parentDocument\"]"), tests);
  }

  // XML twin of testParentFilterJSON.
  private void testParentFilterXML() {
    String tests[] = new String[] {
        "//*[@numFound='2']",
        "/response/result/doc[1]/doc[1]/int[@name='id']='3'" ,
        "/response/result/doc[1]/doc[2]/int[@name='id']='2'" ,
        "/response/result/doc[1]/doc/arr[@name='cat']/str[1]='childDocument'" ,
        "/response/result/doc[1]/doc/arr[@name='title']/str[1]='" + titleVals[0] + "'" ,
        "/response/result/doc[2]/doc[1]/int[@name='id']='6'",
        "/response/result/doc[2]/doc[2]/int[@name='id']='5'",
        "/response/result/doc[2]/doc/arr[@name='cat']/str[1]='childDocument'",
        "/response/result/doc[2]/doc/arr[@name='title']/str[1]='" + titleVals[1] + "'"};

    assertQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ",
        "fl", "*,[child parentFilter=\"subject:parentDocument\"]"), tests);

    assertQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ",
        "fl", "subject,[child parentFilter=\"subject:parentDocument\"]"), tests);
  }
}

View File

@ -410,6 +410,15 @@ public class XMLResponseParser extends ResponseParser
break;
}
}
//Nested documents
while( type == KnownType.DOC) {
doc.addChildDocument(readDocument(parser));
int event = parser.next();
if (event == XMLStreamConstants.END_ELEMENT) { //Doc ends
return doc;
}
}
if( name == null ) {
throw new XMLStreamException( "requires 'name' attribute: "+parser.getLocalName(), parser.getLocation() );

View File

@ -23,6 +23,7 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
@ -44,6 +45,8 @@ public class SolrDocument implements Map<String,Object>, Iterable<Map.Entry<Stri
{
private final Map<String,Object> _fields;
private List<SolrDocument> _childDocuments;
public SolrDocument()
{
_fields = new LinkedHashMap<>();
@ -68,6 +71,10 @@ public class SolrDocument implements Map<String,Object>, Iterable<Map.Entry<Stri
public void clear()
{
_fields.clear();
if(_childDocuments != null) {
_childDocuments.clear();
}
}
/**
@ -359,4 +366,31 @@ public class SolrDocument implements Map<String,Object>, Iterable<Map.Entry<Stri
public Collection<Object> values() {
return _fields.values();
}
public void addChildDocument(SolrDocument child) {
if (_childDocuments == null) {
_childDocuments = new ArrayList<>();
}
_childDocuments.add(child);
}
public void addChildDocuments(Collection<SolrDocument> childs) {
for (SolrDocument child : childs) {
addChildDocument(child);
}
}
/** Returns the list of child documents, or null if none. */
public List<SolrDocument> getChildDocuments() {
return _childDocuments;
}
public boolean hasChildDocuments() {
boolean isEmpty = (_childDocuments == null || _childDocuments.isEmpty());
return !isEmpty;
}
public int getChildDocumentCount() {
return _childDocuments.size();
}
}

View File

@ -327,23 +327,38 @@ public class JavaBinCodec {
}
public void writeSolrDocument(SolrDocument doc) throws IOException {
List<SolrDocument> children = doc.getChildDocuments();
int sz = doc.size() + (children==null ? 0 : children.size());
writeTag(SOLRDOC);
writeTag(ORDERED_MAP, doc.size());
writeTag(ORDERED_MAP, sz);
for (Map.Entry<String, Object> entry : doc) {
String name = entry.getKey();
writeExternString(name);
Object val = entry.getValue();
writeVal(val);
}
if (children != null) {
for (SolrDocument child : children) {
writeSolrDocument(child);
}
}
}
public SolrDocument readSolrDocument(DataInputInputStream dis) throws IOException {
NamedList nl = (NamedList) readVal(dis);
tagByte = dis.readByte();
int size = readSize(dis);
SolrDocument doc = new SolrDocument();
for (int i = 0; i < nl.size(); i++) {
String name = nl.getName(i);
Object val = nl.getVal(i);
doc.setField(name, val);
for (int i = 0; i < size; i++) {
String fieldName;
Object obj = readVal(dis); // could be a field name, or a child document
if (obj instanceof SolrDocument) {
doc.addChildDocument((SolrDocument)obj);
continue;
} else {
fieldName = (String)obj;
}
Object fieldVal = readVal(dis);
doc.setField(fieldName, fieldVal);
}
return doc;
}
@ -409,7 +424,7 @@ public class JavaBinCodec {
writeVal(inputField.getValue());
}
if (children != null) {
for (SolrInputDocument child : sdoc.getChildDocuments()) {
for (SolrInputDocument child : children) {
writeSolrInputDocument(child);
}
}

View File

@ -0,0 +1 @@
 ¤à"id!1à'subject.parentDocument £á!2à#cat#foo ¡á!3 ¢á"22ã#bar

View File

@ -18,19 +18,8 @@
package org.apache.solr.client.solrj;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import com.google.common.collect.Maps;
import junit.framework.Assert;
import org.apache.lucene.util.TestUtil;
import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
@ -38,30 +27,43 @@ import org.apache.solr.client.solrj.impl.BinaryResponseParser;
import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrServer;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.impl.XMLResponseParser;
import org.apache.solr.client.solrj.request.LukeRequest;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.response.FieldStatsInfo;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest.ACTION;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.apache.solr.client.solrj.request.LukeRequest;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.FieldStatsInfo;
import org.apache.solr.client.solrj.response.LukeResponse;
import org.apache.solr.client.solrj.response.PivotField;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.UpdateResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.params.AnalysisParams;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.FacetParams;
import org.apache.solr.common.util.NamedList;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
/**
* This should include tests against the example solr config
*
@ -1259,5 +1261,189 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase
"true",
((NamedList) resp.getResponseHeader().get("params")).get("debug"));
}
@Test
public void testChildDoctransformer() throws IOException, SolrServerException {
SolrServer server = getSolrServer();
server.deleteByQuery("*:*");
server.commit();
int numRootDocs = TestUtil.nextInt(random(), 10, 100);
int maxDepth = TestUtil.nextInt(random(), 2, 5);
Map<String,SolrInputDocument> allDocs = new HashMap<>();
for (int i =0; i < numRootDocs; i++) {
server.add(genNestedDocuments(allDocs, 0, maxDepth));
}
server.commit();
// sanity check
SolrQuery q = new SolrQuery("*:*");
QueryResponse resp = server.query(q);
assertEquals("Doc count does not match",
allDocs.size(), resp.getResults().getNumFound());
// base check - we know there is an exact number of these root docs
q = new SolrQuery("level_i:0");
q.setFields("*", "[child parentFilter=\"level_i:0\"]");
resp = server.query(q);
assertEquals("topLevel count does not match", numRootDocs,
resp.getResults().getNumFound());
for (SolrDocument outDoc : resp.getResults()) {
String docId = (String)outDoc.getFieldValue("id");
SolrInputDocument origDoc = allDocs.get(docId);
assertNotNull("docId not found: " + docId, origDoc);
assertEquals("kids mismatch",
origDoc.hasChildDocuments(), outDoc.hasChildDocuments());
if (outDoc.hasChildDocuments()) {
for (SolrDocument kid : outDoc.getChildDocuments()) {
String kidId = (String)kid.getFieldValue("id");
SolrInputDocument origChild = findDecendent(origDoc, kidId);
assertNotNull(docId + " doesn't have decendent " + kidId,
origChild);
}
}
}
// simple check: direct verification of direct children on random docs
{
int parentLevel = TestUtil.nextInt(random(), 0, maxDepth);
int kidLevel = parentLevel+1;
String parentFilter = "level_i:" + parentLevel;
String childFilter = "level_i:" + kidLevel;
int maxKidCount = TestUtil.nextInt(random(), 1, 37);
q = new SolrQuery("*:*");
q.setFilterQueries(parentFilter);
q.setFields("id,[child parentFilter=\"" + parentFilter +
"\" childFilter=\"" + childFilter +
"\" limit=\"" + maxKidCount + "\"]");
resp = server.query(q);
for (SolrDocument outDoc : resp.getResults()) {
String docId = (String)outDoc.getFieldValue("id");
SolrInputDocument origDoc = allDocs.get(docId);
assertNotNull("docId not found: " + docId, origDoc);
assertEquals("kids mismatch",
origDoc.hasChildDocuments(), outDoc.hasChildDocuments());
if (outDoc.hasChildDocuments()) {
// since we know we are looking at our direct children
// we can verify the count
int numOrigKids = origDoc.getChildDocuments().size();
int numOutKids = outDoc.getChildDocuments().size();
assertEquals("Num kids mismatch: " + numOrigKids + "/" + maxKidCount,
(maxKidCount < numOrigKids ? maxKidCount : numOrigKids),
numOutKids);
for (SolrDocument kid : outDoc.getChildDocuments()) {
String kidId = (String)kid.getFieldValue("id");
assertEquals("kid is the wrong level",
kidLevel, (int)kid.getFieldValue("level_i"));
SolrInputDocument origChild = findDecendent(origDoc, kidId);
assertNotNull(docId + " doesn't have decendent " + kidId,
origChild);
}
}
}
}
// fully randomized
// verifications are driven only by the results
{
int parentLevel = TestUtil.nextInt(random(), 0, maxDepth-1);
int kidLevelMin = TestUtil.nextInt(random(), parentLevel, maxDepth);
int kidLevelMax = TestUtil.nextInt(random(), kidLevelMin, maxDepth);
String parentFilter = "level_i:" + parentLevel;
String childFilter = "level_i:[" + kidLevelMin + " TO " + kidLevelMax + "]";
int maxKidCount = TestUtil.nextInt(random(), 1, 7);
q = new SolrQuery("*:*");
if (random().nextBoolean()) {
String name = names[TestUtil.nextInt(random(), 0, names.length-1)];
q = new SolrQuery("name:" + name);
}
q.setFilterQueries(parentFilter);
q.setFields("id,[child parentFilter=\"" + parentFilter +
"\" childFilter=\"" + childFilter +
"\" limit=\"" + maxKidCount + "\"]");
resp = server.query(q);
for (SolrDocument outDoc : resp.getResults()) {
String docId = (String)outDoc.getFieldValue("id");
SolrInputDocument origDoc = allDocs.get(docId);
assertNotNull("docId not found: " + docId, origDoc);
// we can't always assert origHasKids==outHasKids, original kids
// might not go deep enough for childFilter...
if (outDoc.hasChildDocuments()) {
// ...however if there are out kids, there *have* to be orig kids
assertTrue("orig doc had no kids at all", origDoc.hasChildDocuments());
for (SolrDocument kid : outDoc.getChildDocuments()) {
String kidId = (String)kid.getFieldValue("id");
int kidLevel = (int)kid.getFieldValue("level_i");
assertTrue("kid level to high: " + kidLevelMax + "<" + kidLevel,
kidLevel <= kidLevelMax);
assertTrue("kid level to low: " + kidLevelMin + ">" + kidLevel,
kidLevelMin <= kidLevel);
SolrInputDocument origChild = findDecendent(origDoc, kidId);
assertNotNull(docId + " doesn't have decendent " + kidId,
origChild);
}
}
}
}
}
/**
* Depth first search of a SolrInputDocument looking for a decendent by id,
* returns null if it's not a decendent
*/
private SolrInputDocument findDecendent(SolrInputDocument parent, String childId) {
if (childId.equals(parent.getFieldValue("id"))) {
return parent;
}
if (! parent.hasChildDocuments() ) {
return null;
}
for (SolrInputDocument kid : parent.getChildDocuments()) {
SolrInputDocument result = findDecendent(kid, childId);
if (null != result) {
return result;
}
}
return null;
}
/** used by genNestedDocuments */
private int idCounter = 0;
/** used by genNestedDocuments */
private static final String[] names
= new String[] { "java","pyhon","scala","ruby","clojure" };
/**
 * Recursively generates a document which may also have (randomly many) child
 * documents; every document constructed -- including all descendants -- is
 * registered in <code>allDocs</code> keyed by its "id" field value.
 */
private SolrInputDocument genNestedDocuments(Map<String,SolrInputDocument> allDocs,
                                             int thisLevel,
                                             int maxDepth) {
  final String id = String.valueOf(idCounter++);
  final SolrInputDocument doc = new SolrInputDocument();
  allDocs.put(id, doc);
  doc.addField("id", id);
  doc.addField("level_i", thisLevel);
  doc.addField("name", names[TestUtil.nextInt(random(), 0, names.length-1)]);
  if (maxDepth > 0) {
    // NOTE: range includes negative values to increase the odds of no kids
    final int numKids = TestUtil.nextInt(random(), -2, 7);
    for (int kid = 0; kid < numKids; kid++) {
      doc.addChildDocument(genNestedDocuments(allDocs, thisLevel + 1, maxDepth - 1));
    }
  }
  return doc;
}
}

View File

@ -44,10 +44,13 @@ import org.junit.Test;
public class TestJavaBinCodec extends SolrTestCaseJ4 {
private static final String SOLRJ_JAVABIN_BACKCOMPAT_BIN = "/solrj/javabin_backcompat.bin";
private final String BIN_FILE_LOCATION = "./solr/solrj/src/test-files/solrj/javabin_backcompat.bin";
private static final String SOLRJ_JAVABIN_BACKCOMPAT_BIN = "/solrj/javabin_backcompat.bin";
private final String BIN_FILE_LOCATION = "./solr/solrj/src/test-files/solrj/javabin_backcompat.bin";
public void testStrings() throws Exception {
private static final String SOLRJ_JAVABIN_BACKCOMPAT_BIN_CHILD_DOCS = "/solrj/javabin_backcompat_child_docs.bin";
private final String BIN_FILE_LOCATION_CHILD_DOCS = "./solr/solrj/src/test-files/solrj/javabin_backcompat_child_docs.bin";
public void testStrings() throws Exception {
JavaBinCodec javabin = new JavaBinCodec();
for (int i = 0; i < 10000 * RANDOM_MULTIPLIER; i++) {
String s = TestUtil.randomUnicodeString(random());
@ -59,6 +62,29 @@ private final String BIN_FILE_LOCATION = "./solr/solrj/src/test-files/solrj/java
}
}
/**
 * Builds the canonical nested test document: a parent (id=1) with two
 * children (id=2 and id=22), where only the first child has a single
 * grandchild (id=3).
 */
private SolrDocument generateSolrDocumentWithChildDocs() {
  final SolrDocument grandChild = new SolrDocument();
  grandChild.addField("id", "3");

  final SolrDocument firstChild = new SolrDocument();
  firstChild.addField("id", "2");
  firstChild.addField("cat", "foo");
  firstChild.addChildDocument(grandChild);

  final SolrDocument secondChild = new SolrDocument();
  secondChild.addField("id", "22");
  secondChild.addField("cat", "bar");

  final SolrDocument parent = new SolrDocument();
  parent.addField("id", "1");
  parent.addField("subject", "parentDocument");
  parent.addChildDocument(firstChild);
  parent.addChildDocument(secondChild);
  return parent;
}
private List<Object> generateAllDataTypes() {
List<Object> types = new ArrayList<>();
@ -137,7 +163,7 @@ private final String BIN_FILE_LOCATION = "./solr/solrj/src/test-files/solrj/java
}
@Test
public void testBackCompat() {
public void testBackCompat() throws IOException {
JavaBinCodec javabin = new JavaBinCodec(){
@Override
public List<Object> readIterator(DataInputInputStream fis) throws IOException {
@ -170,13 +196,31 @@ private final String BIN_FILE_LOCATION = "./solr/solrj/src/test-files/solrj/java
}
} catch (IOException e) {
fail(e.getMessage());
throw e;
}
}
@Test
public void testForwardCompat() {
public void testBackCompatForSolrDocumentWithChildDocs() throws IOException {
JavaBinCodec javabin = new JavaBinCodec(){
@Override
public List<Object> readIterator(DataInputInputStream fis) throws IOException {
return super.readIterator(fis);
}
};
try {
InputStream is = getClass().getResourceAsStream(SOLRJ_JAVABIN_BACKCOMPAT_BIN_CHILD_DOCS);
SolrDocument sdoc = (SolrDocument) javabin.unmarshal(is);
SolrDocument matchSolrDoc = generateSolrDocumentWithChildDocs();
assertTrue(assertSolrDocumentEquals(sdoc, matchSolrDoc));
} catch (IOException e) {
throw e;
}
}
@Test
public void testForwardCompat() throws IOException {
JavaBinCodec javabin = new JavaBinCodec();
ByteArrayOutputStream os = new ByteArrayOutputStream();
@ -189,17 +233,76 @@ private final String BIN_FILE_LOCATION = "./solr/solrj/src/test-files/solrj/java
byte[] currentFormatBytes = IOUtils.toByteArray(is);
for (int i = 1; i < currentFormatBytes.length; i++) {//ignore the first byte. It is version information
assertEquals(currentFormatBytes[i], newFormatBytes[i]);
assertEquals(newFormatBytes[i], currentFormatBytes[i]);
}
} catch (IOException e) {
e.printStackTrace();
fail(e.getMessage());
throw e;
}
}
public void genBinaryFile() throws IOException {
@Test
public void testForwardCompatForSolrDocumentWithChildDocs() throws IOException {
  // marshal the current in-memory representation of a nested document...
  JavaBinCodec javabin = new JavaBinCodec();
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  SolrDocument sdoc = generateSolrDocumentWithChildDocs();
  javabin.marshal(sdoc, os);
  byte[] newFormatBytes = os.toByteArray();
  // ...and compare byte-for-byte against the committed back-compat binary.
  // try-with-resources ensures the resource stream is closed (the original
  // leaked it), and the useless catch-and-rethrow of IOException is gone.
  try (InputStream is = getClass().getResourceAsStream(SOLRJ_JAVABIN_BACKCOMPAT_BIN_CHILD_DOCS)) {
    byte[] currentFormatBytes = IOUtils.toByteArray(is);
    // without this, a shorter current file could silently skip trailing bytes
    // (or a longer one trigger an ArrayIndexOutOfBoundsException instead of a
    // meaningful assertion failure)
    assertEquals(currentFormatBytes.length, newFormatBytes.length);
    for (int i = 1; i < currentFormatBytes.length; i++) {//ignore the first byte. It is version information
      assertEquals(newFormatBytes[i], currentFormatBytes[i]);
    }
  }
}
/**
 * Round-trips a nested document through the javabin codec and verifies the
 * parent, both children, and the single grandchild all survive intact.
 */
@Test
public void testResponseChildDocuments() throws IOException {
  JavaBinCodec codec = new JavaBinCodec();
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  codec.marshal(generateSolrDocumentWithChildDocs(), buffer);
  SolrDocument parent =
      (SolrDocument) codec.unmarshal(new ByteArrayInputStream(buffer.toByteArray()));

  // parent fields survive the round trip
  assertEquals(2, parent.size());
  assertEquals("1", parent.getFieldValue("id"));
  assertEquals("parentDocument", parent.getFieldValue("subject"));

  // parent carries exactly two children
  assertTrue(parent.hasChildDocuments());
  List<SolrDocument> kids = parent.getChildDocuments();
  assertNotNull(kids);
  assertEquals(2, kids.size());

  SolrDocument firstKid = kids.get(0);
  assertEquals(2, firstKid.size());
  assertEquals("2", firstKid.getFieldValue("id"));
  assertEquals("foo", firstKid.getFieldValue("cat"));

  SolrDocument secondKid = kids.get(1);
  assertEquals(2, secondKid.size());
  assertEquals("22", secondKid.getFieldValue("id"));
  assertEquals("bar", secondKid.getFieldValue("cat"));
  // second child is a leaf
  assertFalse(secondKid.hasChildDocuments());
  assertNull(secondKid.getChildDocuments());

  // only the first child has a (single, field-only) grandchild
  assertTrue(firstKid.hasChildDocuments());
  List<SolrDocument> grandKids = firstKid.getChildDocuments();
  assertNotNull(grandKids);
  assertEquals(1, grandKids.size());
  SolrDocument grandKid = grandKids.get(0);
  assertEquals(1, grandKid.size());
  assertEquals("3", grandKid.getFieldValue("id"));
  assertFalse(grandKid.hasChildDocuments());
  assertNull(grandKid.getChildDocuments());
}
public void genBinaryFiles() throws IOException {
JavaBinCodec javabin = new JavaBinCodec();
ByteArrayOutputStream os = new ByteArrayOutputStream();
@ -211,11 +314,22 @@ private final String BIN_FILE_LOCATION = "./solr/solrj/src/test-files/solrj/java
BufferedOutputStream bos = new BufferedOutputStream(fs);
bos.write(out);
bos.close();
//Binary file with child documents
javabin = new JavaBinCodec();
SolrDocument sdoc = generateSolrDocumentWithChildDocs();
os = new ByteArrayOutputStream();
javabin.marshal(sdoc, os);
fs = new FileOutputStream(new File(BIN_FILE_LOCATION_CHILD_DOCS));
bos = new BufferedOutputStream(fs);
bos.write(os.toByteArray());
bos.close();
}
public static void main(String[] args) throws IOException {
TestJavaBinCodec test = new TestJavaBinCodec();
test.genBinaryFile();
test.genBinaryFiles();
}
}

View File

@ -1080,9 +1080,20 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
private static Pattern escapedSingleQuotePattern = Pattern.compile("\\\\\'");
/** Creates JSON from a SolrInputDocument. Doesn't currently handle boosts. */
/** Creates JSON from a SolrInputDocument. Doesn't currently handle boosts.
* @see #json(SolrInputDocument,CharArr)
*/
public static String json(SolrInputDocument doc) {
CharArr out = new CharArr();
CharArr out = new CharArr();
json(doc, out);
return out.toString();
}
/**
* Appends to the <code>out</code> array with JSON from the <code>doc</code>.
* Doesn't currently handle boosts, but does recursively handle child documents
*/
public static void json(SolrInputDocument doc, CharArr out) {
try {
out.append('{');
boolean firstField = true;
@ -1105,11 +1116,22 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
out.append(JSONUtil.toJSON(sfield.getValue()));
}
}
boolean firstChildDoc = true;
if(doc.hasChildDocuments()) {
out.append(",\"_childDocuments_\": [");
List<SolrInputDocument> childDocuments = doc.getChildDocuments();
for(SolrInputDocument childDocument : childDocuments) {
if (firstChildDoc) firstChildDoc=false;
else out.append(',');
json(childDocument, out);
}
out.append(']');
}
out.append('}');
} catch (IOException e) {
// should never happen
}
return out.toString();
}
/** Creates a JSON add command from a SolrInputDocument list. Doesn't currently handle boosts. */
@ -1883,7 +1905,23 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
}
}
return true;
if(solrDocument1.getChildDocuments() == null && solrDocument2.getChildDocuments() == null) {
return true;
}
if(solrDocument1.getChildDocuments() == null || solrDocument2.getChildDocuments() == null) {
return false;
} else if(solrDocument1.getChildDocuments().size() != solrDocument2.getChildDocuments().size()) {
return false;
} else {
Iterator<SolrDocument> childDocsIter1 = solrDocument1.getChildDocuments().iterator();
Iterator<SolrDocument> childDocsIter2 = solrDocument2.getChildDocuments().iterator();
while(childDocsIter1.hasNext()) {
if(!assertSolrDocumentEquals(childDocsIter1.next(), childDocsIter2.next())) {
return false;
}
}
return true;
}
}
public boolean assertSolrDocumentList(Object expected, Object actual) {