SOLR-7662: Refactored response writing to consolidate the logic in one place

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1685856 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Noble Paul 2015-06-16 15:17:14 +00:00
parent cc1edb0d16
commit 8ab21aa657
14 changed files with 320 additions and 288 deletions

View File

@ -666,6 +666,8 @@ Other Changes
* SOLR-7183: Fix Locale blacklisting for Minikdc based tests. (Ishan Chattopadhyaya, hossman
via Anshum Gupta)
* SOLR-7662: Refactored response writing to consolidate the logic in one place (Noble Paul)
================== 5.1.0 ==================
Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release

View File

@ -222,7 +222,7 @@ public class EmbeddedSolrServer extends SolrClient {
super.writeSolrDocumentList(docs);
}
}.marshal(rsp.getValues(), out);
}.setWritableDocFields(resolver). marshal(rsp.getValues(), out);
InputStream in = new ByteArrayInputStream(out.toByteArray());
return (NamedList<Object>) new JavaBinCodec(resolver).unmarshal(in);

View File

@ -16,21 +16,26 @@
*/
package org.apache.solr.response;
import java.io.*;
import java.util.*;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.index.StoredDocument;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.index.IndexableField;
import org.apache.solr.client.solrj.impl.BinaryResponseParser;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.util.JavaBinCodec;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.transform.DocTransformer;
import org.apache.solr.response.transform.TransformContext;
import org.apache.solr.schema.*;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.DocList;
import org.apache.solr.search.ReturnFields;
import org.apache.solr.search.SolrIndexSearcher;
@ -41,15 +46,13 @@ import org.slf4j.LoggerFactory;
public class BinaryResponseWriter implements BinaryQueryResponseWriter {
private static final Logger LOG = LoggerFactory.getLogger(BinaryResponseWriter.class);
public static final Set<Class> KNOWN_TYPES = new HashSet<>();
@Override
public void write(OutputStream out, SolrQueryRequest req, SolrQueryResponse response) throws IOException {
Resolver resolver = new Resolver(req, response.getReturnFields());
Boolean omitHeader = req.getParams().getBool(CommonParams.OMIT_HEADER);
if (omitHeader != null && omitHeader) response.getValues().remove("responseHeader");
JavaBinCodec codec = new JavaBinCodec(resolver);
codec.marshal(response.getValues(), out);
new JavaBinCodec(resolver).setWritableDocFields(resolver).marshal(response.getValues(), out);
}
@Override
@ -67,16 +70,12 @@ public class BinaryResponseWriter implements BinaryQueryResponseWriter {
/* NOOP */
}
public static class Resolver implements JavaBinCodec.ObjectResolver {
public static class Resolver implements JavaBinCodec.ObjectResolver , JavaBinCodec.WritableDocFields {
protected final SolrQueryRequest solrQueryRequest;
protected IndexSchema schema;
protected SolrIndexSearcher searcher;
protected final ReturnFields returnFields;
// transmit field values using FieldType.toObject()
// rather than the String from FieldType.toExternal()
boolean useFieldObjects = true;
public Resolver(SolrQueryRequest req, ReturnFields returnFields) {
solrQueryRequest = req;
this.returnFields = returnFields;
@ -95,76 +94,38 @@ public class BinaryResponseWriter implements BinaryQueryResponseWriter {
return null; // null means we completely handled it
}
if( o instanceof StorableField ) {
if(schema == null) schema = solrQueryRequest.getSchema();
if(schema == null) schema = solrQueryRequest.getSchema();
StorableField f = (StorableField)o;
SchemaField sf = schema.getFieldOrNull(f.name());
try {
o = getValue(sf, f);
}
catch (Exception e) {
o = DocsStreamer.getValue(sf, f);
} catch (Exception e) {
LOG.warn("Error reading a field : " + o, e);
}
}
if (o instanceof SolrDocument) {
// Remove any fields that were not requested.
// This typically happens when distributed search adds
// extra fields to an internal request
SolrDocument doc = (SolrDocument)o;
Iterator<Map.Entry<String, Object>> i = doc.iterator();
while ( i.hasNext() ) {
String fname = i.next().getKey();
if ( !returnFields.wantsField( fname ) ) {
i.remove();
}
}
return doc;
}
return o;
}
protected void writeResultsBody( ResultContext res, JavaBinCodec codec ) throws IOException
{
DocList ids = res.docs;
int sz = ids.size();
codec.writeTag(JavaBinCodec.ARR, sz);
if(searcher == null) searcher = solrQueryRequest.getSearcher();
if(schema == null) schema = solrQueryRequest.getSchema();
@Override
public boolean isWritable(String name) {
return returnFields.wantsField(name);
}
DocTransformer transformer = returnFields.getTransformer();
TransformContext context = new TransformContext();
context.query = res.query;
context.wantsScores = returnFields.wantsScore() && ids.hasScores();
context.req = solrQueryRequest;
context.searcher = searcher;
if( transformer != null ) {
transformer.setContext( context );
}
Set<String> fnames = returnFields.getLuceneFieldNames();
boolean onlyPseudoFields = (fnames == null && !returnFields.wantsAllFields() && !returnFields.hasPatternMatching())
|| (fnames != null && fnames.size() == 1 && SolrReturnFields.SCORE.equals(fnames.iterator().next()));
context.iterator = ids.iterator();
for (int i = 0; i < sz; i++) {
int id = context.iterator.nextDoc();
SolrDocument sdoc;
if (onlyPseudoFields) {
// no need to get stored fields of the document, see SOLR-5968
sdoc = new SolrDocument();
} else {
StoredDocument doc = searcher.doc(id, fnames);
sdoc = getDoc(doc);
}
if( transformer != null ) {
transformer.transform(sdoc, id);
}
codec.writeSolrDocument(sdoc);
}
if( transformer != null ) {
transformer.setContext( null );
@Override
public boolean wantsAllFields() {
return returnFields.wantsAllFields();
}
protected void writeResultsBody( ResultContext res, JavaBinCodec codec ) throws IOException {
codec.writeTag(JavaBinCodec.ARR, res.docs.size());
DocsStreamer docStreamer = new DocsStreamer(res.docs,res.query, solrQueryRequest, returnFields);
while (docStreamer.hasNext()) {
SolrDocument doc = docStreamer.next();
codec.writeSolrDocument(doc);
}
}
public void writeResults(ResultContext ctx, JavaBinCodec codec) throws IOException {
codec.writeTag(JavaBinCodec.SOLRDOCLST);
boolean wantsScores = returnFields.wantsScore() && ctx.docs.hasScores();
@ -183,59 +144,6 @@ public class BinaryResponseWriter implements BinaryQueryResponseWriter {
writeResultsBody( ctx, codec );
}
public SolrDocument getDoc(StoredDocument doc) {
SolrDocument solrDoc = new SolrDocument();
for (StorableField f : doc) {
String fieldName = f.name();
if( !returnFields.wantsField(fieldName) )
continue;
SchemaField sf = schema.getFieldOrNull(fieldName);
Object val = null;
try {
val = getValue(sf,f);
} catch (Exception e) {
// There is a chance of the underlying field not really matching the
// actual field type . So ,it can throw exception
LOG.warn("Error reading a field from document : " + solrDoc, e);
//if it happens log it and continue
continue;
}
if(sf != null && sf.multiValued() && !solrDoc.containsKey(fieldName)){
ArrayList l = new ArrayList();
l.add(val);
solrDoc.addField(fieldName, l);
} else {
solrDoc.addField(fieldName, val);
}
}
return solrDoc;
}
public Object getValue(SchemaField sf, StorableField f) throws Exception {
FieldType ft = null;
if(sf != null) ft =sf.getType();
if (ft == null) { // handle fields not in the schema
BytesRef bytesRef = f.binaryValue();
if (bytesRef != null) {
if (bytesRef.offset == 0 && bytesRef.length == bytesRef.bytes.length) {
return bytesRef.bytes;
} else {
final byte[] bytes = new byte[bytesRef.length];
System.arraycopy(bytesRef.bytes, bytesRef.offset, bytes, 0, bytesRef.length);
return bytes;
}
} else return f.stringValue();
} else {
if (useFieldObjects && KNOWN_TYPES.contains(ft.getClass())) {
return ft.toObject(f);
} else {
return ft.toExternal(f);
}
}
}
}
@ -253,7 +161,7 @@ public class BinaryResponseWriter implements BinaryQueryResponseWriter {
Resolver resolver = new Resolver(req, rsp.getReturnFields());
ByteArrayOutputStream out = new ByteArrayOutputStream();
new JavaBinCodec(resolver).marshal(rsp.getValues(), out);
new JavaBinCodec(resolver).setWritableDocFields(resolver).marshal(rsp.getValues(), out);
InputStream in = new ByteArrayInputStream(out.toByteArray());
return (NamedList<Object>) new JavaBinCodec(resolver).unmarshal(in);
@ -263,18 +171,4 @@ public class BinaryResponseWriter implements BinaryQueryResponseWriter {
}
}
static {
KNOWN_TYPES.add(BoolField.class);
KNOWN_TYPES.add(StrField.class);
KNOWN_TYPES.add(TextField.class);
KNOWN_TYPES.add(TrieField.class);
KNOWN_TYPES.add(TrieIntField.class);
KNOWN_TYPES.add(TrieLongField.class);
KNOWN_TYPES.add(TrieFloatField.class);
KNOWN_TYPES.add(TrieDoubleField.class);
KNOWN_TYPES.add(TrieDateField.class);
KNOWN_TYPES.add(BinaryField.class);
// We do not add UUIDField because UUID object is not a supported type in JavaBinCodec
// and if we write UUIDField.toObject, we wouldn't know how to handle it in the client side
}
}

View File

@ -0,0 +1,196 @@
package org.apache.solr.response;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.index.StoredDocument;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrException;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.transform.DocTransformer;
import org.apache.solr.response.transform.TransformContext;
import org.apache.solr.schema.BinaryField;
import org.apache.solr.schema.BoolField;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.schema.StrField;
import org.apache.solr.schema.TextField;
import org.apache.solr.schema.TrieDateField;
import org.apache.solr.schema.TrieDoubleField;
import org.apache.solr.schema.TrieField;
import org.apache.solr.schema.TrieFloatField;
import org.apache.solr.schema.TrieIntField;
import org.apache.solr.schema.TrieLongField;
import org.apache.solr.search.DocIterator;
import org.apache.solr.search.DocList;
import org.apache.solr.search.ReturnFields;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.SolrReturnFields;
/**
 * This streams SolrDocuments from a DocList and applies transformer
 * <p>
 * Each call to {@link #next()} lazily loads the hit's stored fields from the
 * searcher (unless only pseudo-fields such as score were requested) and runs
 * the configured {@link DocTransformer}, if any, before returning the document.
 */
public class DocsStreamer implements Iterator<SolrDocument> {
  // FieldType classes whose toObject() values JavaBinCodec can serialize;
  // populated in the static initializer at the bottom of this class.
  public static final Set<Class> KNOWN_TYPES = new HashSet<>();

  private final DocList docs;          // the hits (ids/scores) to stream, in order
  private SolrIndexSearcher searcher;  // loads stored fields for each doc id
  private final IndexSchema schema;
  private DocTransformer transformer;  // optional; null when no transformer was requested
  private DocIterator docIterator;     // iterates the doc ids in 'docs'
  private boolean onlyPseudoFields;    // true => skip stored-field retrieval entirely
  private Set<String> fnames;          // lucene field names to load; null means all
  private TransformContext context;    // state shared with the transformer
  private int idx = -1;                // index of the last document returned by next()

  /**
   * @param docList      the documents (ids/scores) to stream
   * @param query        the query that produced docList; exposed to transformers via the context
   * @param req          current request, supplying schema and searcher
   * @param returnFields controls which fields are loaded and which transformer runs
   */
  public DocsStreamer(DocList docList, Query query, SolrQueryRequest req, ReturnFields returnFields) {
    this.docs = docList;
    this.schema = req.getSchema();
    searcher = req.getSearcher();
    transformer = returnFields.getTransformer();
    docIterator = docList.iterator();
    context = new TransformContext();
    context.query = query;
    context.wantsScores = returnFields.wantsScore() && docList.hasScores();
    context.req = req;
    context.searcher = searcher;
    context.iterator = docIterator;
    fnames = returnFields.getLuceneFieldNames();
    // No stored fields are needed when nothing concrete was requested, or when
    // the only requested field is the score (see SOLR-5968).
    onlyPseudoFields = (fnames == null && !returnFields.wantsAllFields() && !returnFields.hasPatternMatching())
        || (fnames != null && fnames.size() == 1 && SolrReturnFields.SCORE.equals(fnames.iterator().next()));
    if (transformer != null) transformer.setContext(context);
  }

  /** Whether scores were requested and are available for these docs. */
  public boolean hasScores() {
    return context.wantsScores;
  }

  /** 0-based index of the document most recently returned by {@link #next()}; -1 before the first call. */
  public int currentIndex() {
    return idx;
  }

  public boolean hasNext() {
    return docIterator.hasNext();
  }

  /**
   * Loads and returns the next document, after applying the transformer (if any).
   * IOExceptions from the searcher or transformer are rethrown as SERVER_ERROR
   * SolrExceptions so this can satisfy the Iterator contract.
   */
  public SolrDocument next() {
    int id = docIterator.nextDoc();
    idx++;
    SolrDocument sdoc = null;
    if (onlyPseudoFields) {
      // no need to get stored fields of the document, see SOLR-5968
      sdoc = new SolrDocument();
    } else {
      try {
        StoredDocument doc = searcher.doc(id, fnames);
        sdoc = getDoc(doc, schema);
      } catch (IOException e) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error reading document with docId " + id, e);
      }
    }
    if (transformer != null) {
      try {
        transformer.transform(sdoc, id);
      } catch (IOException e) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error applying transformer", e);
      }
    }
    return sdoc;
  }

  /**
   * Converts a lucene {@link StoredDocument} into a {@link SolrDocument},
   * wrapping the first value of any schema-multivalued field in a List so
   * multivalued fields are always represented as collections.
   */
  public static SolrDocument getDoc(StoredDocument doc, final IndexSchema schema) {
    SolrDocument out = new SolrDocument();
    for (StorableField f : doc.getFields()) {
      // Make sure multivalued fields are represented as lists
      Object existing = out.get(f.name());
      if (existing == null) {
        SchemaField sf = schema.getFieldOrNull(f.name());
        if (sf != null && sf.multiValued()) {
          List<Object> vals = new ArrayList<>();
          vals.add(f);
          out.setField(f.name(), vals);
        } else {
          out.setField(f.name(), f);
        }
      } else {
        out.addField(f.name(), f);
      }
    }
    return out;
  }

  @Override
  public void remove() { //do nothing
  }

  /**
   * Extracts the value of a stored field. Fields unknown to the schema are
   * returned as raw bytes (when a binary value exists) or their string value;
   * schema fields whose FieldType is in {@link #KNOWN_TYPES} are converted via
   * toObject(), all others via toExternal().
   */
  public static Object getValue(SchemaField sf, StorableField f) {
    FieldType ft = null;
    if (sf != null) ft = sf.getType();
    if (ft == null) { // handle fields not in the schema
      BytesRef bytesRef = f.binaryValue();
      if (bytesRef != null) {
        if (bytesRef.offset == 0 && bytesRef.length == bytesRef.bytes.length) {
          return bytesRef.bytes;
        } else {
          // copy out the referenced slice so callers get a standalone array
          final byte[] bytes = new byte[bytesRef.length];
          System.arraycopy(bytesRef.bytes, bytesRef.offset, bytes, 0, bytesRef.length);
          return bytes;
        }
      } else return f.stringValue();
    } else {
      if (KNOWN_TYPES.contains(ft.getClass())) {
        return ft.toObject(f);
      } else {
        return ft.toExternal(f);
      }
    }
  }

  static {
    KNOWN_TYPES.add(BoolField.class);
    KNOWN_TYPES.add(StrField.class);
    KNOWN_TYPES.add(TextField.class);
    KNOWN_TYPES.add(TrieField.class);
    KNOWN_TYPES.add(TrieIntField.class);
    KNOWN_TYPES.add(TrieLongField.class);
    KNOWN_TYPES.add(TrieFloatField.class);
    KNOWN_TYPES.add(TrieDoubleField.class);
    KNOWN_TYPES.add(TrieDateField.class);
    KNOWN_TYPES.add(BinaryField.class);
    // We do not add UUIDField because UUID object is not a supported type in JavaBinCodec
    // and if we write UUIDField.toObject, we wouldn't know how to handle it in the client side
  }
}

View File

@ -332,10 +332,10 @@ class JSONWriter extends TextResponseWriter {
boolean first=true;
for (String fname : doc.getFieldNames()) {
if (!returnFields.wantsField(fname)) {
if (returnFields!= null && !returnFields.wantsField(fname)) {
continue;
}
if (first) {
first=false;
}
@ -365,9 +365,8 @@ class JSONWriter extends TextResponseWriter {
writeKey("_childDocuments_", true);
writeArrayOpener(doc.getChildDocumentCount());
List<SolrDocument> childDocs = doc.getChildDocuments();
ReturnFields rf = new SolrReturnFields();
for(int i=0; i<childDocs.size(); i++) {
writeSolrDocument(null, childDocs.get(i), rf, i);
writeSolrDocument(null, childDocs.get(i), null, i);
}
writeArrayCloser();
}

View File

@ -118,7 +118,7 @@ class PHPSerializedWriter extends JSONWriter {
}
@Override
public void writeSolrDocument(String name, SolrDocument doc, ReturnFields returnFields, int idx) throws IOException
public void writeSolrDocument(String name, SolrDocument doc, ReturnFields returnFields, int idx) throws IOException
{
writeKey(idx, false);
@ -126,7 +126,7 @@ class PHPSerializedWriter extends JSONWriter {
LinkedHashMap <String,Object> multi = new LinkedHashMap<>();
for (String fname : doc.getFieldNames()) {
if(!returnFields.wantsField(fname)){
if (returnFields != null && !returnFields.wantsField(fname)) {
continue;
}

View File

@ -1,69 +0,0 @@
package org.apache.solr.response;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.index.StoredDocument;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import java.util.ArrayList;
import java.util.List;
/**
 * Helper methods shared by response writers for converting lucene documents
 * into {@link SolrDocument}s and reading field values back out of them.
 */
public class ResponseWriterUtil {
  /**
   * Utility method for converting a {@link StoredDocument} from the index into a
   * {@link SolrDocument} suitable for inclusion in a {@link SolrQueryResponse}
   */
  public static final SolrDocument toSolrDocument( StoredDocument doc, final IndexSchema schema ) {
    SolrDocument solrDoc = new SolrDocument();
    for (StorableField field : doc.getFields()) {
      String fieldName = field.name();
      if (solrDoc.get(fieldName) != null) {
        // Field name already seen: append to the existing entry.
        solrDoc.addField(fieldName, field);
        continue;
      }
      SchemaField schemaField = schema.getFieldOrNull(fieldName);
      boolean multiValued = schemaField != null && schemaField.multiValued();
      if (multiValued) {
        // First value of a multivalued field: wrap it in a list up front so
        // multivalued fields are always represented as collections.
        List<Object> values = new ArrayList<>();
        values.add(field);
        solrDoc.setField(fieldName, values);
      } else {
        solrDoc.setField(fieldName, field);
      }
    }
    return solrDoc;
  }

  /**
   * Returns the first value of {@code field} in {@code doc} as a String,
   * unwrapping {@link StoredField} values; null when the field is absent.
   */
  public static String getAsString(String field, SolrDocument doc) {
    Object value = doc.getFirstValue(field);
    if (value == null) {
      return null;
    }
    return (value instanceof StoredField) ? ((StoredField) value).stringValue() : value.toString();
  }
}

View File

@ -34,8 +34,6 @@ import org.apache.solr.schema.TrieDateField;
import org.apache.solr.util.FastWriter;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.transform.DocTransformer;
import org.apache.solr.response.transform.TransformContext;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.DocList;
@ -157,17 +155,10 @@ public abstract class TextResponseWriter {
} else if (val instanceof Date) {
writeDate(name,(Date)val);
} else if (val instanceof StoredDocument) {
SolrDocument doc = toSolrDocument( (StoredDocument)val );
DocTransformer transformer = returnFields.getTransformer();
if( transformer != null ) {
TransformContext context = new TransformContext();
context.req = req;
transformer.setContext(context);
transformer.transform(doc, -1);
}
writeSolrDocument(name, doc, returnFields, 0 );
SolrDocument doc = DocsStreamer.getDoc((StoredDocument) val, schema);
writeSolrDocument(name, doc,returnFields, 0 );
} else if (val instanceof SolrDocument) {
writeSolrDocument(name, (SolrDocument)val, returnFields, 0);
writeSolrDocument(name, (SolrDocument)val,returnFields, 0);
} else if (val instanceof ResultContext) {
// requires access to IndexReader
writeDocuments(name, (ResultContext)val, returnFields);
@ -217,7 +208,7 @@ public abstract class TextResponseWriter {
public abstract void writeStartDocumentList(String name, long start, int size, long numFound, Float maxScore) throws IOException;
public abstract void writeSolrDocument(String name, SolrDocument doc, ReturnFields returnFields, int idx) throws IOException;
public abstract void writeSolrDocument(String name, SolrDocument doc, ReturnFields returnFields, int idx) throws IOException;
public abstract void writeEndDocumentList() throws IOException;
@ -231,39 +222,15 @@ public abstract class TextResponseWriter {
writeEndDocumentList();
}
public final SolrDocument toSolrDocument( StoredDocument doc )
{
return ResponseWriterUtil.toSolrDocument(doc, schema);
}
public final void writeDocuments(String name, ResultContext res, ReturnFields fields ) throws IOException {
DocList ids = res.docs;
TransformContext context = new TransformContext();
context.query = res.query;
context.wantsScores = fields.wantsScore() && ids.hasScores();
context.req = req;
writeStartDocumentList(name, ids.offset(), ids.size(), ids.matches(),
context.wantsScores ? new Float(ids.maxScore()) : null );
DocTransformer transformer = fields.getTransformer();
context.searcher = req.getSearcher();
context.iterator = ids.iterator();
if( transformer != null ) {
transformer.setContext( context );
}
int sz = ids.size();
Set<String> fnames = fields.getLuceneFieldNames();
for (int i=0; i<sz; i++) {
int id = context.iterator.nextDoc();
StoredDocument doc = context.searcher.doc(id, fnames);
SolrDocument sdoc = toSolrDocument( doc );
if( transformer != null ) {
transformer.transform( sdoc, id);
}
writeSolrDocument( null, sdoc, returnFields, i );
}
if( transformer != null ) {
transformer.setContext( null );
DocsStreamer docsStreamer = new DocsStreamer(res.docs,res.query, req, fields);
writeStartDocumentList(name, ids.offset(), ids.size(), ids.matches(),
docsStreamer.hasScores() ? new Float(ids.maxScore()) : null);
while (docsStreamer.hasNext()) {
writeSolrDocument(null, docsStreamer.next(), returnFields, docsStreamer.currentIndex());
}
writeEndDocumentList();
}

View File

@ -191,10 +191,10 @@ public class XMLWriter extends TextResponseWriter {
incLevel();
for (String fname : doc.getFieldNames()) {
if (!returnFields.wantsField(fname)) {
if (returnFields!= null && !returnFields.wantsField(fname)) {
continue;
}
Object val = doc.getFieldValue(fname);
if( "_explain_".equals( fname ) ) {
System.out.println( val );

View File

@ -31,7 +31,7 @@ import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.ResponseWriterUtil;
import org.apache.solr.response.DocsStreamer;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
@ -139,7 +139,7 @@ class ChildDocTransformer extends TransformerWithContext {
while(i.hasNext()) {
Integer childDocNum = i.next();
StoredDocument childDoc = context.searcher.doc(childDocNum);
SolrDocument solrChildDoc = ResponseWriterUtil.toSolrDocument(childDoc, schema);
SolrDocument solrChildDoc = DocsStreamer.getDoc(childDoc, schema);
// TODO: future enhancement...
// support an fl local param in the transformer, which is used to build

View File

@ -21,7 +21,6 @@ import java.io.IOException;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.response.QueryResponseWriter;
import org.apache.solr.response.ResponseWriterUtil;
import org.apache.solr.search.SolrIndexSearcher;
/**

View File

@ -19,6 +19,7 @@ package org.apache.solr.request;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.Locale;
import java.util.Map;
import java.util.UUID;
import org.apache.solr.common.SolrDocument;
@ -89,7 +90,11 @@ public class TestBinaryResponseWriter extends AbstractSolrTestCase {
assertNotNull("obj is null", o);
assertTrue("obj is not doc", o instanceof SolrDocument);
SolrDocument out = (SolrDocument) o;
SolrDocument out = new SolrDocument();
for (Map.Entry<String, Object> e : in) {
if(r.isWritable(e.getKey())) out.put(e.getKey(),e.getValue());
}
assertTrue("id not found", out.getFieldNames().contains("id"));
assertTrue("ddd_s not found", out.getFieldNames().contains("ddd_s"));
assertEquals("Wrong number of fields found",

View File

@ -19,6 +19,9 @@ package org.apache.solr.response;
import java.io.IOException;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument;
@ -108,11 +111,21 @@ public class TestCustomDocTransformer extends SolrTestCaseJ4 {
public void transform(SolrDocument doc, int docid) throws IOException {
str.setLength(0);
for(String s : extra) {
String v = ResponseWriterUtil.getAsString(s, doc);
String v = getAsString(s, doc);
str.append(v).append('#');
}
System.out.println( "HELLO: "+str );
doc.setField(name, str.toString());
}
}
/**
 * Returns the first value of {@code field} from {@code doc} as a String,
 * unwrapping lucene {@link StorableField} values; null when the field is absent.
 */
public static String getAsString(String field, SolrDocument doc) {
  Object first = doc.getFirstValue(field);
  if (first == null) {
    return null;
  }
  if (first instanceof StorableField) {
    return ((StorableField) first).stringValue();
  }
  return first.toString();
}
}

View File

@ -79,16 +79,24 @@ public class JavaBinCodec {
private static byte VERSION = 2;
private ObjectResolver resolver;
private final ObjectResolver resolver;
protected FastOutputStream daos;
private StringCache stringCache;
private WritableDocFields writableDocFields;
public JavaBinCodec() {
resolver =null;
writableDocFields =null;
}
public JavaBinCodec(ObjectResolver resolver) {
this(resolver, null);
}
/**
 * Installs a filter that decides which SolrDocument fields are serialized
 * when marshalling. Returns this codec for call chaining.
 */
public JavaBinCodec setWritableDocFields(WritableDocFields writableDocFields){
  this.writableDocFields = writableDocFields;
  return this;
}
public JavaBinCodec(ObjectResolver resolver, StringCache stringCache) {
this.resolver = resolver;
@ -272,18 +280,7 @@ public class JavaBinCodec {
}
if (val instanceof SolrDocument) {
//this needs special treatment to know which fields are to be written
if (resolver == null) {
writeSolrDocument((SolrDocument) val);
} else {
Object retVal = resolver.resolve(val, this);
if (retVal != null) {
if (retVal instanceof SolrDocument) {
writeSolrDocument((SolrDocument) retVal);
} else {
writeVal(retVal);
}
}
}
writeSolrDocument((SolrDocument) val);
return true;
}
if (val instanceof SolrInputDocument) {
@ -341,23 +338,46 @@ public class JavaBinCodec {
dis.readFully(arr);
return arr;
}
//use this to ignore the writable interface because , child docs will ignore the fl flag
// is it a good design?
private boolean ignoreWritable =false;
public void writeSolrDocument(SolrDocument doc) throws IOException {
List<SolrDocument> children = doc.getChildDocuments();
int sz = doc.size() + (children==null ? 0 : children.size());
int fieldsCount = 0;
if(writableDocFields == null || writableDocFields.wantsAllFields() || ignoreWritable){
fieldsCount = doc.size();
} else {
for (Entry<String, Object> e : doc) {
if(toWrite(e.getKey())) fieldsCount++;
}
}
int sz = fieldsCount + (children==null ? 0 : children.size());
writeTag(SOLRDOC);
writeTag(ORDERED_MAP, sz);
for (Map.Entry<String, Object> entry : doc) {
String name = entry.getKey();
writeExternString(name);
Object val = entry.getValue();
writeVal(val);
}
if (children != null) {
for (SolrDocument child : children) {
writeSolrDocument(child);
if(toWrite(name)) {
writeExternString(name);
Object val = entry.getValue();
writeVal(val);
}
}
if (children != null) {
try {
ignoreWritable = true;
for (SolrDocument child : children) {
writeSolrDocument(child);
}
} finally {
ignoreWritable = false;
}
}
}
// True when 'key' should be serialized: no filter is installed, we are inside
// a child document (ignoreWritable — child docs ignore the fl filtering), or
// the installed filter explicitly allows the field.
protected boolean toWrite(String key) {
  return writableDocFields == null || ignoreWritable || writableDocFields.isWritable(key);
}
public SolrDocument readSolrDocument(DataInputInputStream dis) throws IOException {
@ -841,6 +861,12 @@ public class JavaBinCodec {
public Object resolve(Object o, JavaBinCodec codec) throws IOException;
}
/**
 * Filter consulted while marshalling a SolrDocument to decide which
 * fields get written to the stream.
 */
public interface WritableDocFields {
  /** @return true if the named field should be written */
  public boolean isWritable(String name);
  /** @return true if every field should be written (no per-field filtering) */
  public boolean wantsAllFields();
}
public static class StringCache {
private final Cache<StringBytes, String> cache;