SOLR-2857: add UpdateRequestHandler that handles xml,csv,json, and javabin

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1335768 13f79535-47bb-0310-9956-ffa450edef68
Ryan McKinley 2012-05-08 21:06:10 +00:00
parent e35df0b58f
commit 800f5a54a6
79 changed files with 1691 additions and 1525 deletions
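The new solr.UpdateRequestHandler picks a loader from the request's Content-Type (XML, CSV, JSON, or javabin), so a single /update path replaces the format-specific handlers removed below. A client-side sketch, not part of this commit, illustrating that dispatch; the URL, core layout, and the id/name fields are assumptions for illustration only.

// Sketch (not part of the commit): post two formats to the same /update path.
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class UpdateByContentType {
  static void post(String contentType, String body) throws Exception {
    // Assumes a local Solr with the stock example config; adjust host and core path as needed.
    URL url = new URL("http://localhost:8983/solr/update");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    conn.setDoOutput(true);
    conn.setRequestProperty("Content-Type", contentType);
    OutputStream out = conn.getOutputStream();
    try {
      out.write(body.getBytes(StandardCharsets.UTF_8));
    } finally {
      out.close();
    }
    System.out.println(contentType + " -> HTTP " + conn.getResponseCode());
  }

  public static void main(String[] args) throws Exception {
    post("text/csv", "id,name\n1,first doc\n");                     // routed to the CSV loader
    post("application/json", "{\"add\":{\"doc\":{\"id\":\"2\"}}}"); // routed to the JSON loader
  }
}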

View File

@@ -361,10 +361,7 @@
 To enable solr1.1 behavior, remove the /update handler or change its path
 -->
-<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" />
-<!-- CSV update handler, loaded on demand -->
-<requestHandler name="/update/csv" class="solr.CSVRequestHandler" startup="lazy" />
+<requestHandler name="/update" class="solr.UpdateRequestHandler" />
 <!--

View File

@@ -296,7 +296,7 @@
 "update.processor.class" is the class name for the UpdateRequestProcessor. It is initalized
 only once. This can not be changed for each request.
 -->
-<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" >
+<requestHandler name="/update" class="solr.UpdateRequestHandler">
 <!--
 <str name="update.processor.class">org.apache.solr.handler.UpdateRequestProcessor</str>
 -->

View File

@@ -300,7 +300,7 @@
 "update.processor.class" is the class name for the UpdateRequestProcessor. It is initalized
 only once. This can not be changed for each request.
 -->
-<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" >
+<requestHandler name="/update" class="solr.UpdateRequestHandler">
 <!--
 <str name="update.processor.class">org.apache.solr.handler.UpdateRequestProcessor</str>
 -->

View File

@@ -298,7 +298,7 @@
 "update.processor.class" is the class name for the UpdateRequestProcessor. It is initalized
 only once. This can not be changed for each request.
 -->
-<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" >
+<requestHandler name="/update" class="solr.UpdateRequestHandler">
 <!--
 <str name="update.processor.class">org.apache.solr.handler.UpdateRequestProcessor</str>
 -->

View File

@@ -296,7 +296,7 @@
 "update.processor.class" is the class name for the UpdateRequestProcessor. It is initalized
 only once. This can not be changed for each request.
 -->
-<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" >
+<requestHandler name="/update" class="solr.UpdateRequestHandler" >
 <!--
 <str name="update.processor.class">org.apache.solr.handler.UpdateRequestProcessor</str>
 -->

View File

@@ -28,7 +28,7 @@ import org.apache.solr.common.params.UpdateParams;
 import org.apache.solr.common.util.ContentStream;
 import org.apache.solr.common.util.ContentStreamBase;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.handler.ContentStreamLoader;
+import org.apache.solr.handler.loader.ContentStreamLoader;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.schema.IndexSchema;
@@ -134,8 +134,8 @@ public class ExtractingDocumentLoader extends ContentStreamLoader {
 * @throws java.io.IOException
 */
 @Override
-public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stream) throws IOException {
-errHeader = "ExtractingDocumentLoader: " + stream.getSourceInfo();
+public void load(SolrQueryRequest req, SolrQueryResponse rsp,
+ContentStream stream, UpdateRequestProcessor processor) throws Exception {
 Parser parser = null;
 String streamType = req.getParams().get(ExtractingParams.STREAM_TYPE, null);
 if (streamType != null) {
@@ -240,6 +240,4 @@ public class ExtractingDocumentLoader extends ContentStreamLoader {
 throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Stream type of " + streamType + " didn't match any known parsers. Please supply the " + ExtractingParams.STREAM_TYPE + " parameter.");
 }
 }
 }

View File

@@ -26,7 +26,7 @@ import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.update.processor.UpdateRequestProcessor;
 import org.apache.solr.util.plugin.SolrCoreAware;
 import org.apache.solr.handler.ContentStreamHandlerBase;
-import org.apache.solr.handler.ContentStreamLoader;
+import org.apache.solr.handler.loader.ContentStreamLoader;
 import org.apache.tika.config.TikaConfig;
 import org.apache.tika.mime.MimeTypeException;
 import org.slf4j.Logger;

View File

@@ -194,10 +194,7 @@
 </lst>
 </requestHandler>
-<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" />
-<requestHandler name="/update/csv" class="solr.CSVRequestHandler" startup="lazy">
-<bool name="httpCaching">false</bool>
-</requestHandler>
+<requestHandler name="/update" class="solr.UpdateRequestHandler" />
 <requestHandler name="/update/extract" class="org.apache.solr.handler.extraction.ExtractingRequestHandler"/>

View File

@@ -301,7 +301,7 @@ public class ExtractingRequestHandlerTest extends SolrTestCaseJ4 {
 BufferingRequestProcessor p = new BufferingRequestProcessor(null);
 ExtractingDocumentLoader loader = (ExtractingDocumentLoader) handler.newLoader(req, p);
-loader.load(req, rsp, new ContentStreamBase.FileStream(getFile("extraction/version_control.txt")));
+loader.load(req, rsp, new ContentStreamBase.FileStream(getFile("extraction/version_control.txt")),p);
 AddUpdateCommand add = p.addCommands.get(0);
 assertEquals(200, add.commitWithin);

View File

@@ -55,7 +55,7 @@
 </httpCaching>
 </requestDispatcher>
-<requestHandler name="/update" class="solr.XmlUpdateRequestHandler">
+<requestHandler name="/update" class="solr.UpdateRequestHandler" >
 <lst name="defaults">
 <str name="update.chain">lang_id</str>
 </lst>

View File

@@ -705,15 +705,13 @@
 code. To enable solr1.1 behavior, remove the /update handler or
 change its path
 -->
-<requestHandler name="/update" class="solr.XmlUpdateRequestHandler">
+<requestHandler name="/update" class="solr.UpdateRequestHandler" >
 <lst name="defaults">
 <str name="update.chain">uima</str>
 </lst>
 </requestHandler>
-<requestHandler name="/update/javabin" class="solr.BinaryUpdateRequestHandler" />
 <!--
 Analysis request handler. Since Solr 1.3. Use to return how a
 document is analyzed. Useful for debugging and as a token server for
@@ -803,7 +801,7 @@
 signatureField and Solr will maintain uniqueness based on that
 anyway. You have to link the chain to an update handler above to use
 it ie: <requestHandler name="/update
-"class="solr.XmlUpdateRequestHandler"> <lst name="defaults"> <str
+"class="solr.UpdateRequestHandler"> <lst name="defaults"> <str
 name="update.chain">dedupe</str> </lst> </requestHandler>
 -->

View File

@@ -705,9 +705,7 @@
 code. To enable solr1.1 behavior, remove the /update handler or
 change its path
 -->
-<requestHandler name="/update" class="solr.XmlUpdateRequestHandler"/>
-<requestHandler name="/update/javabin" class="solr.BinaryUpdateRequestHandler" />
+<requestHandler name="/update" class="solr.UpdateRequestHandler" />
 <!--
 Analysis request handler. Since Solr 1.3. Use to return how a
@@ -798,7 +796,7 @@
 signatureField and Solr will maintain uniqueness based on that
 anyway. You have to link the chain to an update handler above to use
 it ie: <requestHandler name="/update
-"class="solr.XmlUpdateRequestHandler"> <lst name="defaults"> <str
+"class="solr.UpdateRequestHandler"> <lst name="defaults"> <str
 name="update.chain">dedupe</str> </lst> </requestHandler>
 -->

View File

@@ -31,7 +31,7 @@ import org.apache.solr.common.params.UpdateParams;
 import org.apache.solr.common.util.ContentStream;
 import org.apache.solr.common.util.ContentStreamBase;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.handler.XmlUpdateRequestHandler;
+import org.apache.solr.handler.UpdateRequestHandler;
 import org.apache.solr.request.SolrQueryRequestBase;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.uima.processor.SolrUIMAConfiguration.MapField;
@@ -192,7 +192,7 @@ public class UIMAUpdateRequestProcessorTest extends SolrTestCaseJ4 {
 SolrQueryRequestBase req = new SolrQueryRequestBase(h.getCore(), (SolrParams) mmparams) {
 };
-XmlUpdateRequestHandler handler = new XmlUpdateRequestHandler();
+UpdateRequestHandler handler = new UpdateRequestHandler();
 handler.init(null);
 ArrayList<ContentStream> streams = new ArrayList<ContentStream>(2);
 streams.add(new ContentStreamBase.StringStream(doc));

View File

@@ -83,18 +83,8 @@
 </lst>
 </requestHandler>
-<requestHandler name="/update"
-class="solr.XmlUpdateRequestHandler">
-</requestHandler>
-<!-- Binary Update Request Handler
-http://wiki.apache.org/solr/javabin
--->
-<requestHandler name="/update/javabin"
-class="solr.BinaryUpdateRequestHandler" />
-<requestHandler name="/update/json"
-class="solr.JsonUpdateRequestHandler"
-startup="lazy" />
+<requestHandler name="/update" class="solr.UpdateRequestHandler" />
 <!--
 <queryResponseWriter name="xml"
 default="true"

View File

@@ -17,128 +17,24 @@
 package org.apache.solr.handler;
-import org.apache.solr.client.solrj.request.JavaBinUpdateRequestCodec;
+import org.apache.solr.common.util.NamedList;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.common.util.FastInputStream;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.AddUpdateCommand;
import org.apache.solr.update.DeleteUpdateCommand;
import org.apache.solr.update.processor.UpdateRequestProcessor;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
 /**
 * Update handler which uses the JavaBin format
 *
-*
 * @see org.apache.solr.client.solrj.request.JavaBinUpdateRequestCodec
 * @see org.apache.solr.common.util.JavaBinCodec
-* @since solr 1.4
+*
+* use {@link UpdateRequestHandler}
 */
-public class BinaryUpdateRequestHandler extends ContentStreamHandlerBase {
+@Deprecated
+public class BinaryUpdateRequestHandler extends UpdateRequestHandler {
 @Override
-protected ContentStreamLoader newLoader(SolrQueryRequest req, final UpdateRequestProcessor processor) {
-return new ContentStreamLoader() {
-@Override
-public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stream) throws Exception {
InputStream is = null;
try {
is = stream.getStream();
parseAndLoadDocs(req, rsp, is, processor);
} finally {
if(is != null) {
is.close();
}
}
}
};
}
private void parseAndLoadDocs(final SolrQueryRequest req, SolrQueryResponse rsp, InputStream stream,
final UpdateRequestProcessor processor) throws IOException {
UpdateRequest update = null;
JavaBinUpdateRequestCodec.StreamingUpdateHandler handler = new JavaBinUpdateRequestCodec.StreamingUpdateHandler() {
private AddUpdateCommand addCmd = null;
public void update(SolrInputDocument document, UpdateRequest updateRequest) {
if (document == null) {
// Perhaps commit from the parameters
try {
RequestHandlerUtils.handleCommit(req, processor, updateRequest.getParams(), false);
RequestHandlerUtils.handleRollback(req, processor, updateRequest.getParams(), false);
} catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "ERROR handling commit/rollback");
}
return;
}
if (addCmd == null) {
addCmd = getAddCommand(req, updateRequest.getParams());
}
addCmd.solrDoc = document;
try {
processor.processAdd(addCmd);
addCmd.clear();
} catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "ERROR adding document " + document);
}
}
};
FastInputStream in = FastInputStream.wrap(stream);
for (; ; ) {
try {
update = new JavaBinUpdateRequestCodec().unmarshal(in, handler);
} catch (EOFException e) {
break; // this is expected
} catch (Exception e) {
log.error("Exception while processing update request", e);
break;
}
if (update.getDeleteById() != null || update.getDeleteQuery() != null) {
delete(req, update, processor);
}
}
}
private AddUpdateCommand getAddCommand(SolrQueryRequest req, SolrParams params) {
AddUpdateCommand addCmd = new AddUpdateCommand(req);
addCmd.overwrite = params.getBool(UpdateParams.OVERWRITE, true);
addCmd.commitWithin = params.getInt(UpdateParams.COMMIT_WITHIN, -1);
return addCmd;
}
private void delete(SolrQueryRequest req, UpdateRequest update, UpdateRequestProcessor processor) throws IOException {
SolrParams params = update.getParams();
DeleteUpdateCommand delcmd = new DeleteUpdateCommand(req);
if(params != null) {
delcmd.commitWithin = params.getInt(UpdateParams.COMMIT_WITHIN, -1);
}
if(update.getDeleteById() != null) {
for (String s : update.getDeleteById()) {
delcmd.id = s;
processor.processDelete(delcmd);
}
delcmd.id = null;
}
if(update.getDeleteQuery() != null) {
for (String s : update.getDeleteQuery()) {
delcmd.query = s;
processor.processDelete(delcmd);
}
}
+public void init(NamedList args) {
+super.init(args);
+setAssumeContentType("application/javabin");
+log.warn("Using deprecated class: "+this.getClass().getSimpleName()+" -- replace with UpdateRequestHandler");
 }
 @Override

View File

@@ -17,40 +17,19 @@
 package org.apache.solr.handler;
-import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.common.util.NamedList;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.update.*;
import org.apache.solr.update.processor.UpdateRequestProcessor;
import org.apache.solr.internal.csv.CSVStrategy;
import org.apache.solr.internal.csv.CSVParser;
import org.apache.commons.io.IOUtils;
import java.util.regex.Pattern;
import java.util.List;
import java.util.HashMap;
import java.util.Iterator;
import java.io.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
 /**
-*
+* use {@link UpdateRequestHandler}
 */
-public class CSVRequestHandler extends ContentStreamHandlerBase {
-public static Logger log = LoggerFactory.getLogger(CSVRequestHandler.class);
+@Deprecated
+public class CSVRequestHandler extends UpdateRequestHandler {
 @Override
-protected ContentStreamLoader newLoader(SolrQueryRequest req, UpdateRequestProcessor processor) {
-return new SingleThreadedCSVLoader(req, processor);
+public void init(NamedList args) {
+super.init(args);
+setAssumeContentType("application/csv");
+log.warn("Using deprecated class: "+this.getClass().getSimpleName()+" -- replace with UpdateRequestHandler");
 }
 //////////////////////// SolrInfoMBeans methods //////////////////////
@@ -66,376 +45,6 @@ public class CSVRequestHandler extends ContentStreamHandlerBase {
 }
abstract class CSVLoader extends ContentStreamLoader {
public static final String SEPARATOR="separator";
public static final String FIELDNAMES="fieldnames";
public static final String HEADER="header";
public static final String SKIP="skip";
public static final String SKIPLINES="skipLines";
public static final String MAP="map";
public static final String TRIM="trim";
public static final String EMPTY="keepEmpty";
public static final String SPLIT="split";
public static final String ENCAPSULATOR="encapsulator";
public static final String ESCAPE="escape";
public static final String OVERWRITE="overwrite";
public static final String LITERALS_PREFIX = "literal.";
private static Pattern colonSplit = Pattern.compile(":");
private static Pattern commaSplit = Pattern.compile(",");
public static Logger log = LoggerFactory.getLogger(CSVRequestHandler.class);
final IndexSchema schema;
final SolrParams params;
final CSVStrategy strategy;
final UpdateRequestProcessor processor;
// hashmap to save any literal fields and their values
HashMap <SchemaField, String> literals;
String[] fieldnames;
SchemaField[] fields;
CSVLoader.FieldAdder[] adders;
int skipLines; // number of lines to skip at start of file
final AddUpdateCommand templateAdd;
/** Add a field to a document unless it's zero length.
* The FieldAdder hierarchy handles all the complexity of
* further transforming or splitting field values to keep the
* main logic loop clean. All implementations of add() must be
* MT-safe!
*/
private class FieldAdder {
void add(SolrInputDocument doc, int line, int column, String val) {
if (val.length() > 0) {
doc.addField(fields[column].getName(),val,1.0f);
}
}
}
/** add zero length fields */
private class FieldAdderEmpty extends CSVLoader.FieldAdder {
@Override
void add(SolrInputDocument doc, int line, int column, String val) {
doc.addField(fields[column].getName(),val,1.0f);
}
}
/** trim fields */
private class FieldTrimmer extends CSVLoader.FieldAdder {
private final CSVLoader.FieldAdder base;
FieldTrimmer(CSVLoader.FieldAdder base) { this.base=base; }
@Override
void add(SolrInputDocument doc, int line, int column, String val) {
base.add(doc, line, column, val.trim());
}
}
/** map a single value.
* for just a couple of mappings, this is probably faster than
* using a HashMap.
*/
private class FieldMapperSingle extends CSVLoader.FieldAdder {
private final String from;
private final String to;
private final CSVLoader.FieldAdder base;
FieldMapperSingle(String from, String to, CSVLoader.FieldAdder base) {
this.from=from;
this.to=to;
this.base=base;
}
@Override
void add(SolrInputDocument doc, int line, int column, String val) {
if (from.equals(val)) val=to;
base.add(doc,line,column,val);
}
}
/** Split a single value into multiple values based on
* a CSVStrategy.
*/
private class FieldSplitter extends CSVLoader.FieldAdder {
private final CSVStrategy strategy;
private final CSVLoader.FieldAdder base;
FieldSplitter(CSVStrategy strategy, CSVLoader.FieldAdder base) {
this.strategy = strategy;
this.base = base;
}
@Override
void add(SolrInputDocument doc, int line, int column, String val) {
CSVParser parser = new CSVParser(new StringReader(val), strategy);
try {
String[] vals = parser.getLine();
if (vals!=null) {
for (String v: vals) base.add(doc,line,column,v);
} else {
base.add(doc,line,column,val);
}
} catch (IOException e) {
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,e);
}
}
}
String errHeader="CSVLoader:";
CSVLoader(SolrQueryRequest req, UpdateRequestProcessor processor) {
this.processor = processor;
this.params = req.getParams();
schema = req.getSchema();
this.literals = new HashMap<SchemaField, String>();
templateAdd = new AddUpdateCommand(req);
templateAdd.overwrite=params.getBool(OVERWRITE,true);
templateAdd.commitWithin = params.getInt(UpdateParams.COMMIT_WITHIN, -1);
strategy = new CSVStrategy(',', '"', CSVStrategy.COMMENTS_DISABLED, CSVStrategy.ESCAPE_DISABLED, false, false, false, true);
String sep = params.get(SEPARATOR);
if (sep!=null) {
if (sep.length()!=1) throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"Invalid separator:'"+sep+"'");
strategy.setDelimiter(sep.charAt(0));
}
String encapsulator = params.get(ENCAPSULATOR);
if (encapsulator!=null) {
if (encapsulator.length()!=1) throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"Invalid encapsulator:'"+encapsulator+"'");
}
String escape = params.get(ESCAPE);
if (escape!=null) {
if (escape.length()!=1) throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"Invalid escape:'"+escape+"'");
}
// if only encapsulator or escape is set, disable the other escaping mechanism
if (encapsulator == null && escape != null) {
strategy.setEncapsulator( CSVStrategy.ENCAPSULATOR_DISABLED);
strategy.setEscape(escape.charAt(0));
} else {
if (encapsulator != null) {
strategy.setEncapsulator(encapsulator.charAt(0));
}
if (escape != null) {
char ch = escape.charAt(0);
strategy.setEscape(ch);
if (ch == '\\') {
// If the escape is the standard backslash, then also enable
// unicode escapes (it's harmless since 'u' would not otherwise
// be escaped.
strategy.setUnicodeEscapeInterpretation(true);
}
}
}
String fn = params.get(FIELDNAMES);
fieldnames = fn != null ? commaSplit.split(fn,-1) : null;
Boolean hasHeader = params.getBool(HEADER);
skipLines = params.getInt(SKIPLINES,0);
if (fieldnames==null) {
if (null == hasHeader) {
// assume the file has the headers if they aren't supplied in the args
hasHeader=true;
} else if (!hasHeader) {
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"CSVLoader: must specify fieldnames=<fields>* or header=true");
}
} else {
// if the fieldnames were supplied and the file has a header, we need to
// skip over that header.
if (hasHeader!=null && hasHeader) skipLines++;
prepareFields();
}
}
/** create the FieldAdders that control how each field is indexed */
void prepareFields() {
// Possible future optimization: for really rapid incremental indexing
// from a POST, one could cache all of this setup info based on the params.
// The link from FieldAdder to this would need to be severed for that to happen.
fields = new SchemaField[fieldnames.length];
adders = new CSVLoader.FieldAdder[fieldnames.length];
String skipStr = params.get(SKIP);
List<String> skipFields = skipStr==null ? null : StrUtils.splitSmart(skipStr,',');
CSVLoader.FieldAdder adder = new CSVLoader.FieldAdder();
CSVLoader.FieldAdder adderKeepEmpty = new CSVLoader.FieldAdderEmpty();
for (int i=0; i<fields.length; i++) {
String fname = fieldnames[i];
// to skip a field, leave the entries in fields and addrs null
if (fname.length()==0 || (skipFields!=null && skipFields.contains(fname))) continue;
fields[i] = schema.getField(fname);
boolean keepEmpty = params.getFieldBool(fname,EMPTY,false);
adders[i] = keepEmpty ? adderKeepEmpty : adder;
// Order that operations are applied: split -> trim -> map -> add
// so create in reverse order.
// Creation of FieldAdders could be optimized and shared among fields
String[] fmap = params.getFieldParams(fname,MAP);
if (fmap!=null) {
for (String mapRule : fmap) {
String[] mapArgs = colonSplit.split(mapRule,-1);
if (mapArgs.length!=2)
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "Map rules must be of the form 'from:to' ,got '"+mapRule+"'");
adders[i] = new CSVLoader.FieldMapperSingle(mapArgs[0], mapArgs[1], adders[i]);
}
}
if (params.getFieldBool(fname,TRIM,false)) {
adders[i] = new CSVLoader.FieldTrimmer(adders[i]);
}
if (params.getFieldBool(fname,SPLIT,false)) {
String sepStr = params.getFieldParam(fname,SEPARATOR);
char fsep = sepStr==null || sepStr.length()==0 ? ',' : sepStr.charAt(0);
String encStr = params.getFieldParam(fname,ENCAPSULATOR);
char fenc = encStr==null || encStr.length()==0 ? (char)-2 : encStr.charAt(0);
String escStr = params.getFieldParam(fname,ESCAPE);
char fesc = escStr==null || escStr.length()==0 ? CSVStrategy.ESCAPE_DISABLED : escStr.charAt(0);
CSVStrategy fstrat = new CSVStrategy(fsep,fenc,CSVStrategy.COMMENTS_DISABLED,fesc, false, false, false, false);
adders[i] = new CSVLoader.FieldSplitter(fstrat, adders[i]);
}
}
// look for any literal fields - literal.foo=xyzzy
Iterator<String> paramNames = params.getParameterNamesIterator();
while (paramNames.hasNext()) {
String pname = paramNames.next();
if (!pname.startsWith(LITERALS_PREFIX)) continue;
String name = pname.substring(LITERALS_PREFIX.length());
SchemaField sf = schema.getFieldOrNull(name);
if(sf == null)
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"Invalid field name for literal:'"+ name +"'");
literals.put(sf, params.get(pname));
}
}
private void input_err(String msg, String[] line, int lineno) {
StringBuilder sb = new StringBuilder();
sb.append(errHeader).append(", line=").append(lineno).append(",").append(msg).append("\n\tvalues={");
for (String val: line) {
sb.append("'").append(val).append("',"); }
sb.append('}');
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,sb.toString());
}
private void input_err(String msg, String[] lines, int lineNo, Throwable e) {
StringBuilder sb = new StringBuilder();
sb.append(errHeader).append(", line=").append(lineNo).append(",").append(msg).append("\n\tvalues={");
if (lines != null) {
for (String val : lines) {
sb.append("'").append(val).append("',");
}
} else {
sb.append("NO LINES AVAILABLE");
}
sb.append('}');
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,sb.toString(), e);
}
/** load the CSV input */
@Override
public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stream) throws IOException {
errHeader = "CSVLoader: input=" + stream.getSourceInfo();
Reader reader = null;
try {
reader = stream.getReader();
if (skipLines>0) {
if (!(reader instanceof BufferedReader)) {
reader = new BufferedReader(reader);
}
BufferedReader r = (BufferedReader)reader;
for (int i=0; i<skipLines; i++) {
r.readLine();
}
}
CSVParser parser = new CSVParser(reader, strategy);
// parse the fieldnames from the header of the file
if (fieldnames==null) {
fieldnames = parser.getLine();
if (fieldnames==null) {
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"Expected fieldnames in CSV input");
}
prepareFields();
}
// read the rest of the CSV file
for(;;) {
int line = parser.getLineNumber(); // for error reporting in MT mode
String[] vals = null;
try {
vals = parser.getLine();
} catch (IOException e) {
//Catch the exception and rethrow it with more line information
input_err("can't read line: " + line, null, line, e);
}
if (vals==null) break;
if (vals.length != fields.length) {
input_err("expected "+fields.length+" values but got "+vals.length, vals, line);
}
addDoc(line,vals);
}
} finally{
if (reader != null) {
IOUtils.closeQuietly(reader);
}
}
}
/** called for each line of values (document) */
abstract void addDoc(int line, String[] vals) throws IOException;
/** this must be MT safe... may be called concurrently from multiple threads. */
void doAdd(int line, String[] vals, SolrInputDocument doc, AddUpdateCommand template) throws IOException {
// the line number is passed simply for error reporting in MT mode.
// first, create the lucene document
for (int i=0; i<vals.length; i++) {
if (fields[i]==null) continue; // ignore this field
String val = vals[i];
adders[i].add(doc, line, i, val);
}
// add any literals
for (SchemaField sf : literals.keySet()) {
String fn = sf.getName();
String val = literals.get(sf);
doc.addField(fn, val);
}
template.solrDoc = doc;
processor.processAdd(template);
}
}
class SingleThreadedCSVLoader extends CSVLoader {
SingleThreadedCSVLoader(SolrQueryRequest req, UpdateRequestProcessor processor) {
super(req, processor);
}
@Override
void addDoc(int line, String[] vals) throws IOException {
templateAdd.clear();
SolrInputDocument doc = new SolrInputDocument();
doAdd(line, vals, doc, templateAdd);
}
}

View File

@@ -21,6 +21,7 @@ import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.params.UpdateParams;
 import org.apache.solr.common.util.ContentStream;
 import org.apache.solr.common.util.NamedList;
+import org.apache.solr.handler.loader.ContentStreamLoader;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.update.processor.UpdateRequestProcessor;
@@ -30,9 +31,9 @@ import org.slf4j.LoggerFactory;
 /**
-* Shares common code between various handlers that manipulate {@link org.apache.solr.common.util.ContentStream} objects.
-*
-**/
+* Shares common code between various handlers that manipulate
+* {@link org.apache.solr.common.util.ContentStream} objects.
+*/
 public abstract class ContentStreamHandlerBase extends RequestHandlerBase {
 public static Logger log = LoggerFactory.getLogger(ContentStreamHandlerBase.class);
@@ -70,7 +71,7 @@ public abstract class ContentStreamHandlerBase extends RequestHandlerBase {
 } else {
 for (ContentStream stream : streams) {
-documentLoader.load(req, rsp, stream);
+documentLoader.load(req, rsp, stream, processor);
 }
 // Perhaps commit from the parameters
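With this change the loader no longer holds the UpdateRequestProcessor; ContentStreamHandlerBase hands it to each load() call. A minimal custom loader against the new signature might look like the sketch below; the class name and the one-id-per-line input format are hypothetical, and it assumes load() is the only method a subclass must implement.

// Hypothetical loader sketch using the new four-argument load() signature.
package org.apache.solr.handler.loader;

import java.io.BufferedReader;

import org.apache.commons.io.IOUtils;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.AddUpdateCommand;
import org.apache.solr.update.processor.UpdateRequestProcessor;

public class LineDocLoader extends ContentStreamLoader {
  @Override
  public void load(SolrQueryRequest req, SolrQueryResponse rsp,
      ContentStream stream, UpdateRequestProcessor processor) throws Exception {
    BufferedReader reader = new BufferedReader(stream.getReader());
    try {
      String line;
      while ((line = reader.readLine()) != null) {
        SolrInputDocument doc = new SolrInputDocument();
        doc.addField("id", line.trim());   // one document id per input line (made-up format)
        AddUpdateCommand cmd = new AddUpdateCommand(req);
        cmd.solrDoc = doc;
        processor.processAdd(cmd);         // processor is supplied per call, not held by the loader
      }
    } finally {
      IOUtils.closeQuietly(reader);
    }
  }
}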

View File

@@ -1,460 +0,0 @@
package org.apache.solr.handler;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.*;
import org.apache.commons.io.IOUtils;
import org.apache.noggit.JSONParser;
import org.apache.noggit.JSONUtil;
import org.apache.noggit.ObjectBuilder;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.SolrInputField;
import org.apache.solr.common.params.*;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.AddUpdateCommand;
import org.apache.solr.update.CommitUpdateCommand;
import org.apache.solr.update.DeleteUpdateCommand;
import org.apache.solr.update.RollbackUpdateCommand;
import org.apache.solr.update.processor.UpdateRequestProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @since solr 4.0
*/
class JsonLoader extends ContentStreamLoader {
final static Logger log = LoggerFactory.getLogger( JsonLoader.class );
protected final UpdateRequestProcessor processor;
protected final SolrQueryRequest req;
protected JSONParser parser;
protected final int commitWithin;
protected final boolean overwrite;
public JsonLoader(SolrQueryRequest req, UpdateRequestProcessor processor) {
this.processor = processor;
this.req = req;
commitWithin = req.getParams().getInt(UpdateParams.COMMIT_WITHIN, -1);
overwrite = req.getParams().getBool(UpdateParams.OVERWRITE, true);
}
@Override
public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stream) throws Exception {
errHeader = "JSONLoader: " + stream.getSourceInfo();
Reader reader = null;
try {
reader = stream.getReader();
if (log.isTraceEnabled()) {
String body = IOUtils.toString(reader);
log.trace("body", body);
reader = new StringReader(body);
}
parser = new JSONParser(reader);
this.processUpdate();
}
finally {
IOUtils.closeQuietly(reader);
}
}
@SuppressWarnings("fallthrough")
void processUpdate() throws IOException
{
int ev = parser.nextEvent();
while( ev != JSONParser.EOF ) {
switch( ev )
{
case JSONParser.ARRAY_START:
handleAdds();
break;
case JSONParser.STRING:
if( parser.wasKey() ) {
String v = parser.getString();
if( v.equals( XmlUpdateRequestHandler.ADD ) ) {
int ev2 = parser.nextEvent();
if (ev2 == JSONParser.OBJECT_START) {
processor.processAdd( parseAdd() );
} else if (ev2 == JSONParser.ARRAY_START) {
handleAdds();
} else {
assertEvent(ev2, JSONParser.OBJECT_START);
}
}
else if( v.equals( XmlUpdateRequestHandler.COMMIT ) ) {
CommitUpdateCommand cmd = new CommitUpdateCommand(req, false );
cmd.waitSearcher = true;
parseCommitOptions( cmd );
processor.processCommit( cmd );
}
else if( v.equals( XmlUpdateRequestHandler.OPTIMIZE ) ) {
CommitUpdateCommand cmd = new CommitUpdateCommand(req, true );
cmd.waitSearcher = true;
parseCommitOptions( cmd );
processor.processCommit( cmd );
}
else if( v.equals( XmlUpdateRequestHandler.DELETE ) ) {
processor.processDelete( parseDelete() );
}
else if( v.equals( XmlUpdateRequestHandler.ROLLBACK ) ) {
processor.processRollback( parseRollback() );
}
else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown command: "+v+" ["+parser.getPosition()+"]" );
}
break;
}
// fall through
case JSONParser.LONG:
case JSONParser.NUMBER:
case JSONParser.BIGNUMBER:
case JSONParser.BOOLEAN:
case JSONParser.NULL:
log.info( "can't have a value here! "
+JSONParser.getEventString(ev)+" "+parser.getPosition() );
case JSONParser.OBJECT_START:
case JSONParser.OBJECT_END:
case JSONParser.ARRAY_END:
break;
default:
log.info("Noggit UNKNOWN_EVENT_ID:"+ev);
break;
}
// read the next event
ev = parser.nextEvent();
}
}
DeleteUpdateCommand parseDelete() throws IOException {
assertNextEvent( JSONParser.OBJECT_START );
DeleteUpdateCommand cmd = new DeleteUpdateCommand(req);
cmd.commitWithin = commitWithin;
while( true ) {
int ev = parser.nextEvent();
if( ev == JSONParser.STRING ) {
String key = parser.getString();
if( parser.wasKey() ) {
if( "id".equals( key ) ) {
cmd.setId(parser.getString());
}
else if( "query".equals(key) ) {
cmd.setQuery(parser.getString());
}
else if( "commitWithin".equals(key) ) {
cmd.commitWithin = Integer.parseInt(parser.getString());
} else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown key: "+key+" ["+parser.getPosition()+"]" );
}
}
else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"invalid string: " + key
+" at ["+parser.getPosition()+"]" );
}
}
else if( ev == JSONParser.OBJECT_END ) {
if( cmd.getId() == null && cmd.getQuery() == null ) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Missing id or query for delete ["+parser.getPosition()+"]" );
}
return cmd;
}
else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"Got: "+JSONParser.getEventString( ev )
+" at ["+parser.getPosition()+"]" );
}
}
}
RollbackUpdateCommand parseRollback() throws IOException {
assertNextEvent( JSONParser.OBJECT_START );
assertNextEvent( JSONParser.OBJECT_END );
return new RollbackUpdateCommand(req);
}
void parseCommitOptions(CommitUpdateCommand cmd ) throws IOException
{
assertNextEvent( JSONParser.OBJECT_START );
final Map<String,Object> map = (Map)ObjectBuilder.getVal(parser);
// SolrParams currently expects string values...
SolrParams p = new SolrParams() {
@Override
public String get(String param) {
Object o = map.get(param);
return o == null ? null : o.toString();
}
@Override
public String[] getParams(String param) {
return new String[]{get(param)};
}
@Override
public Iterator<String> getParameterNamesIterator() {
return map.keySet().iterator();
}
};
RequestHandlerUtils.validateCommitParams(p);
p = SolrParams.wrapDefaults(p, req.getParams()); // default to the normal request params for commit options
RequestHandlerUtils.updateCommit(cmd, p);
}
AddUpdateCommand parseAdd() throws IOException
{
AddUpdateCommand cmd = new AddUpdateCommand(req);
cmd.commitWithin = commitWithin;
cmd.overwrite = overwrite;
float boost = 1.0f;
while( true ) {
int ev = parser.nextEvent();
if( ev == JSONParser.STRING ) {
if( parser.wasKey() ) {
String key = parser.getString();
if( "doc".equals( key ) ) {
if( cmd.solrDoc != null ) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "multiple docs in same add command" );
}
ev = assertNextEvent( JSONParser.OBJECT_START );
cmd.solrDoc = parseDoc( ev );
}
else if( XmlUpdateRequestHandler.OVERWRITE.equals( key ) ) {
cmd.overwrite = parser.getBoolean(); // reads next boolean
}
else if( XmlUpdateRequestHandler.COMMIT_WITHIN.equals( key ) ) {
cmd.commitWithin = (int)parser.getLong();
}
else if( "boost".equals( key ) ) {
boost = Float.parseFloat( parser.getNumberChars().toString() );
}
else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown key: "+key+" ["+parser.getPosition()+"]" );
}
}
else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"Should be a key "
+" at ["+parser.getPosition()+"]" );
}
}
else if( ev == JSONParser.OBJECT_END ) {
if( cmd.solrDoc == null ) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,"missing solr document. "+parser.getPosition() );
}
cmd.solrDoc.setDocumentBoost( boost );
return cmd;
}
else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"Got: "+JSONParser.getEventString( ev )
+" at ["+parser.getPosition()+"]" );
}
}
}
void handleAdds() throws IOException
{
while( true ) {
AddUpdateCommand cmd = new AddUpdateCommand(req);
cmd.commitWithin = commitWithin;
cmd.overwrite = overwrite;
int ev = parser.nextEvent();
if (ev == JSONParser.ARRAY_END) break;
assertEvent(ev, JSONParser.OBJECT_START);
cmd.solrDoc = parseDoc(ev);
processor.processAdd(cmd);
}
}
int assertNextEvent(int expected ) throws IOException
{
int got = parser.nextEvent();
assertEvent(got, expected);
return got;
}
void assertEvent(int ev, int expected) {
if( ev != expected ) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"Expected: "+JSONParser.getEventString( expected )
+" but got "+JSONParser.getEventString( ev )
+" at ["+parser.getPosition()+"]" );
}
}
private SolrInputDocument parseDoc(int ev) throws IOException {
assert ev == JSONParser.OBJECT_START;
SolrInputDocument sdoc = new SolrInputDocument();
for (;;) {
SolrInputField sif = parseField();
if (sif == null) return sdoc;
SolrInputField prev = sdoc.put(sif.getName(), sif);
if (prev != null) {
// blech - repeated keys
sif.addValue(prev.getValue(), prev.getBoost());
}
}
}
private SolrInputField parseField() throws IOException {
int ev = parser.nextEvent();
if (ev == JSONParser.OBJECT_END) {
return null;
}
String fieldName = parser.getString();
SolrInputField sif = new SolrInputField(fieldName);
parseFieldValue(sif);
return sif;
}
private void parseFieldValue(SolrInputField sif) throws IOException {
int ev = parser.nextEvent();
if (ev == JSONParser.OBJECT_START) {
parseExtendedFieldValue(sif, ev);
} else {
Object val = parseNormalFieldValue(ev);
sif.setValue(val, 1.0f);
}
}
private void parseExtendedFieldValue(SolrInputField sif, int ev) throws IOException {
assert ev == JSONParser.OBJECT_START;
float boost = 1.0f;
Object normalFieldValue = null;
Map<String, Object> extendedInfo = null;
for (;;) {
ev = parser.nextEvent();
switch (ev) {
case JSONParser.STRING:
String label = parser.getString();
if ("boost".equals(label)) {
ev = parser.nextEvent();
if( ev != JSONParser.NUMBER &&
ev != JSONParser.LONG &&
ev != JSONParser.BIGNUMBER ) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "boost should have number! "+JSONParser.getEventString(ev) );
}
boost = (float)parser.getDouble();
} else if ("value".equals(label)) {
normalFieldValue = parseNormalFieldValue(parser.nextEvent());
} else {
// If we encounter other unknown map keys, then use a map
if (extendedInfo == null) {
extendedInfo = new HashMap<String, Object>(2);
}
// for now, the only extended info will be field values
// we could either store this as an Object or a SolrInputField
Object val = parseNormalFieldValue(parser.nextEvent());
extendedInfo.put(label, val);
}
break;
case JSONParser.OBJECT_END:
if (extendedInfo != null) {
if (normalFieldValue != null) {
extendedInfo.put("value",normalFieldValue);
}
sif.setValue(extendedInfo, boost);
} else {
sif.setValue(normalFieldValue, boost);
}
return;
default:
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error parsing JSON extended field value. Unexpected "+JSONParser.getEventString(ev) );
}
}
}
private Object parseNormalFieldValue(int ev) throws IOException {
if (ev == JSONParser.ARRAY_START) {
List<Object> val = parseArrayFieldValue(ev);
return val;
} else {
Object val = parseSingleFieldValue(ev);
return val;
}
}
private Object parseSingleFieldValue(int ev) throws IOException {
switch (ev) {
case JSONParser.STRING:
return parser.getString();
case JSONParser.LONG:
case JSONParser.NUMBER:
case JSONParser.BIGNUMBER:
return parser.getNumberChars().toString();
case JSONParser.BOOLEAN:
return Boolean.toString(parser.getBoolean()); // for legacy reasons, single values s are expected to be strings
case JSONParser.NULL:
parser.getNull();
return null;
case JSONParser.ARRAY_START:
return parseArrayFieldValue(ev);
default:
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error parsing JSON field value. Unexpected "+JSONParser.getEventString(ev) );
}
}
private List<Object> parseArrayFieldValue(int ev) throws IOException {
assert ev == JSONParser.ARRAY_START;
ArrayList lst = new ArrayList(2);
for (;;) {
ev = parser.nextEvent();
if (ev == JSONParser.ARRAY_END) {
return lst;
}
Object val = parseSingleFieldValue(ev);
lst.add(val);
}
}
}

View File

@@ -18,26 +18,18 @@
 package org.apache.solr.handler;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.update.processor.UpdateRequestProcessor;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 /**
-* Add documents to solr using the noggit JSON parser.
+* use {@link UpdateRequestHandler}
 */
-public class JsonUpdateRequestHandler extends ContentStreamHandlerBase {
-public static Logger log = LoggerFactory.getLogger(JsonUpdateRequestHandler.class);
+@Deprecated
+public class JsonUpdateRequestHandler extends UpdateRequestHandler {
 @Override
 public void init(NamedList args) {
 super.init(args);
-}
-@Override
-protected ContentStreamLoader newLoader(SolrQueryRequest req, UpdateRequestProcessor processor) {
-return new JsonLoader(req, processor);
+setAssumeContentType("application/json");
+log.warn("Using deprecated class: "+this.getClass().getSimpleName()+" -- replace with UpdateRequestHandler");
 }
 //////////////////////// SolrInfoMBeans methods //////////////////////

View File

@@ -0,0 +1,167 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.MapSolrParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.handler.loader.CSVLoader;
import org.apache.solr.handler.loader.ContentStreamLoader;
import org.apache.solr.handler.loader.JavabinLoader;
import org.apache.solr.handler.loader.JsonLoader;
import org.apache.solr.handler.loader.XMLLoader;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.processor.UpdateRequestProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* UpdateHandler that uses content-type to pick the right Loader
*/
public class UpdateRequestHandler extends ContentStreamHandlerBase {
public static Logger log = LoggerFactory.getLogger(UpdateRequestHandler.class);
// XML Constants
public static final String ADD = "add";
public static final String DELETE = "delete";
public static final String OPTIMIZE = "optimize";
public static final String COMMIT = "commit";
public static final String ROLLBACK = "rollback";
public static final String WAIT_SEARCHER = "waitSearcher";
public static final String SOFT_COMMIT = "softCommit";
public static final String OVERWRITE = "overwrite";
public static final String VERSION = "version";
// NOTE: This constant is for use with the <add> XML tag, not the HTTP param with same name
public static final String COMMIT_WITHIN = "commitWithin";
Map<String,ContentStreamLoader> loaders = null;
ContentStreamLoader instance = new ContentStreamLoader() {
@Override
public void load(SolrQueryRequest req, SolrQueryResponse rsp,
ContentStream stream, UpdateRequestProcessor processor) throws Exception {
String type = req.getParams().get(UpdateParams.ASSUME_CONTENT_TYPE);
if(type == null) {
type = stream.getContentType();
}
if( type == null ) { // Normal requests will not get here.
throw new SolrException(ErrorCode.BAD_REQUEST, "Missing ContentType");
}
int idx = type.indexOf(';');
if(idx>0) {
type = type.substring(0,idx);
}
ContentStreamLoader loader = loaders.get(type);
if(loader==null) {
throw new SolrException(ErrorCode.BAD_REQUEST, "Unsupported ContentType: "
+type+ " Not in: "+loaders.keySet());
}
if(loader.getDefaultWT()!=null) {
setDefaultWT(req,loader);
}
loader.load(req, rsp, stream, processor);
}
private void setDefaultWT(SolrQueryRequest req, ContentStreamLoader loader) {
SolrParams params = req.getParams();
if( params.get(CommonParams.WT) == null ) {
String wt = loader.getDefaultWT();
// Make sure it is a valid writer
if(req.getCore().getQueryResponseWriter(wt)!=null) {
Map<String,String> map = new HashMap<String,String>(1);
map.put(CommonParams.WT, wt);
req.setParams(SolrParams.wrapDefaults(params,
new MapSolrParams(map)));
}
}
}
};
@Override
public void init(NamedList args) {
super.init(args);
// Since backed by a non-thread safe Map, it should not be modifiable
loaders = Collections.unmodifiableMap(createDefaultLoaders(args));
}
protected void setAssumeContentType(String ct) {
if(invariants==null) {
Map<String,String> map = new HashMap<String,String>();
map.put(UpdateParams.ASSUME_CONTENT_TYPE,ct);
invariants = new MapSolrParams(map);
}
else {
ModifiableSolrParams params = new ModifiableSolrParams(invariants);
params.set(UpdateParams.ASSUME_CONTENT_TYPE,ct);
invariants = params;
}
}
protected Map<String,ContentStreamLoader> createDefaultLoaders(NamedList args) {
SolrParams p = null;
if(args!=null) {
p = SolrParams.toSolrParams(args);
}
Map<String,ContentStreamLoader> registry = new HashMap<String,ContentStreamLoader>();
registry.put("application/xml", new XMLLoader().init(p) );
registry.put("application/json", new JsonLoader().init(p) );
registry.put("application/csv", new CSVLoader().init(p) );
registry.put("application/javabin", new JavabinLoader().init(p) );
registry.put("text/csv", registry.get("application/csv") );
registry.put("text/xml", registry.get("application/xml") );
registry.put("text/json", registry.get("application/json") );
return registry;
}
@Override
protected ContentStreamLoader newLoader(SolrQueryRequest req, final UpdateRequestProcessor processor) {
return instance;
}
//////////////////////// SolrInfoMBeans methods //////////////////////
@Override
public String getDescription() {
return "Add documents using XML (with XSLT), CSV, JSON, or javabin";
}
@Override
public String getSource() {
return "$URL$";
}
}
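Because createDefaultLoaders() is protected and init() only wraps its result, a subclass can register extra content types without touching the dispatch logic. A hedged sketch follows; the application/x-lines type and LineDocLoader (from the earlier loader sketch) are hypothetical names, not part of this commit.

// Hypothetical subclass extending the default loader registry with one more content type.
package org.apache.solr.handler;

import java.util.Map;

import org.apache.solr.common.util.NamedList;
import org.apache.solr.handler.loader.ContentStreamLoader;
import org.apache.solr.handler.loader.LineDocLoader;   // the hypothetical loader sketched earlier

public class LineUpdateRequestHandler extends UpdateRequestHandler {
  @Override
  protected Map<String,ContentStreamLoader> createDefaultLoaders(NamedList args) {
    // Start from the stock XML/CSV/JSON/javabin registry, then add our own mapping.
    Map<String,ContentStreamLoader> registry = super.createDefaultLoaders(args);
    registry.put("application/x-lines", new LineDocLoader());
    return registry;
  }
}

Requests posted with Content-Type application/x-lines would then reach LineDocLoader, while the stock types keep their default loaders.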

View File

@@ -17,67 +17,21 @@
 package org.apache.solr.handler;
-import javax.xml.stream.XMLInputFactory;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.XMLErrorLogger;
-import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.update.processor.UpdateRequestProcessor;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 /**
 * Add documents to solr using the STAX XML parser.
+*
+* use {@link UpdateRequestHandler}
 */
-public class XmlUpdateRequestHandler extends ContentStreamHandlerBase {
-public static Logger log = LoggerFactory.getLogger(XmlUpdateRequestHandler.class);
+@Deprecated
+public class XmlUpdateRequestHandler extends UpdateRequestHandler {
private static final XMLErrorLogger xmllog = new XMLErrorLogger(log);
// XML Constants
public static final String ADD = "add";
public static final String DELETE = "delete";
public static final String OPTIMIZE = "optimize";
public static final String COMMIT = "commit";
public static final String ROLLBACK = "rollback";
public static final String WAIT_SEARCHER = "waitSearcher";
public static final String SOFT_COMMIT = "softCommit";
public static final String OVERWRITE = "overwrite";
public static final String VERSION = "version";
// NOTE: This constant is for use with the <add> XML tag, not the HTTP param with same name
public static final String COMMIT_WITHIN = "commitWithin";
XMLInputFactory inputFactory;
 @Override
 public void init(NamedList args) {
 super.init(args);
-inputFactory = XMLInputFactory.newInstance();
try {
// The java 1.6 bundled stax parser (sjsxp) does not currently have a thread-safe
// XMLInputFactory, as that implementation tries to cache and reuse the
// XMLStreamReader. Setting the parser-specific "reuse-instance" property to false
// prevents this.
// All other known open-source stax parsers (and the bea ref impl)
// have thread-safe factories.
inputFactory.setProperty("reuse-instance", Boolean.FALSE);
}
catch (IllegalArgumentException ex) {
// Other implementations will likely throw this exception since "reuse-instance"
// isimplementation specific.
log.debug("Unable to set the 'reuse-instance' property for the input chain: " + inputFactory);
}
inputFactory.setXMLReporter(xmllog);
}
@Override
protected ContentStreamLoader newLoader(SolrQueryRequest req, UpdateRequestProcessor processor) {
return new XMLLoader(processor, inputFactory);
+setAssumeContentType("application/xml");
+log.warn("Using deprecated class: "+this.getClass().getSimpleName()+" -- replace with UpdateRequestHandler");
 }
 //////////////////////// SolrInfoMBeans methods //////////////////////

View File

@@ -17,58 +17,21 @@
 package org.apache.solr.handler;
-import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.XMLErrorLogger;
-import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.update.processor.UpdateRequestProcessor;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import javax.xml.stream.XMLInputFactory;
 /**
 * Add documents to solr using the STAX XML parser, transforming it with XSLT first
+*
+* use {@link UpdateRequestHandler}
 */
-public class XsltUpdateRequestHandler extends ContentStreamHandlerBase {
-public static Logger log = LoggerFactory.getLogger(XsltUpdateRequestHandler.class);
+@Deprecated
+public class XsltUpdateRequestHandler extends UpdateRequestHandler {
public static final XMLErrorLogger xmllog = new XMLErrorLogger(log);
public static final int XSLT_CACHE_DEFAULT = 60;
private static final String XSLT_CACHE_PARAM = "xsltCacheLifetimeSeconds";
XMLInputFactory inputFactory;
private Integer xsltCacheLifetimeSeconds;
 @Override
 public void init(NamedList args) {
 super.init(args);
-inputFactory = XMLInputFactory.newInstance();
try {
// The java 1.6 bundled stax parser (sjsxp) does not currently have a thread-safe
// XMLInputFactory, as that implementation tries to cache and reuse the
// XMLStreamReader. Setting the parser-specific "reuse-instance" property to false
// prevents this.
// All other known open-source stax parsers (and the bea ref impl)
// have thread-safe factories.
inputFactory.setProperty("reuse-instance", Boolean.FALSE);
}
catch (IllegalArgumentException ex) {
// Other implementations will likely throw this exception since "reuse-instance"
// isimplementation specific.
log.debug("Unable to set the 'reuse-instance' property for the input chain: " + inputFactory);
}
inputFactory.setXMLReporter(xmllog);
final SolrParams p = SolrParams.toSolrParams(args);
this.xsltCacheLifetimeSeconds = p.getInt(XSLT_CACHE_PARAM,XSLT_CACHE_DEFAULT);
log.info("xsltCacheLifetimeSeconds=" + xsltCacheLifetimeSeconds);
}
@Override
protected ContentStreamLoader newLoader(SolrQueryRequest req, UpdateRequestProcessor processor) {
return new XsltXMLLoader(processor, inputFactory, xsltCacheLifetimeSeconds);
} }
//////////////////////// SolrInfoMBeans methods ////////////////////// //////////////////////// SolrInfoMBeans methods //////////////////////

View File

@ -1,119 +0,0 @@
package org.apache.solr.handler;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.update.processor.UpdateRequestProcessor;
import org.apache.solr.util.xslt.TransformerProvider;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.common.util.ContentStreamBase;
import org.apache.solr.common.util.XMLErrorLogger;
import org.apache.solr.core.SolrConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.io.IOUtils;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLInputFactory;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.sax.SAXSource;
import org.xml.sax.InputSource;
import java.io.InputStream;
import java.io.IOException;
import java.util.Map;
/**
* Extends the XMLLoader by applying an XSLT transform before the
* XMLLoader actually loads the XML
*
**/
class XsltXMLLoader extends XMLLoader {
public static final String TRANSFORM_PARAM = "tr";
public static final String CONTEXT_TRANSFORMER_KEY = "xsltupdater.transformer";
private final Integer xsltCacheLifetimeSeconds;
public XsltXMLLoader(UpdateRequestProcessor processor, XMLInputFactory inputFactory, Integer xsltCacheLifetimeSeconds) {
super(processor, inputFactory);
this.xsltCacheLifetimeSeconds = xsltCacheLifetimeSeconds;
}
@Override
public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stream) throws Exception {
final DOMResult result = new DOMResult();
final Transformer t = getTransformer(req);
InputStream is = null;
XMLStreamReader parser = null;
// first step: read XML and build DOM using Transformer (this is no overhead, as XSL always produces
// an internal result DOM tree, we just access it directly as input for StAX):
try {
is = stream.getStream();
final String charset = ContentStreamBase.getCharsetFromContentType(stream.getContentType());
final InputSource isrc = new InputSource(is);
isrc.setEncoding(charset);
final SAXSource source = new SAXSource(isrc);
t.transform(source, result);
} catch(TransformerException te) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, te.getMessage(), te);
} finally {
IOUtils.closeQuietly(is);
}
    // second step: feed the intermediate DOM tree into the StAX parser:
try {
parser = inputFactory.createXMLStreamReader(new DOMSource(result.getNode()));
this.processUpdate(req, processor, parser);
} catch (XMLStreamException e) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e.getMessage(), e);
} finally {
if (parser != null) parser.close();
}
}
/** Get Transformer from request context, or from TransformerProvider.
* This allows either getContentType(...) or write(...) to instantiate the Transformer,
* depending on which one is called first, then the other one reuses the same Transformer
*/
protected Transformer getTransformer(SolrQueryRequest request) throws IOException {
final String xslt = request.getParams().get(TRANSFORM_PARAM,null);
if(xslt==null) {
throw new IOException("'" + TRANSFORM_PARAM + "' request parameter is required to use the XSLTResponseWriter");
}
// not the cleanest way to achieve this
SolrConfig solrConfig = request.getCore().getSolrConfig();
// no need to synchronize access to context, right?
// Nothing else happens with it at the same time
final Map<Object,Object> ctx = request.getContext();
Transformer result = (Transformer)ctx.get(CONTEXT_TRANSFORMER_KEY);
if(result==null) {
result = TransformerProvider.instance.getTransformer(solrConfig, xslt,xsltCacheLifetimeSeconds.intValue());
result.setErrorListener(XsltUpdateRequestHandler.xmllog);
ctx.put(CONTEXT_TRANSFORMER_KEY,result);
}
return result;
}
}

View File

@ -0,0 +1,46 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.loader;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.update.processor.UpdateRequestProcessor;
import java.io.*;
public class CSVLoader extends ContentStreamLoader {
@Override
public void load(SolrQueryRequest req, SolrQueryResponse rsp,
ContentStream stream, UpdateRequestProcessor processor) throws Exception {
new SingleThreadedCSVLoader(req,processor).load(req, rsp, stream, processor);
}
}
class SingleThreadedCSVLoader extends CSVLoaderBase {
SingleThreadedCSVLoader(SolrQueryRequest req, UpdateRequestProcessor processor) {
super(req, processor);
}
@Override
void addDoc(int line, String[] vals) throws IOException {
templateAdd.clear();
SolrInputDocument doc = new SolrInputDocument();
doAdd(line, vals, doc, templateAdd);
}
}

View File

@ -0,0 +1,399 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.loader;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.update.*;
import org.apache.solr.update.processor.UpdateRequestProcessor;
import org.apache.solr.internal.csv.CSVStrategy;
import org.apache.solr.internal.csv.CSVParser;
import org.apache.commons.io.IOUtils;
import java.util.regex.Pattern;
import java.util.List;
import java.util.HashMap;
import java.util.Iterator;
import java.io.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
abstract class CSVLoaderBase extends ContentStreamLoader {
public static final String SEPARATOR="separator";
public static final String FIELDNAMES="fieldnames";
public static final String HEADER="header";
public static final String SKIP="skip";
public static final String SKIPLINES="skipLines";
public static final String MAP="map";
public static final String TRIM="trim";
public static final String EMPTY="keepEmpty";
public static final String SPLIT="split";
public static final String ENCAPSULATOR="encapsulator";
public static final String ESCAPE="escape";
public static final String OVERWRITE="overwrite";
public static final String LITERALS_PREFIX = "literal.";
private static Pattern colonSplit = Pattern.compile(":");
private static Pattern commaSplit = Pattern.compile(",");
public static Logger log = LoggerFactory.getLogger(CSVLoaderBase.class);
final IndexSchema schema;
final SolrParams params;
final CSVStrategy strategy;
final UpdateRequestProcessor processor;
// hashmap to save any literal fields and their values
HashMap <SchemaField, String> literals;
String[] fieldnames;
SchemaField[] fields;
CSVLoaderBase.FieldAdder[] adders;
int skipLines; // number of lines to skip at start of file
final AddUpdateCommand templateAdd;
/** Add a field to a document unless it's zero length.
* The FieldAdder hierarchy handles all the complexity of
* further transforming or splitting field values to keep the
* main logic loop clean. All implementations of add() must be
* MT-safe!
*/
private class FieldAdder {
void add(SolrInputDocument doc, int line, int column, String val) {
if (val.length() > 0) {
doc.addField(fields[column].getName(),val,1.0f);
}
}
}
/** add zero length fields */
private class FieldAdderEmpty extends CSVLoaderBase.FieldAdder {
@Override
void add(SolrInputDocument doc, int line, int column, String val) {
doc.addField(fields[column].getName(),val,1.0f);
}
}
/** trim fields */
private class FieldTrimmer extends CSVLoaderBase.FieldAdder {
private final CSVLoaderBase.FieldAdder base;
FieldTrimmer(CSVLoaderBase.FieldAdder base) { this.base=base; }
@Override
void add(SolrInputDocument doc, int line, int column, String val) {
base.add(doc, line, column, val.trim());
}
}
/** map a single value.
* for just a couple of mappings, this is probably faster than
* using a HashMap.
*/
private class FieldMapperSingle extends CSVLoaderBase.FieldAdder {
private final String from;
private final String to;
private final CSVLoaderBase.FieldAdder base;
FieldMapperSingle(String from, String to, CSVLoaderBase.FieldAdder base) {
this.from=from;
this.to=to;
this.base=base;
}
@Override
void add(SolrInputDocument doc, int line, int column, String val) {
if (from.equals(val)) val=to;
base.add(doc,line,column,val);
}
}
/** Split a single value into multiple values based on
* a CSVStrategy.
*/
private class FieldSplitter extends CSVLoaderBase.FieldAdder {
private final CSVStrategy strategy;
private final CSVLoaderBase.FieldAdder base;
FieldSplitter(CSVStrategy strategy, CSVLoaderBase.FieldAdder base) {
this.strategy = strategy;
this.base = base;
}
@Override
void add(SolrInputDocument doc, int line, int column, String val) {
CSVParser parser = new CSVParser(new StringReader(val), strategy);
try {
String[] vals = parser.getLine();
if (vals!=null) {
for (String v: vals) base.add(doc,line,column,v);
} else {
base.add(doc,line,column,val);
}
} catch (IOException e) {
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,e);
}
}
}
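
  For reference, the per-field splitting that FieldSplitter performs reuses the same CSVParser/CSVStrategy machinery applied to whole lines. A minimal standalone sketch, assuming only the org.apache.solr.internal.csv classes already imported above and a hypothetical multi-valued cell value:

  import java.io.StringReader;

  import org.apache.solr.internal.csv.CSVParser;
  import org.apache.solr.internal.csv.CSVStrategy;

  public class FieldSplitDemo {
    public static void main(String[] args) throws Exception {
      // Same shape of per-field strategy that prepareFields() builds for
      // f.<field>.split=true: ',' separator, encapsulator and escape disabled.
      CSVStrategy fstrat = new CSVStrategy(',', (char) -2, CSVStrategy.COMMENTS_DISABLED,
          CSVStrategy.ESCAPE_DISABLED, false, false, false, false);
      String val = "red,green,blue";   // hypothetical cell value from one CSV column
      String[] vals = new CSVParser(new StringReader(val), fstrat).getLine();
      for (String v : vals) {
        System.out.println(v);         // each piece is passed on to the wrapped FieldAdder
      }
    }
  }

  Each piece returned by getLine() is handed to the wrapped adder, which is why the chain is built in reverse order in prepareFields() below: split runs first, then trim, then map, then the final add.
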
String errHeader="CSVLoader:";
CSVLoaderBase(SolrQueryRequest req, UpdateRequestProcessor processor) {
this.processor = processor;
this.params = req.getParams();
schema = req.getSchema();
this.literals = new HashMap<SchemaField, String>();
templateAdd = new AddUpdateCommand(req);
templateAdd.overwrite=params.getBool(OVERWRITE,true);
templateAdd.commitWithin = params.getInt(UpdateParams.COMMIT_WITHIN, -1);
strategy = new CSVStrategy(',', '"', CSVStrategy.COMMENTS_DISABLED, CSVStrategy.ESCAPE_DISABLED, false, false, false, true);
String sep = params.get(SEPARATOR);
if (sep!=null) {
if (sep.length()!=1) throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"Invalid separator:'"+sep+"'");
strategy.setDelimiter(sep.charAt(0));
}
String encapsulator = params.get(ENCAPSULATOR);
if (encapsulator!=null) {
if (encapsulator.length()!=1) throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"Invalid encapsulator:'"+encapsulator+"'");
}
String escape = params.get(ESCAPE);
if (escape!=null) {
if (escape.length()!=1) throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"Invalid escape:'"+escape+"'");
}
// if only encapsulator or escape is set, disable the other escaping mechanism
if (encapsulator == null && escape != null) {
strategy.setEncapsulator( CSVStrategy.ENCAPSULATOR_DISABLED);
strategy.setEscape(escape.charAt(0));
} else {
if (encapsulator != null) {
strategy.setEncapsulator(encapsulator.charAt(0));
}
if (escape != null) {
char ch = escape.charAt(0);
strategy.setEscape(ch);
if (ch == '\\') {
// If the escape is the standard backslash, then also enable
// unicode escapes (it's harmless since 'u' would not otherwise
          // be escaped).
strategy.setUnicodeEscapeInterpretation(true);
}
}
}
String fn = params.get(FIELDNAMES);
fieldnames = fn != null ? commaSplit.split(fn,-1) : null;
Boolean hasHeader = params.getBool(HEADER);
skipLines = params.getInt(SKIPLINES,0);
if (fieldnames==null) {
if (null == hasHeader) {
// assume the file has the headers if they aren't supplied in the args
hasHeader=true;
} else if (!hasHeader) {
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"CSVLoader: must specify fieldnames=<fields>* or header=true");
}
} else {
// if the fieldnames were supplied and the file has a header, we need to
// skip over that header.
if (hasHeader!=null && hasHeader) skipLines++;
prepareFields();
}
}
/** create the FieldAdders that control how each field is indexed */
void prepareFields() {
// Possible future optimization: for really rapid incremental indexing
// from a POST, one could cache all of this setup info based on the params.
// The link from FieldAdder to this would need to be severed for that to happen.
fields = new SchemaField[fieldnames.length];
adders = new CSVLoaderBase.FieldAdder[fieldnames.length];
String skipStr = params.get(SKIP);
List<String> skipFields = skipStr==null ? null : StrUtils.splitSmart(skipStr,',');
CSVLoaderBase.FieldAdder adder = new CSVLoaderBase.FieldAdder();
CSVLoaderBase.FieldAdder adderKeepEmpty = new CSVLoaderBase.FieldAdderEmpty();
for (int i=0; i<fields.length; i++) {
String fname = fieldnames[i];
      // to skip a field, leave the entries in fields and adders null
if (fname.length()==0 || (skipFields!=null && skipFields.contains(fname))) continue;
fields[i] = schema.getField(fname);
boolean keepEmpty = params.getFieldBool(fname,EMPTY,false);
adders[i] = keepEmpty ? adderKeepEmpty : adder;
// Order that operations are applied: split -> trim -> map -> add
// so create in reverse order.
// Creation of FieldAdders could be optimized and shared among fields
String[] fmap = params.getFieldParams(fname,MAP);
if (fmap!=null) {
for (String mapRule : fmap) {
String[] mapArgs = colonSplit.split(mapRule,-1);
if (mapArgs.length!=2)
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "Map rules must be of the form 'from:to' ,got '"+mapRule+"'");
adders[i] = new CSVLoaderBase.FieldMapperSingle(mapArgs[0], mapArgs[1], adders[i]);
}
}
if (params.getFieldBool(fname,TRIM,false)) {
adders[i] = new CSVLoaderBase.FieldTrimmer(adders[i]);
}
if (params.getFieldBool(fname,SPLIT,false)) {
String sepStr = params.getFieldParam(fname,SEPARATOR);
char fsep = sepStr==null || sepStr.length()==0 ? ',' : sepStr.charAt(0);
String encStr = params.getFieldParam(fname,ENCAPSULATOR);
char fenc = encStr==null || encStr.length()==0 ? (char)-2 : encStr.charAt(0);
String escStr = params.getFieldParam(fname,ESCAPE);
char fesc = escStr==null || escStr.length()==0 ? CSVStrategy.ESCAPE_DISABLED : escStr.charAt(0);
CSVStrategy fstrat = new CSVStrategy(fsep,fenc,CSVStrategy.COMMENTS_DISABLED,fesc, false, false, false, false);
adders[i] = new CSVLoaderBase.FieldSplitter(fstrat, adders[i]);
}
}
// look for any literal fields - literal.foo=xyzzy
Iterator<String> paramNames = params.getParameterNamesIterator();
while (paramNames.hasNext()) {
String pname = paramNames.next();
if (!pname.startsWith(LITERALS_PREFIX)) continue;
String name = pname.substring(LITERALS_PREFIX.length());
SchemaField sf = schema.getFieldOrNull(name);
if(sf == null)
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"Invalid field name for literal:'"+ name +"'");
literals.put(sf, params.get(pname));
}
}
private void input_err(String msg, String[] line, int lineno) {
StringBuilder sb = new StringBuilder();
sb.append(errHeader).append(", line=").append(lineno).append(",").append(msg).append("\n\tvalues={");
for (String val: line) {
sb.append("'").append(val).append("',"); }
sb.append('}');
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,sb.toString());
}
private void input_err(String msg, String[] lines, int lineNo, Throwable e) {
StringBuilder sb = new StringBuilder();
sb.append(errHeader).append(", line=").append(lineNo).append(",").append(msg).append("\n\tvalues={");
if (lines != null) {
for (String val : lines) {
sb.append("'").append(val).append("',");
}
} else {
sb.append("NO LINES AVAILABLE");
}
sb.append('}');
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,sb.toString(), e);
}
/** load the CSV input */
@Override
public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stream, UpdateRequestProcessor processor) throws IOException {
errHeader = "CSVLoader: input=" + stream.getSourceInfo();
Reader reader = null;
try {
reader = stream.getReader();
if (skipLines>0) {
if (!(reader instanceof BufferedReader)) {
reader = new BufferedReader(reader);
}
BufferedReader r = (BufferedReader)reader;
for (int i=0; i<skipLines; i++) {
r.readLine();
}
}
CSVParser parser = new CSVParser(reader, strategy);
// parse the fieldnames from the header of the file
if (fieldnames==null) {
fieldnames = parser.getLine();
if (fieldnames==null) {
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"Expected fieldnames in CSV input");
}
prepareFields();
}
// read the rest of the CSV file
for(;;) {
int line = parser.getLineNumber(); // for error reporting in MT mode
String[] vals = null;
try {
vals = parser.getLine();
} catch (IOException e) {
//Catch the exception and rethrow it with more line information
input_err("can't read line: " + line, null, line, e);
}
if (vals==null) break;
if (vals.length != fields.length) {
input_err("expected "+fields.length+" values but got "+vals.length, vals, line);
}
addDoc(line,vals);
}
} finally{
if (reader != null) {
IOUtils.closeQuietly(reader);
}
}
}
/** called for each line of values (document) */
abstract void addDoc(int line, String[] vals) throws IOException;
/** this must be MT safe... may be called concurrently from multiple threads. */
void doAdd(int line, String[] vals, SolrInputDocument doc, AddUpdateCommand template) throws IOException {
// the line number is passed simply for error reporting in MT mode.
// first, create the lucene document
for (int i=0; i<vals.length; i++) {
if (fields[i]==null) continue; // ignore this field
String val = vals[i];
adders[i].add(doc, line, i, val);
}
// add any literals
for (SchemaField sf : literals.keySet()) {
String fn = sf.getName();
String val = literals.get(sf);
doc.addField(fn, val);
}
template.solrDoc = doc;
processor.processAdd(template);
}
}
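
  The parameters this class reads arrive as ordinary request parameters on the update request. A hedged sketch of assembling them with ModifiableSolrParams; the field names and literal value are hypothetical, and the per-field variants use Solr's usual f.<field>.<param> form that getFieldBool()/getFieldParam() resolve:

  import org.apache.solr.common.params.ModifiableSolrParams;

  public class CsvParamsDemo {
    public static void main(String[] args) {
      ModifiableSolrParams p = new ModifiableSolrParams();
      p.set("separator", "\t");             // SEPARATOR: tab-delimited input
      p.set("fieldnames", "id,name,tags");  // FIELDNAMES: header row not required
      p.set("header", "false");             // HEADER
      p.set("f.tags.split", "true");        // per-field SPLIT
      p.set("f.tags.separator", "|");       // per-field SEPARATOR used by FieldSplitter
      p.set("f.name.trim", "true");         // per-field TRIM
      p.set("literal.source", "csv-load");  // LITERALS_PREFIX: constant field on every doc
      p.set("overwrite", "true");           // OVERWRITE
      p.set("commitWithin", "5000");        // UpdateParams.COMMIT_WITHIN
      System.out.println(p);                // roughly: separator=%09&fieldnames=id,name,tags&...
    }
  }
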

View File

@ -0,0 +1,57 @@
package org.apache.solr.handler.loader;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.processor.UpdateRequestProcessor;
/**
* Load a {@link org.apache.solr.common.util.ContentStream} into Solr
*
* This should be thread safe and can be called from multiple threads
*/
public abstract class ContentStreamLoader {
/**
* This should be called once for each RequestHandler
*/
public ContentStreamLoader init(SolrParams args) {
return this;
}
public String getDefaultWT() {
return null;
}
/**
* Loaders are responsible for closing the stream
*
* @param req The input {@link org.apache.solr.request.SolrQueryRequest}
* @param rsp The response, in case the Loader wishes to add anything
* @param stream The {@link org.apache.solr.common.util.ContentStream} to add
* @param processor The {@link UpdateRequestProcessor} to use
*/
public abstract void load(SolrQueryRequest req,
SolrQueryResponse rsp,
ContentStream stream,
UpdateRequestProcessor processor) throws Exception;
}
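
  This abstract class is what the per-format loaders added in this commit (XMLLoader, JsonLoader, CSVLoader, JavabinLoader) extend. A minimal sketch of a custom loader against it; the LineLoader class, its field names, and its one-document-per-line behavior are hypothetical, but it only uses APIs that appear elsewhere in this commit (ContentStream.getReader(), AddUpdateCommand, processAdd):

  import java.io.BufferedReader;
  import java.io.Reader;

  import org.apache.solr.common.SolrInputDocument;
  import org.apache.solr.common.util.ContentStream;
  import org.apache.solr.request.SolrQueryRequest;
  import org.apache.solr.response.SolrQueryResponse;
  import org.apache.solr.update.AddUpdateCommand;
  import org.apache.solr.update.processor.UpdateRequestProcessor;

  public class LineLoader extends ContentStreamLoader {
    @Override
    public void load(SolrQueryRequest req, SolrQueryResponse rsp,
                     ContentStream stream, UpdateRequestProcessor processor) throws Exception {
      Reader reader = stream.getReader();
      try {
        BufferedReader br = new BufferedReader(reader);
        String line;
        int id = 0;
        while ((line = br.readLine()) != null) {
          SolrInputDocument doc = new SolrInputDocument();
          doc.addField("id", Integer.toString(id++));  // hypothetical schema fields
          doc.addField("text", line);
          AddUpdateCommand cmd = new AddUpdateCommand(req);
          cmd.solrDoc = doc;
          processor.processAdd(cmd);
        }
      } finally {
        reader.close();  // loaders are responsible for closing the stream
      }
    }
  }
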

View File

@ -0,0 +1,139 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.loader;
import org.apache.solr.client.solrj.request.JavaBinUpdateRequestCodec;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.common.util.FastInputStream;
import org.apache.solr.handler.RequestHandlerUtils;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.AddUpdateCommand;
import org.apache.solr.update.DeleteUpdateCommand;
import org.apache.solr.update.processor.UpdateRequestProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
/**
* Update handler which uses the JavaBin format
*
* @see org.apache.solr.client.solrj.request.JavaBinUpdateRequestCodec
* @see org.apache.solr.common.util.JavaBinCodec
*/
public class JavabinLoader extends ContentStreamLoader {
public static Logger log = LoggerFactory.getLogger(JavabinLoader.class);
@Override
public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stream, UpdateRequestProcessor processor) throws Exception {
InputStream is = null;
try {
is = stream.getStream();
parseAndLoadDocs(req, rsp, is, processor);
} finally {
if(is != null) {
is.close();
}
}
}
private void parseAndLoadDocs(final SolrQueryRequest req, SolrQueryResponse rsp, InputStream stream,
final UpdateRequestProcessor processor) throws IOException {
UpdateRequest update = null;
JavaBinUpdateRequestCodec.StreamingUpdateHandler handler = new JavaBinUpdateRequestCodec.StreamingUpdateHandler() {
private AddUpdateCommand addCmd = null;
@Override
public void update(SolrInputDocument document, UpdateRequest updateRequest) {
if (document == null) {
// Perhaps commit from the parameters
try {
RequestHandlerUtils.handleCommit(req, processor, updateRequest.getParams(), false);
RequestHandlerUtils.handleRollback(req, processor, updateRequest.getParams(), false);
} catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "ERROR handling commit/rollback");
}
return;
}
if (addCmd == null) {
addCmd = getAddCommand(req, updateRequest.getParams());
}
addCmd.solrDoc = document;
try {
processor.processAdd(addCmd);
addCmd.clear();
} catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "ERROR adding document " + document);
}
}
};
FastInputStream in = FastInputStream.wrap(stream);
for (; ; ) {
try {
update = new JavaBinUpdateRequestCodec().unmarshal(in, handler);
} catch (EOFException e) {
break; // this is expected
} catch (Exception e) {
log.error("Exception while processing update request", e);
break;
}
if (update.getDeleteById() != null || update.getDeleteQuery() != null) {
delete(req, update, processor);
}
}
}
private AddUpdateCommand getAddCommand(SolrQueryRequest req, SolrParams params) {
AddUpdateCommand addCmd = new AddUpdateCommand(req);
addCmd.overwrite = params.getBool(UpdateParams.OVERWRITE, true);
addCmd.commitWithin = params.getInt(UpdateParams.COMMIT_WITHIN, -1);
return addCmd;
}
private void delete(SolrQueryRequest req, UpdateRequest update, UpdateRequestProcessor processor) throws IOException {
SolrParams params = update.getParams();
DeleteUpdateCommand delcmd = new DeleteUpdateCommand(req);
if(params != null) {
delcmd.commitWithin = params.getInt(UpdateParams.COMMIT_WITHIN, -1);
}
if(update.getDeleteById() != null) {
for (String s : update.getDeleteById()) {
delcmd.id = s;
processor.processDelete(delcmd);
}
delcmd.id = null;
}
if(update.getDeleteQuery() != null) {
for (String s : update.getDeleteQuery()) {
delcmd.query = s;
processor.processDelete(delcmd);
}
}
}
}
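
  The stream this loader consumes is the javabin payload that SolrJ's update codec writes. A hedged client-side sketch of producing one; the document values are hypothetical, and it assumes JavaBinUpdateRequestCodec.marshal(UpdateRequest, OutputStream), the counterpart of the unmarshal call used above:

  import java.io.ByteArrayOutputStream;

  import org.apache.solr.client.solrj.request.JavaBinUpdateRequestCodec;
  import org.apache.solr.client.solrj.request.UpdateRequest;
  import org.apache.solr.common.SolrInputDocument;

  public class JavabinBodyDemo {
    public static void main(String[] args) throws Exception {
      UpdateRequest update = new UpdateRequest();
      SolrInputDocument doc = new SolrInputDocument();
      doc.addField("id", "1");                 // hypothetical fields
      doc.addField("name", "javabin demo");
      update.add(doc);
      update.deleteById("42");                 // picked up by delete() above

      ByteArrayOutputStream body = new ByteArrayOutputStream();
      new JavaBinUpdateRequestCodec().marshal(update, body);
      // POSTing body.toByteArray() as the request body is what
      // parseAndLoadDocs() unmarshals on the server side.
      System.out.println(body.size() + " bytes of javabin update payload");
    }
  }
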

View File

@ -0,0 +1,479 @@
package org.apache.solr.handler.loader;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.*;
import org.apache.commons.io.IOUtils;
import org.apache.noggit.JSONParser;
import org.apache.noggit.JSONUtil;
import org.apache.noggit.ObjectBuilder;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.SolrInputField;
import org.apache.solr.common.params.*;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.handler.RequestHandlerUtils;
import org.apache.solr.handler.UpdateRequestHandler;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.AddUpdateCommand;
import org.apache.solr.update.CommitUpdateCommand;
import org.apache.solr.update.DeleteUpdateCommand;
import org.apache.solr.update.RollbackUpdateCommand;
import org.apache.solr.update.processor.UpdateRequestProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @since solr 4.0
*/
public class JsonLoader extends ContentStreamLoader {
final static Logger log = LoggerFactory.getLogger( JsonLoader.class );
public String getDefaultWT() {
return "json";
}
@Override
public void load(SolrQueryRequest req, SolrQueryResponse rsp,
ContentStream stream, UpdateRequestProcessor processor) throws Exception {
new SingleThreadedJsonLoader(req,processor).load(req, rsp, stream, processor);
}
static class SingleThreadedJsonLoader extends ContentStreamLoader {
protected final UpdateRequestProcessor processor;
protected final SolrQueryRequest req;
protected JSONParser parser;
protected final int commitWithin;
protected final boolean overwrite;
public SingleThreadedJsonLoader(SolrQueryRequest req, UpdateRequestProcessor processor) {
this.processor = processor;
this.req = req;
commitWithin = req.getParams().getInt(UpdateParams.COMMIT_WITHIN, -1);
overwrite = req.getParams().getBool(UpdateParams.OVERWRITE, true);
}
@Override
public void load(SolrQueryRequest req,
SolrQueryResponse rsp,
ContentStream stream,
UpdateRequestProcessor processor) throws Exception {
Reader reader = null;
try {
reader = stream.getReader();
if (log.isTraceEnabled()) {
String body = IOUtils.toString(reader);
log.trace("body", body);
reader = new StringReader(body);
}
parser = new JSONParser(reader);
this.processUpdate();
}
finally {
IOUtils.closeQuietly(reader);
}
}
@SuppressWarnings("fallthrough")
void processUpdate() throws IOException
{
int ev = parser.nextEvent();
while( ev != JSONParser.EOF ) {
switch( ev )
{
case JSONParser.ARRAY_START:
handleAdds();
break;
case JSONParser.STRING:
if( parser.wasKey() ) {
String v = parser.getString();
if( v.equals( UpdateRequestHandler.ADD ) ) {
int ev2 = parser.nextEvent();
if (ev2 == JSONParser.OBJECT_START) {
processor.processAdd( parseAdd() );
} else if (ev2 == JSONParser.ARRAY_START) {
handleAdds();
} else {
assertEvent(ev2, JSONParser.OBJECT_START);
}
}
else if( v.equals( UpdateRequestHandler.COMMIT ) ) {
CommitUpdateCommand cmd = new CommitUpdateCommand(req, false );
cmd.waitSearcher = true;
parseCommitOptions( cmd );
processor.processCommit( cmd );
}
else if( v.equals( UpdateRequestHandler.OPTIMIZE ) ) {
CommitUpdateCommand cmd = new CommitUpdateCommand(req, true );
cmd.waitSearcher = true;
parseCommitOptions( cmd );
processor.processCommit( cmd );
}
else if( v.equals( UpdateRequestHandler.DELETE ) ) {
processor.processDelete( parseDelete() );
}
else if( v.equals( UpdateRequestHandler.ROLLBACK ) ) {
processor.processRollback( parseRollback() );
}
else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown command: "+v+" ["+parser.getPosition()+"]" );
}
break;
}
// fall through
case JSONParser.LONG:
case JSONParser.NUMBER:
case JSONParser.BIGNUMBER:
case JSONParser.BOOLEAN:
case JSONParser.NULL:
log.info( "can't have a value here! "
+JSONParser.getEventString(ev)+" "+parser.getPosition() );
case JSONParser.OBJECT_START:
case JSONParser.OBJECT_END:
case JSONParser.ARRAY_END:
break;
default:
log.info("Noggit UNKNOWN_EVENT_ID:"+ev);
break;
}
// read the next event
ev = parser.nextEvent();
}
}
DeleteUpdateCommand parseDelete() throws IOException {
assertNextEvent( JSONParser.OBJECT_START );
DeleteUpdateCommand cmd = new DeleteUpdateCommand(req);
cmd.commitWithin = commitWithin;
while( true ) {
int ev = parser.nextEvent();
if( ev == JSONParser.STRING ) {
String key = parser.getString();
if( parser.wasKey() ) {
if( "id".equals( key ) ) {
cmd.setId(parser.getString());
}
else if( "query".equals(key) ) {
cmd.setQuery(parser.getString());
}
else if( "commitWithin".equals(key) ) {
cmd.commitWithin = Integer.parseInt(parser.getString());
} else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown key: "+key+" ["+parser.getPosition()+"]" );
}
}
else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"invalid string: " + key
+" at ["+parser.getPosition()+"]" );
}
}
else if( ev == JSONParser.OBJECT_END ) {
if( cmd.getId() == null && cmd.getQuery() == null ) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Missing id or query for delete ["+parser.getPosition()+"]" );
}
return cmd;
}
else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"Got: "+JSONParser.getEventString( ev )
+" at ["+parser.getPosition()+"]" );
}
}
}
RollbackUpdateCommand parseRollback() throws IOException {
assertNextEvent( JSONParser.OBJECT_START );
assertNextEvent( JSONParser.OBJECT_END );
return new RollbackUpdateCommand(req);
}
void parseCommitOptions(CommitUpdateCommand cmd ) throws IOException
{
assertNextEvent( JSONParser.OBJECT_START );
final Map<String,Object> map = (Map)ObjectBuilder.getVal(parser);
// SolrParams currently expects string values...
SolrParams p = new SolrParams() {
@Override
public String get(String param) {
Object o = map.get(param);
return o == null ? null : o.toString();
}
@Override
public String[] getParams(String param) {
return new String[]{get(param)};
}
@Override
public Iterator<String> getParameterNamesIterator() {
return map.keySet().iterator();
}
};
RequestHandlerUtils.validateCommitParams(p);
p = SolrParams.wrapDefaults(p, req.getParams()); // default to the normal request params for commit options
RequestHandlerUtils.updateCommit(cmd, p);
}
AddUpdateCommand parseAdd() throws IOException
{
AddUpdateCommand cmd = new AddUpdateCommand(req);
cmd.commitWithin = commitWithin;
cmd.overwrite = overwrite;
float boost = 1.0f;
while( true ) {
int ev = parser.nextEvent();
if( ev == JSONParser.STRING ) {
if( parser.wasKey() ) {
String key = parser.getString();
if( "doc".equals( key ) ) {
if( cmd.solrDoc != null ) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "multiple docs in same add command" );
}
ev = assertNextEvent( JSONParser.OBJECT_START );
cmd.solrDoc = parseDoc( ev );
}
else if( UpdateRequestHandler.OVERWRITE.equals( key ) ) {
cmd.overwrite = parser.getBoolean(); // reads next boolean
}
else if( UpdateRequestHandler.COMMIT_WITHIN.equals( key ) ) {
cmd.commitWithin = (int)parser.getLong();
}
else if( "boost".equals( key ) ) {
boost = Float.parseFloat( parser.getNumberChars().toString() );
}
else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown key: "+key+" ["+parser.getPosition()+"]" );
}
}
else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"Should be a key "
+" at ["+parser.getPosition()+"]" );
}
}
else if( ev == JSONParser.OBJECT_END ) {
if( cmd.solrDoc == null ) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,"missing solr document. "+parser.getPosition() );
}
cmd.solrDoc.setDocumentBoost( boost );
return cmd;
}
else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"Got: "+JSONParser.getEventString( ev )
+" at ["+parser.getPosition()+"]" );
}
}
}
void handleAdds() throws IOException
{
while( true ) {
AddUpdateCommand cmd = new AddUpdateCommand(req);
cmd.commitWithin = commitWithin;
cmd.overwrite = overwrite;
int ev = parser.nextEvent();
if (ev == JSONParser.ARRAY_END) break;
assertEvent(ev, JSONParser.OBJECT_START);
cmd.solrDoc = parseDoc(ev);
processor.processAdd(cmd);
}
}
int assertNextEvent(int expected ) throws IOException
{
int got = parser.nextEvent();
assertEvent(got, expected);
return got;
}
void assertEvent(int ev, int expected) {
if( ev != expected ) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"Expected: "+JSONParser.getEventString( expected )
+" but got "+JSONParser.getEventString( ev )
+" at ["+parser.getPosition()+"]" );
}
}
private SolrInputDocument parseDoc(int ev) throws IOException {
assert ev == JSONParser.OBJECT_START;
SolrInputDocument sdoc = new SolrInputDocument();
for (;;) {
SolrInputField sif = parseField();
if (sif == null) return sdoc;
SolrInputField prev = sdoc.put(sif.getName(), sif);
if (prev != null) {
// blech - repeated keys
sif.addValue(prev.getValue(), prev.getBoost());
}
}
}
private SolrInputField parseField() throws IOException {
int ev = parser.nextEvent();
if (ev == JSONParser.OBJECT_END) {
return null;
}
String fieldName = parser.getString();
SolrInputField sif = new SolrInputField(fieldName);
parseFieldValue(sif);
return sif;
}
private void parseFieldValue(SolrInputField sif) throws IOException {
int ev = parser.nextEvent();
if (ev == JSONParser.OBJECT_START) {
parseExtendedFieldValue(sif, ev);
} else {
Object val = parseNormalFieldValue(ev);
sif.setValue(val, 1.0f);
}
}
private void parseExtendedFieldValue(SolrInputField sif, int ev) throws IOException {
assert ev == JSONParser.OBJECT_START;
float boost = 1.0f;
Object normalFieldValue = null;
Map<String, Object> extendedInfo = null;
for (;;) {
ev = parser.nextEvent();
switch (ev) {
case JSONParser.STRING:
String label = parser.getString();
if ("boost".equals(label)) {
ev = parser.nextEvent();
if( ev != JSONParser.NUMBER &&
ev != JSONParser.LONG &&
ev != JSONParser.BIGNUMBER ) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "boost should have number! "+JSONParser.getEventString(ev) );
}
boost = (float)parser.getDouble();
} else if ("value".equals(label)) {
normalFieldValue = parseNormalFieldValue(parser.nextEvent());
} else {
// If we encounter other unknown map keys, then use a map
if (extendedInfo == null) {
extendedInfo = new HashMap<String, Object>(2);
}
// for now, the only extended info will be field values
// we could either store this as an Object or a SolrInputField
Object val = parseNormalFieldValue(parser.nextEvent());
extendedInfo.put(label, val);
}
break;
case JSONParser.OBJECT_END:
if (extendedInfo != null) {
if (normalFieldValue != null) {
extendedInfo.put("value",normalFieldValue);
}
sif.setValue(extendedInfo, boost);
} else {
sif.setValue(normalFieldValue, boost);
}
return;
default:
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error parsing JSON extended field value. Unexpected "+JSONParser.getEventString(ev) );
}
}
}
private Object parseNormalFieldValue(int ev) throws IOException {
if (ev == JSONParser.ARRAY_START) {
List<Object> val = parseArrayFieldValue(ev);
return val;
} else {
Object val = parseSingleFieldValue(ev);
return val;
}
}
private Object parseSingleFieldValue(int ev) throws IOException {
switch (ev) {
case JSONParser.STRING:
return parser.getString();
case JSONParser.LONG:
case JSONParser.NUMBER:
case JSONParser.BIGNUMBER:
return parser.getNumberChars().toString();
case JSONParser.BOOLEAN:
          return Boolean.toString(parser.getBoolean()); // for legacy reasons, single values are expected to be strings
case JSONParser.NULL:
parser.getNull();
return null;
case JSONParser.ARRAY_START:
return parseArrayFieldValue(ev);
default:
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error parsing JSON field value. Unexpected "+JSONParser.getEventString(ev) );
}
}
private List<Object> parseArrayFieldValue(int ev) throws IOException {
assert ev == JSONParser.ARRAY_START;
ArrayList lst = new ArrayList(2);
for (;;) {
ev = parser.nextEvent();
if (ev == JSONParser.ARRAY_END) {
return lst;
}
Object val = parseSingleFieldValue(ev);
lst.add(val);
}
}
}
}
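
  processUpdate() above drives directly off the noggit event stream instead of building a full parse tree. A hedged sketch of what that stream looks like for a hypothetical payload, using only JSONParser calls that already appear in the loader:

  import java.io.StringReader;

  import org.apache.noggit.JSONParser;

  public class JsonEventsDemo {
    public static void main(String[] args) throws Exception {
      // Hypothetical update body: one add with commitWithin, then a commit.
      String body = "{\"add\":{\"commitWithin\":5000,\"doc\":{\"id\":\"1\"}},\"commit\":{}}";
      JSONParser parser = new JSONParser(new StringReader(body));
      for (int ev = parser.nextEvent(); ev != JSONParser.EOF; ev = parser.nextEvent()) {
        if (ev == JSONParser.STRING && parser.wasKey()) {
          // top-level keys such as "add" and "commit" are what processUpdate() dispatches on
          System.out.println("key: " + parser.getString());
        } else {
          System.out.println(JSONParser.getEventString(ev));
        }
      }
    }
  }
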

View File

@ -1,4 +1,4 @@
package org.apache.solr.handler; package org.apache.solr.handler.loader;
/** /**
* Licensed to the Apache Software Foundation (ASF) under one or more * Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with * contributor license agreements. See the NOTICE file distributed with
@ -22,23 +22,38 @@ import org.apache.solr.update.AddUpdateCommand;
import org.apache.solr.update.CommitUpdateCommand; import org.apache.solr.update.CommitUpdateCommand;
import org.apache.solr.update.RollbackUpdateCommand; import org.apache.solr.update.RollbackUpdateCommand;
import org.apache.solr.update.DeleteUpdateCommand; import org.apache.solr.update.DeleteUpdateCommand;
import org.apache.solr.util.xslt.TransformerProvider;
import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.common.util.ContentStream; import org.apache.solr.common.util.ContentStream;
import org.apache.solr.common.util.ContentStreamBase; import org.apache.solr.common.util.ContentStreamBase;
import org.apache.solr.common.util.StrUtils; import org.apache.solr.common.util.StrUtils;
import org.apache.solr.common.util.XMLErrorLogger;
import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.params.UpdateParams; import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.handler.RequestHandlerUtils;
import org.apache.solr.handler.UpdateRequestHandler;
import org.apache.commons.io.IOUtils; import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.InputSource;
import javax.xml.stream.XMLStreamReader; import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamException;
import javax.xml.stream.FactoryConfigurationError; import javax.xml.stream.FactoryConfigurationError;
import javax.xml.stream.XMLStreamConstants; import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLInputFactory;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.sax.SAXSource;
import java.io.ByteArrayInputStream; import java.io.ByteArrayInputStream;
import java.io.InputStream; import java.io.InputStream;
import java.io.IOException; import java.io.IOException;
@ -46,48 +61,129 @@ import java.util.HashMap;
import java.util.Map; import java.util.Map;
/** public class XMLLoader extends ContentStreamLoader {
* public static Logger log = LoggerFactory.getLogger(XMLLoader.class);
* static final XMLErrorLogger xmllog = new XMLErrorLogger(log);
**/
class XMLLoader extends ContentStreamLoader { public static final String CONTEXT_TRANSFORMER_KEY = "xsltupdater.transformer";
protected UpdateRequestProcessor processor;
protected XMLInputFactory inputFactory;
public XMLLoader(UpdateRequestProcessor processor, XMLInputFactory inputFactory) { private static final String XSLT_CACHE_PARAM = "xsltCacheLifetimeSeconds";
this.processor = processor;
this.inputFactory = inputFactory; public static final int XSLT_CACHE_DEFAULT = 60;
int xsltCacheLifetimeSeconds;
XMLInputFactory inputFactory;
@Override
public XMLLoader init(SolrParams args) {
inputFactory = XMLInputFactory.newInstance();
try {
// The java 1.6 bundled stax parser (sjsxp) does not currently have a thread-safe
// XMLInputFactory, as that implementation tries to cache and reuse the
// XMLStreamReader. Setting the parser-specific "reuse-instance" property to false
// prevents this.
// All other known open-source stax parsers (and the bea ref impl)
// have thread-safe factories.
inputFactory.setProperty("reuse-instance", Boolean.FALSE);
}
catch (IllegalArgumentException ex) {
// Other implementations will likely throw this exception since "reuse-instance"
      // is implementation specific.
log.debug("Unable to set the 'reuse-instance' property for the input chain: " + inputFactory);
}
inputFactory.setXMLReporter(xmllog);
xsltCacheLifetimeSeconds = XSLT_CACHE_DEFAULT;
if(args != null) {
xsltCacheLifetimeSeconds = args.getInt(XSLT_CACHE_PARAM,XSLT_CACHE_DEFAULT);
log.info("xsltCacheLifetimeSeconds=" + xsltCacheLifetimeSeconds);
}
return this;
}
public String getDefaultWT() {
return "xml";
} }
@Override @Override
public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stream) throws Exception { public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stream, UpdateRequestProcessor processor) throws Exception {
errHeader = "XMLLoader: " + stream.getSourceInfo(); final String charset = ContentStreamBase.getCharsetFromContentType(stream.getContentType());
InputStream is = null; InputStream is = null;
XMLStreamReader parser = null; XMLStreamReader parser = null;
try {
is = stream.getStream(); String tr = req.getParams().get(CommonParams.TR,null);
final String charset = ContentStreamBase.getCharsetFromContentType(stream.getContentType()); if(tr!=null) {
if (XmlUpdateRequestHandler.log.isTraceEnabled()) { Transformer t = getTransformer(tr,req);
final byte[] body = IOUtils.toByteArray(is); final DOMResult result = new DOMResult();
// TODO: The charset may be wrong, as the real charset is later
// determined by the XML parser, the content-type is only used as a hint! // first step: read XML and build DOM using Transformer (this is no overhead, as XSL always produces
XmlUpdateRequestHandler.log.trace("body", new String(body, (charset == null) ? // an internal result DOM tree, we just access it directly as input for StAX):
ContentStreamBase.DEFAULT_CHARSET : charset)); try {
is = stream.getStream();
final InputSource isrc = new InputSource(is);
isrc.setEncoding(charset);
final SAXSource source = new SAXSource(isrc);
t.transform(source, result);
} catch(TransformerException te) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, te.getMessage(), te);
} finally {
IOUtils.closeQuietly(is);
}
      // second step: feed the intermediate DOM tree into the StAX parser:
try {
parser = inputFactory.createXMLStreamReader(new DOMSource(result.getNode()));
this.processUpdate(req, processor, parser);
} catch (XMLStreamException e) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e.getMessage(), e);
} finally {
if (parser != null) parser.close();
}
}
// Normal XML Loader
else {
try {
is = stream.getStream();
if (UpdateRequestHandler.log.isTraceEnabled()) {
final byte[] body = IOUtils.toByteArray(is);
// TODO: The charset may be wrong, as the real charset is later
// determined by the XML parser, the content-type is only used as a hint!
UpdateRequestHandler.log.trace("body", new String(body, (charset == null) ?
ContentStreamBase.DEFAULT_CHARSET : charset));
IOUtils.closeQuietly(is);
is = new ByteArrayInputStream(body);
}
parser = (charset == null) ?
inputFactory.createXMLStreamReader(is) : inputFactory.createXMLStreamReader(is, charset);
this.processUpdate(req, processor, parser);
} catch (XMLStreamException e) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e.getMessage(), e);
} finally {
if (parser != null) parser.close();
IOUtils.closeQuietly(is); IOUtils.closeQuietly(is);
is = new ByteArrayInputStream(body);
} }
parser = (charset == null) ?
inputFactory.createXMLStreamReader(is) : inputFactory.createXMLStreamReader(is, charset);
this.processUpdate(req, processor, parser);
} catch (XMLStreamException e) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e.getMessage(), e);
} finally {
if (parser != null) parser.close();
IOUtils.closeQuietly(is);
} }
} }
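
  The XSLT branch above keeps the two-step pipeline of the removed XsltXMLLoader: transform the request body into an in-memory DOM, then re-read that DOM with StAX. A standalone sketch of the same flow with plain JAXP; the stylesheet and input are hypothetical, and TransformerFactory stands in for Solr's cached TransformerProvider:

  import java.io.StringReader;

  import javax.xml.stream.XMLInputFactory;
  import javax.xml.stream.XMLStreamReader;
  import javax.xml.transform.Transformer;
  import javax.xml.transform.TransformerFactory;
  import javax.xml.transform.dom.DOMResult;
  import javax.xml.transform.dom.DOMSource;
  import javax.xml.transform.sax.SAXSource;
  import javax.xml.transform.stream.StreamSource;

  import org.xml.sax.InputSource;

  public class XsltThenStaxDemo {
    public static void main(String[] args) throws Exception {
      // Hypothetical identity stylesheet and update body.
      String xslt = "<xsl:stylesheet version='1.0' xmlns:xsl='http://www.w3.org/1999/XSL/Transform'>"
          + "<xsl:template match='@*|node()'><xsl:copy><xsl:apply-templates select='@*|node()'/></xsl:copy></xsl:template>"
          + "</xsl:stylesheet>";
      String body = "<add><doc><field name='id'>1</field></doc></add>";

      // Step 1: transform the body into a DOM (the XSL processor builds one internally anyway).
      Transformer t = TransformerFactory.newInstance().newTransformer(new StreamSource(new StringReader(xslt)));
      DOMResult dom = new DOMResult();
      t.transform(new SAXSource(new InputSource(new StringReader(body))), dom);

      // Step 2: feed that DOM to StAX, which is what processUpdate() consumes.
      XMLStreamReader parser = XMLInputFactory.newInstance().createXMLStreamReader(new DOMSource(dom.getNode()));
      while (parser.hasNext()) {
        parser.next();  // the real loader builds add/delete/commit commands from these events
      }
      parser.close();
    }
  }
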
/** Get Transformer from request context, or from TransformerProvider.
* This allows either getContentType(...) or write(...) to instantiate the Transformer,
* depending on which one is called first, then the other one reuses the same Transformer
*/
Transformer getTransformer(String xslt, SolrQueryRequest request) throws IOException {
// not the cleanest way to achieve this
// no need to synchronize access to context, right?
// Nothing else happens with it at the same time
final Map<Object,Object> ctx = request.getContext();
Transformer result = (Transformer)ctx.get(CONTEXT_TRANSFORMER_KEY);
if(result==null) {
SolrConfig solrConfig = request.getCore().getSolrConfig();
result = TransformerProvider.instance.getTransformer(solrConfig, xslt, xsltCacheLifetimeSeconds);
result.setErrorListener(xmllog);
ctx.put(CONTEXT_TRANSFORMER_KEY,result);
}
return result;
}
/** /**
@ -108,8 +204,8 @@ class XMLLoader extends ContentStreamLoader {
case XMLStreamConstants.START_ELEMENT: case XMLStreamConstants.START_ELEMENT:
String currTag = parser.getLocalName(); String currTag = parser.getLocalName();
if (currTag.equals(XmlUpdateRequestHandler.ADD)) { if (currTag.equals(UpdateRequestHandler.ADD)) {
XmlUpdateRequestHandler.log.trace("SolrCore.update(add)"); log.trace("SolrCore.update(add)");
addCmd = new AddUpdateCommand(req); addCmd = new AddUpdateCommand(req);
@ -120,28 +216,28 @@ class XMLLoader extends ContentStreamLoader {
for (int i = 0; i < parser.getAttributeCount(); i++) { for (int i = 0; i < parser.getAttributeCount(); i++) {
String attrName = parser.getAttributeLocalName(i); String attrName = parser.getAttributeLocalName(i);
String attrVal = parser.getAttributeValue(i); String attrVal = parser.getAttributeValue(i);
if (XmlUpdateRequestHandler.OVERWRITE.equals(attrName)) { if (UpdateRequestHandler.OVERWRITE.equals(attrName)) {
addCmd.overwrite = StrUtils.parseBoolean(attrVal); addCmd.overwrite = StrUtils.parseBoolean(attrVal);
} else if (XmlUpdateRequestHandler.COMMIT_WITHIN.equals(attrName)) { } else if (UpdateRequestHandler.COMMIT_WITHIN.equals(attrName)) {
addCmd.commitWithin = Integer.parseInt(attrVal); addCmd.commitWithin = Integer.parseInt(attrVal);
} else { } else {
XmlUpdateRequestHandler.log.warn("Unknown attribute id in add:" + attrName); log.warn("Unknown attribute id in add:" + attrName);
} }
} }
} else if ("doc".equals(currTag)) { } else if ("doc".equals(currTag)) {
if(addCmd != null) { if(addCmd != null) {
XmlUpdateRequestHandler.log.trace("adding doc..."); log.trace("adding doc...");
addCmd.clear(); addCmd.clear();
addCmd.solrDoc = readDoc(parser); addCmd.solrDoc = readDoc(parser);
processor.processAdd(addCmd); processor.processAdd(addCmd);
} else { } else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unexpected <doc> tag without an <add> tag surrounding it."); throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unexpected <doc> tag without an <add> tag surrounding it.");
} }
} else if (XmlUpdateRequestHandler.COMMIT.equals(currTag) || XmlUpdateRequestHandler.OPTIMIZE.equals(currTag)) { } else if (UpdateRequestHandler.COMMIT.equals(currTag) || UpdateRequestHandler.OPTIMIZE.equals(currTag)) {
XmlUpdateRequestHandler.log.trace("parsing " + currTag); log.trace("parsing " + currTag);
CommitUpdateCommand cmd = new CommitUpdateCommand(req, XmlUpdateRequestHandler.OPTIMIZE.equals(currTag)); CommitUpdateCommand cmd = new CommitUpdateCommand(req, UpdateRequestHandler.OPTIMIZE.equals(currTag));
ModifiableSolrParams mp = new ModifiableSolrParams(); ModifiableSolrParams mp = new ModifiableSolrParams();
for (int i = 0; i < parser.getAttributeCount(); i++) { for (int i = 0; i < parser.getAttributeCount(); i++) {
@ -156,15 +252,15 @@ class XMLLoader extends ContentStreamLoader {
processor.processCommit(cmd); processor.processCommit(cmd);
} // end commit } // end commit
else if (XmlUpdateRequestHandler.ROLLBACK.equals(currTag)) { else if (UpdateRequestHandler.ROLLBACK.equals(currTag)) {
XmlUpdateRequestHandler.log.trace("parsing " + currTag); log.trace("parsing " + currTag);
RollbackUpdateCommand cmd = new RollbackUpdateCommand(req); RollbackUpdateCommand cmd = new RollbackUpdateCommand(req);
processor.processRollback(cmd); processor.processRollback(cmd);
} // end rollback } // end rollback
else if (XmlUpdateRequestHandler.DELETE.equals(currTag)) { else if (UpdateRequestHandler.DELETE.equals(currTag)) {
XmlUpdateRequestHandler.log.trace("parsing delete"); log.trace("parsing delete");
processDelete(req, processor, parser); processDelete(req, processor, parser);
} // end delete } // end delete
break; break;
@ -190,10 +286,10 @@ class XMLLoader extends ContentStreamLoader {
// deprecated // deprecated
} else if ("fromCommitted".equals(attrName)) { } else if ("fromCommitted".equals(attrName)) {
// deprecated // deprecated
} else if (XmlUpdateRequestHandler.COMMIT_WITHIN.equals(attrName)) { } else if (UpdateRequestHandler.COMMIT_WITHIN.equals(attrName)) {
deleteCmd.commitWithin = Integer.parseInt(attrVal); deleteCmd.commitWithin = Integer.parseInt(attrVal);
} else { } else {
XmlUpdateRequestHandler.log.warn("unexpected attribute delete/@" + attrName); log.warn("unexpected attribute delete/@" + attrName);
} }
} }
@ -204,7 +300,7 @@ class XMLLoader extends ContentStreamLoader {
case XMLStreamConstants.START_ELEMENT: case XMLStreamConstants.START_ELEMENT:
String mode = parser.getLocalName(); String mode = parser.getLocalName();
if (!("id".equals(mode) || "query".equals(mode))) { if (!("id".equals(mode) || "query".equals(mode))) {
XmlUpdateRequestHandler.log.warn("unexpected XML tag /delete/" + mode); log.warn("unexpected XML tag /delete/" + mode);
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"unexpected XML tag /delete/" + mode); "unexpected XML tag /delete/" + mode);
} }
@ -214,7 +310,7 @@ class XMLLoader extends ContentStreamLoader {
for (int i = 0; i < parser.getAttributeCount(); i++) { for (int i = 0; i < parser.getAttributeCount(); i++) {
String attrName = parser.getAttributeLocalName(i); String attrName = parser.getAttributeLocalName(i);
String attrVal = parser.getAttributeValue(i); String attrVal = parser.getAttributeValue(i);
if (XmlUpdateRequestHandler.VERSION.equals(attrName)) { if (UpdateRequestHandler.VERSION.equals(attrName)) {
deleteCmd.setVersion(Long.parseLong(attrVal)); deleteCmd.setVersion(Long.parseLong(attrVal));
} }
} }
@ -230,7 +326,7 @@ class XMLLoader extends ContentStreamLoader {
} else if ("delete".equals(currTag)) { } else if ("delete".equals(currTag)) {
return; return;
} else { } else {
XmlUpdateRequestHandler.log.warn("unexpected XML tag /delete/" + currTag); log.warn("unexpected XML tag /delete/" + currTag);
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"unexpected XML tag /delete/" + currTag); "unexpected XML tag /delete/" + currTag);
} }
@ -254,7 +350,7 @@ class XMLLoader extends ContentStreamLoader {
* *
* @since solr 1.3 * @since solr 1.3
*/ */
SolrInputDocument readDoc(XMLStreamReader parser) throws XMLStreamException { public SolrInputDocument readDoc(XMLStreamReader parser) throws XMLStreamException {
SolrInputDocument doc = new SolrInputDocument(); SolrInputDocument doc = new SolrInputDocument();
String attrName = ""; String attrName = "";
@ -263,7 +359,7 @@ class XMLLoader extends ContentStreamLoader {
if ("boost".equals(attrName)) { if ("boost".equals(attrName)) {
doc.setDocumentBoost(Float.parseFloat(parser.getAttributeValue(i))); doc.setDocumentBoost(Float.parseFloat(parser.getAttributeValue(i)));
} else { } else {
XmlUpdateRequestHandler.log.warn("Unknown attribute doc/@" + attrName); log.warn("Unknown attribute doc/@" + attrName);
} }
} }
@ -302,7 +398,7 @@ class XMLLoader extends ContentStreamLoader {
text.setLength(0); text.setLength(0);
String localName = parser.getLocalName(); String localName = parser.getLocalName();
if (!"field".equals(localName)) { if (!"field".equals(localName)) {
XmlUpdateRequestHandler.log.warn("unexpected XML tag doc/" + localName); log.warn("unexpected XML tag doc/" + localName);
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"unexpected XML tag doc/" + localName); "unexpected XML tag doc/" + localName);
} }
@ -321,12 +417,11 @@ class XMLLoader extends ContentStreamLoader {
} else if ("update".equals(attrName)) { } else if ("update".equals(attrName)) {
update = attrVal; update = attrVal;
} else { } else {
XmlUpdateRequestHandler.log.warn("Unknown attribute doc/field/@" + attrName); log.warn("Unknown attribute doc/field/@" + attrName);
} }
} }
break; break;
} }
} }
} }
} }
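
[Illustrative sketch, not part of this patch] With readDoc(XMLStreamReader) now public on the loader, a single <doc> element can be parsed into a SolrInputDocument outside the full update flow; the call pattern below mirrors XmlUpdateRequestHandlerTest later in this commit (the field value is an assumption):

import java.io.StringReader;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamReader;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.handler.loader.XMLLoader;

public class ReadDocSketch {
  public static void main(String[] args) throws Exception {
    XMLInputFactory factory = XMLInputFactory.newInstance();
    XMLStreamReader parser = factory.createXMLStreamReader(
        new StringReader("<doc><field name=\"id\">42</field></doc>"));
    parser.next(); // advance from START_DOCUMENT to the <doc> start element
    SolrInputDocument doc = new XMLLoader().readDoc(parser);
    System.out.println(doc.getFieldValue("id")); // 42
  }
}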

View File

@ -33,6 +33,7 @@ import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource; import javax.xml.transform.stream.StreamSource;
import org.apache.solr.core.SolrConfig; import org.apache.solr.core.SolrConfig;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.XMLErrorLogger; import org.apache.solr.common.util.XMLErrorLogger;
@ -46,7 +47,6 @@ import org.apache.solr.util.xslt.TransformerProvider;
public class XSLTResponseWriter implements QueryResponseWriter { public class XSLTResponseWriter implements QueryResponseWriter {
public static final String DEFAULT_CONTENT_TYPE = "application/xml"; public static final String DEFAULT_CONTENT_TYPE = "application/xml";
public static final String TRANSFORM_PARAM = "tr";
public static final String CONTEXT_TRANSFORMER_KEY = "xsltwriter.transformer"; public static final String CONTEXT_TRANSFORMER_KEY = "xsltwriter.transformer";
private Integer xsltCacheLifetimeSeconds = null; private Integer xsltCacheLifetimeSeconds = null;
@ -116,9 +116,9 @@ public class XSLTResponseWriter implements QueryResponseWriter {
* depending on which one is called first, then the other one reuses the same Transformer * depending on which one is called first, then the other one reuses the same Transformer
*/ */
protected Transformer getTransformer(SolrQueryRequest request) throws IOException { protected Transformer getTransformer(SolrQueryRequest request) throws IOException {
final String xslt = request.getParams().get(TRANSFORM_PARAM,null); final String xslt = request.getParams().get(CommonParams.TR,null);
if(xslt==null) { if(xslt==null) {
throw new IOException("'" + TRANSFORM_PARAM + "' request parameter is required to use the XSLTResponseWriter"); throw new IOException("'" + CommonParams.TR + "' request parameter is required to use the XSLTResponseWriter");
} }
// not the cleanest way to achieve this // not the cleanest way to achieve this
SolrConfig solrConfig = request.getCore().getSolrConfig(); SolrConfig solrConfig = request.getCore().getSolrConfig();

View File

@ -123,7 +123,7 @@
</query> </query>
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" /> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<!-- test elevation --> <!-- test elevation -->

View File

@ -55,7 +55,7 @@
<requestHandler name="lazy" class="solr.StandardRequestHandler" startup="lazy"> <requestHandler name="lazy" class="solr.StandardRequestHandler" startup="lazy">
</requestHandler> </requestHandler>
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler"/> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<!-- enable streaming for testing... --> <!-- enable streaming for testing... -->
<requestDispatcher handleSelect="true"> <requestDispatcher handleSelect="true">

View File

@ -28,7 +28,7 @@
<requestHandler name="standard" class="solr.StandardRequestHandler"/> <requestHandler name="standard" class="solr.StandardRequestHandler"/>
<updateHandler class="solr.DirectUpdateHandler2"/> <updateHandler class="solr.DirectUpdateHandler2"/>
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" /> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<requestHandler name="/reloadCache" <requestHandler name="/reloadCache"
class="org.apache.solr.search.function.FileFloatSource$ReloadCacheRequestHandler" /> class="org.apache.solr.search.function.FileFloatSource$ReloadCacheRequestHandler" />

View File

@ -63,7 +63,7 @@
</lst> </lst>
</requestHandler> </requestHandler>
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler"/> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<!-- enable streaming for testing... --> <!-- enable streaming for testing... -->
<requestDispatcher handleSelect="true"> <requestDispatcher handleSelect="true">

View File

@ -38,7 +38,7 @@
<str name="maxNumberOfBackups">1</str> <str name="maxNumberOfBackups">1</str>
</requestHandler> </requestHandler>
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler"/> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<!-- enable streaming for testing... --> <!-- enable streaming for testing... -->
<requestDispatcher handleSelect="true"> <requestDispatcher handleSelect="true">

View File

@ -63,7 +63,7 @@
</lst> </lst>
</requestHandler> </requestHandler>
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler"/> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<!-- enable streaming for testing... --> <!-- enable streaming for testing... -->
<requestDispatcher handleSelect="true"> <requestDispatcher handleSelect="true">

View File

@ -63,7 +63,7 @@
</lst> </lst>
</requestHandler> </requestHandler>
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler"/> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<!-- enable streaming for testing... --> <!-- enable streaming for testing... -->
<requestDispatcher handleSelect="true"> <requestDispatcher handleSelect="true">

View File

@ -64,7 +64,7 @@
</lst> </lst>
</requestHandler> </requestHandler>
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler"/> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<!-- enable streaming for testing... --> <!-- enable streaming for testing... -->
<requestDispatcher handleSelect="true"> <requestDispatcher handleSelect="true">

View File

@ -47,7 +47,7 @@
<requestHandler name="lazy" class="solr.StandardRequestHandler" startup="lazy"> <requestHandler name="lazy" class="solr.StandardRequestHandler" startup="lazy">
</requestHandler> </requestHandler>
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler"/> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<requestHandler name="/replication" class="solr.ReplicationHandler"> <requestHandler name="/replication" class="solr.ReplicationHandler">
<lst name="master"> <lst name="master">

View File

@ -47,7 +47,7 @@
<requestHandler name="lazy" class="solr.StandardRequestHandler" startup="lazy"> <requestHandler name="lazy" class="solr.StandardRequestHandler" startup="lazy">
</requestHandler> </requestHandler>
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler"/> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<requestHandler name="/replication" class="solr.ReplicationHandler"> <requestHandler name="/replication" class="solr.ReplicationHandler">
<lst name="slave"> <lst name="slave">

View File

@ -47,10 +47,7 @@
<requestHandler name="lazy" class="solr.StandardRequestHandler" startup="lazy"> <requestHandler name="lazy" class="solr.StandardRequestHandler" startup="lazy">
</requestHandler> </requestHandler>
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler"/> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<requestHandler name="/update/javabin" class="solr.BinaryUpdateRequestHandler"/>
<requestHandler name="/replication" class="solr.ReplicationHandler"> <requestHandler name="/replication" class="solr.ReplicationHandler">

View File

@ -53,7 +53,7 @@
<requestHandler name="lazy" class="solr.StandardRequestHandler" startup="lazy"> <requestHandler name="lazy" class="solr.StandardRequestHandler" startup="lazy">
</requestHandler> </requestHandler>
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler"/> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<!-- enable streaming for testing... --> <!-- enable streaming for testing... -->
<requestDispatcher handleSelect="true"> <requestDispatcher handleSelect="true">

View File

@ -31,11 +31,7 @@
</lst> </lst>
</requestHandler> </requestHandler>
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler"> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
</requestHandler>
<requestHandler name="/update/json" class="solr.JsonUpdateRequestHandler">
</requestHandler>
<updateHandler class="solr.DirectUpdateHandler2"> <updateHandler class="solr.DirectUpdateHandler2">
<updateLog> <updateLog>

View File

@ -260,11 +260,7 @@
</lst> </lst>
</requestHandler> </requestHandler>
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" /> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<requestHandler name="/update/csv" class="solr.CSVRequestHandler" startup="lazy">
<bool name="httpCaching">false</bool>
</requestHandler>
<requestHandler name="/update/json" class="solr.JsonUpdateRequestHandler" />
<searchComponent name="spellcheck" class="org.apache.solr.handler.component.SpellCheckComponent"> <searchComponent name="spellcheck" class="org.apache.solr.handler.component.SpellCheckComponent">

View File

@ -70,10 +70,6 @@
<requestHandler name="standard" class="solr.SearchHandler" default="true"> <requestHandler name="standard" class="solr.SearchHandler" default="true">
</requestHandler> </requestHandler>
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" /> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<requestHandler name="/update/javabin" class="solr.BinaryUpdateRequestHandler" />
<!-- CSV update handler, loaded on demand -->
<requestHandler name="/update/csv" class="solr.CSVRequestHandler" startup="lazy" />
</config> </config>

View File

@ -49,7 +49,7 @@
<str name="fl">implicit</str> <str name="fl">implicit</str>
</lst> </lst>
</requestHandler> </requestHandler>
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" /> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<queryResponseWriter name="standard" class="solr.XMLResponseWriter"/> <queryResponseWriter name="standard" class="solr.XMLResponseWriter"/>

View File

@ -1236,9 +1236,9 @@ public class ConvertedLegacyTest extends SolrTestCaseJ4 {
// can mess up parsing (and it has in the past) // can mess up parsing (and it has in the past)
assertU(" <delete> <id>44</id> </delete>"); assertU("<delete> <id>44</id> </delete>");
assertU(" <add> <doc> <field name=\"id\">44</field> <field name=\"shouldbestored\">hi</field> </doc> </add>"); assertU("<add> <doc> <field name=\"id\">44</field> <field name=\"shouldbestored\">hi</field> </doc> </add>");
assertU(" <commit />"); assertU("<commit />");
// test adding multiple docs per add command // test adding multiple docs per add command

View File

@ -337,8 +337,8 @@ public class BasicDistributedZkTest extends AbstractDistributedZkTestCase {
throws MalformedURLException, SolrServerException, IOException { throws MalformedURLException, SolrServerException, IOException {
long startCommits = getNumCommits((HttpSolrServer) clients.get(0)); long startCommits = getNumCommits((HttpSolrServer) clients.get(0));
ContentStreamUpdateRequest up = new ContentStreamUpdateRequest("/update/csv"); ContentStreamUpdateRequest up = new ContentStreamUpdateRequest("/update");
up.addFile(getFile("books_numeric_ids.csv")); up.addFile(getFile("books_numeric_ids.csv"), "application/csv");
up.setCommitWithin(900000); up.setCommitWithin(900000);
up.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); up.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
NamedList<Object> result = clients.get(0).request(up); NamedList<Object> result = clients.get(0).request(up);

View File

@ -77,14 +77,14 @@ public class RequestHandlersTest extends SolrTestCaseJ4 {
public void testPathNormalization() public void testPathNormalization()
{ {
SolrCore core = h.getCore(); SolrCore core = h.getCore();
SolrRequestHandler h1 = core.getRequestHandler("/update/csv" ); SolrRequestHandler h1 = core.getRequestHandler("/update" );
assertNotNull( h1 ); assertNotNull( h1 );
SolrRequestHandler h2 = core.getRequestHandler("/update/csv/" ); SolrRequestHandler h2 = core.getRequestHandler("/update/" );
assertNotNull( h2 ); assertNotNull( h2 );
assertEquals( h1, h2 ); // the same object assertEquals( h1, h2 ); // the same object
assertNull( core.getRequestHandler("/update/csv/asdgadsgas" ) ); // prefix assertNull( core.getRequestHandler("/update/asdgadsgas" ) ); // prefix
} }
} }

View File

@ -21,6 +21,8 @@ import org.apache.solr.client.solrj.impl.BinaryRequestWriter;
import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.UpdateParams; import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.handler.loader.ContentStreamLoader;
import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.AddUpdateCommand; import org.apache.solr.update.AddUpdateCommand;
@ -47,11 +49,12 @@ public class BinaryUpdateRequestHandlerTest extends SolrTestCaseJ4 {
BinaryRequestWriter brw = new BinaryRequestWriter(); BinaryRequestWriter brw = new BinaryRequestWriter();
BufferingRequestProcessor p = new BufferingRequestProcessor(null); BufferingRequestProcessor p = new BufferingRequestProcessor(null);
SolrQueryResponse rsp = new SolrQueryResponse(); SolrQueryResponse rsp = new SolrQueryResponse();
BinaryUpdateRequestHandler handler = new BinaryUpdateRequestHandler(); UpdateRequestHandler handler = new UpdateRequestHandler();
handler.init(new NamedList());
SolrQueryRequest req = req(); SolrQueryRequest req = req();
ContentStreamLoader csl = handler.newLoader(req, p); ContentStreamLoader csl = handler.newLoader(req, p);
csl.load(req, rsp, brw.getContentStream(ureq)); csl.load(req, rsp, brw.getContentStream(ureq), p);
AddUpdateCommand add = p.addCommands.get(0); AddUpdateCommand add = p.addCommands.get(0);
System.out.println(add.solrDoc); System.out.println(add.solrDoc);

View File

@ -19,6 +19,7 @@ package org.apache.solr.handler;
import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.util.ContentStreamBase; import org.apache.solr.common.util.ContentStreamBase;
import org.apache.solr.handler.loader.CSVLoader;
import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.AddUpdateCommand; import org.apache.solr.update.AddUpdateCommand;
@ -36,16 +37,14 @@ public class CSVRequestHandlerTest extends SolrTestCaseJ4 {
@Test @Test
public void testCommitWithin() throws Exception { public void testCommitWithin() throws Exception {
CSVRequestHandler handler = new CSVRequestHandler();
String csvString = "id;name\n123;hello"; String csvString = "id;name\n123;hello";
SolrQueryRequest req = req("separator", ";", SolrQueryRequest req = req("separator", ";",
"commitWithin", "200"); "commitWithin", "200");
SolrQueryResponse rsp = new SolrQueryResponse(); SolrQueryResponse rsp = new SolrQueryResponse();
BufferingRequestProcessor p = new BufferingRequestProcessor(null); BufferingRequestProcessor p = new BufferingRequestProcessor(null);
CSVLoader loader = (CSVLoader) handler.newLoader(req, p); CSVLoader loader = new CSVLoader();
loader.load(req, rsp, new ContentStreamBase.StringStream(csvString)); loader.load(req, rsp, new ContentStreamBase.StringStream(csvString), p);
AddUpdateCommand add = p.addCommands.get(0); AddUpdateCommand add = p.addCommands.get(0);
assertEquals(200, add.commitWithin); assertEquals(200, add.commitWithin);

View File

@ -21,6 +21,7 @@ import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.SolrInputField; import org.apache.solr.common.SolrInputField;
import org.apache.solr.common.util.ContentStreamBase; import org.apache.solr.common.util.ContentStreamBase;
import org.apache.solr.handler.loader.JsonLoader;
import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.AddUpdateCommand; import org.apache.solr.update.AddUpdateCommand;
@ -84,8 +85,8 @@ public class JsonLoaderTest extends SolrTestCaseJ4 {
SolrQueryRequest req = req(); SolrQueryRequest req = req();
SolrQueryResponse rsp = new SolrQueryResponse(); SolrQueryResponse rsp = new SolrQueryResponse();
BufferingRequestProcessor p = new BufferingRequestProcessor(null); BufferingRequestProcessor p = new BufferingRequestProcessor(null);
JsonLoader loader = new JsonLoader( req, p ); JsonLoader loader = new JsonLoader();
loader.load(req, rsp, new ContentStreamBase.StringStream(input)); loader.load(req, rsp, new ContentStreamBase.StringStream(input), p);
assertEquals( 2, p.addCommands.size() ); assertEquals( 2, p.addCommands.size() );
@ -153,8 +154,8 @@ public class JsonLoaderTest extends SolrTestCaseJ4 {
SolrQueryRequest req = req("commitWithin","100", "overwrite","false"); SolrQueryRequest req = req("commitWithin","100", "overwrite","false");
SolrQueryResponse rsp = new SolrQueryResponse(); SolrQueryResponse rsp = new SolrQueryResponse();
BufferingRequestProcessor p = new BufferingRequestProcessor(null); BufferingRequestProcessor p = new BufferingRequestProcessor(null);
JsonLoader loader = new JsonLoader( req, p ); JsonLoader loader = new JsonLoader();
loader.load(req, rsp, new ContentStreamBase.StringStream(str)); loader.load(req, rsp, new ContentStreamBase.StringStream(str), p);
assertEquals( 2, p.addCommands.size() ); assertEquals( 2, p.addCommands.size() );
@ -181,8 +182,8 @@ public class JsonLoaderTest extends SolrTestCaseJ4 {
SolrQueryRequest req = req(); SolrQueryRequest req = req();
SolrQueryResponse rsp = new SolrQueryResponse(); SolrQueryResponse rsp = new SolrQueryResponse();
BufferingRequestProcessor p = new BufferingRequestProcessor(null); BufferingRequestProcessor p = new BufferingRequestProcessor(null);
JsonLoader loader = new JsonLoader( req, p ); JsonLoader loader = new JsonLoader();
loader.load(req, rsp, new ContentStreamBase.StringStream(str)); loader.load(req, rsp, new ContentStreamBase.StringStream(str), p);
assertEquals( 2, p.addCommands.size() ); assertEquals( 2, p.addCommands.size() );
@ -208,8 +209,8 @@ public class JsonLoaderTest extends SolrTestCaseJ4 {
SolrQueryRequest req = req(); SolrQueryRequest req = req();
SolrQueryResponse rsp = new SolrQueryResponse(); SolrQueryResponse rsp = new SolrQueryResponse();
BufferingRequestProcessor p = new BufferingRequestProcessor(null); BufferingRequestProcessor p = new BufferingRequestProcessor(null);
JsonLoader loader = new JsonLoader( req, p ); JsonLoader loader = new JsonLoader();
loader.load(req, rsp, new ContentStreamBase.StringStream(str)); loader.load(req, rsp, new ContentStreamBase.StringStream(str), p);
assertEquals( 1, p.addCommands.size() ); assertEquals( 1, p.addCommands.size() );

View File

@ -88,16 +88,18 @@ public class TestCSVLoader extends SolrTestCaseJ4 {
// TODO: stop using locally defined streams once stream.file and // TODO: stop using locally defined streams once stream.file and
// stream.body work everywhere // stream.body work everywhere
List<ContentStream> cs = new ArrayList<ContentStream>(); List<ContentStream> cs = new ArrayList<ContentStream>(1);
cs.add(new ContentStreamBase.FileStream(new File(filename))); ContentStreamBase f = new ContentStreamBase.FileStream(new File(filename));
f.setContentType("text/csv");
cs.add(f);
req.setContentStreams(cs); req.setContentStreams(cs);
h.query("/update/csv",req); h.query("/update",req);
} }
@Test @Test
public void testCSVLoad() throws Exception { public void testCSVLoad() throws Exception {
makeFile("id\n100\n101\n102"); makeFile("id\n100\n101\n102");
loadLocal("stream.file",filename); loadLocal();
// check default commit of false // check default commit of false
assertQ(req("id:[100 TO 110]"),"//*[@numFound='0']"); assertQ(req("id:[100 TO 110]"),"//*[@numFound='0']");
assertU(commit()); assertU(commit());
@ -107,7 +109,7 @@ public class TestCSVLoader extends SolrTestCaseJ4 {
@Test @Test
public void testCommitFalse() throws Exception { public void testCommitFalse() throws Exception {
makeFile("id\n100\n101\n102"); makeFile("id\n100\n101\n102");
loadLocal("stream.file",filename,"commit","false"); loadLocal("commit","false");
assertQ(req("id:[100 TO 110]"),"//*[@numFound='0']"); assertQ(req("id:[100 TO 110]"),"//*[@numFound='0']");
assertU(commit()); assertU(commit());
assertQ(req("id:[100 TO 110]"),"//*[@numFound='3']"); assertQ(req("id:[100 TO 110]"),"//*[@numFound='3']");
@ -116,14 +118,14 @@ public class TestCSVLoader extends SolrTestCaseJ4 {
@Test @Test
public void testCommitTrue() throws Exception { public void testCommitTrue() throws Exception {
makeFile("id\n100\n101\n102"); makeFile("id\n100\n101\n102");
loadLocal("stream.file",filename,"commit","true"); loadLocal("commit","true");
assertQ(req("id:[100 TO 110]"),"//*[@numFound='3']"); assertQ(req("id:[100 TO 110]"),"//*[@numFound='3']");
} }
@Test @Test
public void testLiteral() throws Exception { public void testLiteral() throws Exception {
makeFile("id\n100"); makeFile("id\n100");
loadLocal("stream.file",filename,"commit","true", "literal.name","LITERAL_VALUE"); loadLocal("commit","true", "literal.name","LITERAL_VALUE");
assertQ(req("*:*"),"//doc/str[@name='name'][.='LITERAL_VALUE']"); assertQ(req("*:*"),"//doc/str[@name='name'][.='LITERAL_VALUE']");
} }
@ -133,7 +135,7 @@ public class TestCSVLoader extends SolrTestCaseJ4 {
lrf.args.put(CommonParams.VERSION,"2.2"); lrf.args.put(CommonParams.VERSION,"2.2");
makeFile("id,str_s\n100,\"quoted\"\n101,\n102,\"\"\n103,"); makeFile("id,str_s\n100,\"quoted\"\n101,\n102,\"\"\n103,");
loadLocal("stream.file",filename,"commit","true"); loadLocal("commit","true");
assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']");
assertQ(req("id:100"),"//arr[@name='str_s']/str[.='quoted']"); assertQ(req("id:100"),"//arr[@name='str_s']/str[.='quoted']");
assertQ(req("id:101"),"count(//str[@name='str_s'])=0"); assertQ(req("id:101"),"count(//str[@name='str_s'])=0");
@ -145,23 +147,23 @@ public class TestCSVLoader extends SolrTestCaseJ4 {
assertQ(req("id:103"),"count(//str[@name='str_s'])=0"); assertQ(req("id:103"),"count(//str[@name='str_s'])=0");
// test overwrite by default // test overwrite by default
loadLocal("stream.file",filename, "commit","true"); loadLocal("commit","true");
assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']");
// test explicitly adding header=true (the default) // test explicitly adding header=true (the default)
loadLocal("stream.file",filename, "commit","true","header","true"); loadLocal("commit","true","header","true");
assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']");
// test no overwrites // test no overwrites
loadLocal("stream.file",filename, "commit","true", "overwrite","false"); loadLocal("commit","true", "overwrite","false");
assertQ(req("id:[100 TO 110]"),"//*[@numFound='8']"); assertQ(req("id:[100 TO 110]"),"//*[@numFound='8']");
// test overwrite // test overwrite
loadLocal("stream.file",filename, "commit","true"); loadLocal("commit","true");
assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']");
// test global value mapping // test global value mapping
loadLocal("stream.file",filename, "commit","true", "map","quoted:QUOTED"); loadLocal("commit","true", "map","quoted:QUOTED");
assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']");
assertQ(req("id:100"),"//arr[@name='str_s']/str[.='QUOTED']"); assertQ(req("id:100"),"//arr[@name='str_s']/str[.='QUOTED']");
assertQ(req("id:101"),"count(//str[@name='str_s'])=0"); assertQ(req("id:101"),"count(//str[@name='str_s'])=0");
@ -169,12 +171,12 @@ public class TestCSVLoader extends SolrTestCaseJ4 {
assertQ(req("id:103"),"count(//str[@name='str_s'])=0"); assertQ(req("id:103"),"count(//str[@name='str_s'])=0");
// test value mapping to empty (remove) // test value mapping to empty (remove)
loadLocal("stream.file",filename, "commit","true", "map","quoted:"); loadLocal("commit","true", "map","quoted:");
assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']");
assertQ(req("id:100"),"count(//str[@name='str_s'])=0"); assertQ(req("id:100"),"count(//str[@name='str_s'])=0");
// test value mapping from empty // test value mapping from empty
loadLocal("stream.file",filename, "commit","true", "map",":EMPTY"); loadLocal("commit","true", "map",":EMPTY");
assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']");
assertQ(req("id:100"),"//arr[@name='str_s']/str[.='quoted']"); assertQ(req("id:100"),"//arr[@name='str_s']/str[.='quoted']");
assertQ(req("id:101"),"//arr[@name='str_s']/str[.='EMPTY']"); assertQ(req("id:101"),"//arr[@name='str_s']/str[.='EMPTY']");
@ -182,7 +184,7 @@ public class TestCSVLoader extends SolrTestCaseJ4 {
assertQ(req("id:103"),"//arr[@name='str_s']/str[.='EMPTY']"); assertQ(req("id:103"),"//arr[@name='str_s']/str[.='EMPTY']");
// test multiple map rules // test multiple map rules
loadLocal("stream.file",filename, "commit","true", "map",":EMPTY", "map","quoted:QUOTED"); loadLocal("commit","true", "map",":EMPTY", "map","quoted:QUOTED");
assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']");
assertQ(req("id:100"),"//arr[@name='str_s']/str[.='QUOTED']"); assertQ(req("id:100"),"//arr[@name='str_s']/str[.='QUOTED']");
assertQ(req("id:101"),"//arr[@name='str_s']/str[.='EMPTY']"); assertQ(req("id:101"),"//arr[@name='str_s']/str[.='EMPTY']");
@ -190,7 +192,7 @@ public class TestCSVLoader extends SolrTestCaseJ4 {
assertQ(req("id:103"),"//arr[@name='str_s']/str[.='EMPTY']"); assertQ(req("id:103"),"//arr[@name='str_s']/str[.='EMPTY']");
// test indexing empty fields // test indexing empty fields
loadLocal("stream.file",filename, "commit","true", "f.str_s.keepEmpty","true"); loadLocal("commit","true", "f.str_s.keepEmpty","true");
assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']");
assertQ(req("id:100"),"//arr[@name='str_s']/str[.='quoted']"); assertQ(req("id:100"),"//arr[@name='str_s']/str[.='quoted']");
assertQ(req("id:101"),"//arr[@name='str_s']/str[.='']"); assertQ(req("id:101"),"//arr[@name='str_s']/str[.='']");
@ -198,7 +200,7 @@ public class TestCSVLoader extends SolrTestCaseJ4 {
assertQ(req("id:103"),"//arr[@name='str_s']/str[.='']"); assertQ(req("id:103"),"//arr[@name='str_s']/str[.='']");
// test overriding the name of fields // test overriding the name of fields
loadLocal("stream.file",filename, "commit","true", loadLocal("commit","true",
"fieldnames","id,my_s", "header","true", "fieldnames","id,my_s", "header","true",
"f.my_s.map",":EMPTY"); "f.my_s.map",":EMPTY");
assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']");
@ -214,23 +216,23 @@ public class TestCSVLoader extends SolrTestCaseJ4 {
assertQ(req("id:id"),"//*[@numFound='0']"); assertQ(req("id:id"),"//*[@numFound='0']");
// test skipping a field via the "skip" parameter // test skipping a field via the "skip" parameter
loadLocal("stream.file",filename,"commit","true","keepEmpty","true","skip","str_s"); loadLocal("commit","true","keepEmpty","true","skip","str_s");
assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']");
assertQ(req("id:[100 TO 110]"),"count(//str[@name='str_s']/str)=0"); assertQ(req("id:[100 TO 110]"),"count(//str[@name='str_s']/str)=0");
// test skipping a field by specifying an empty name // test skipping a field by specifying an empty name
loadLocal("stream.file",filename,"commit","true","keepEmpty","true","fieldnames","id,"); loadLocal("commit","true","keepEmpty","true","fieldnames","id,");
assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']");
assertQ(req("id:[100 TO 110]"),"count(//str[@name='str_s']/str)=0"); assertQ(req("id:[100 TO 110]"),"count(//str[@name='str_s']/str)=0");
// test loading file as if it didn't have a header // test loading file as if it didn't have a header
loadLocal("stream.file",filename, "commit","true", loadLocal("commit","true",
"fieldnames","id,my_s", "header","false"); "fieldnames","id,my_s", "header","false");
assertQ(req("id:id"),"//*[@numFound='1']"); assertQ(req("id:id"),"//*[@numFound='1']");
assertQ(req("id:100"),"//arr[@name='my_s']/str[.='quoted']"); assertQ(req("id:100"),"//arr[@name='my_s']/str[.='quoted']");
// test skipLines // test skipLines
loadLocal("stream.file",filename, "commit","true", loadLocal("commit","true",
"fieldnames","id,my_s", "header","false", "skipLines","1"); "fieldnames","id,my_s", "header","false", "skipLines","1");
assertQ(req("id:id"),"//*[@numFound='1']"); assertQ(req("id:id"),"//*[@numFound='1']");
assertQ(req("id:100"),"//arr[@name='my_s']/str[.='quoted']"); assertQ(req("id:100"),"//arr[@name='my_s']/str[.='quoted']");
@ -242,7 +244,7 @@ public class TestCSVLoader extends SolrTestCaseJ4 {
+"101,\"a,b,c\"\n" +"101,\"a,b,c\"\n"
+"102,\"a,,b\"\n" +"102,\"a,,b\"\n"
+"103,\n"); +"103,\n");
loadLocal("stream.file",filename, "commit","true", loadLocal("commit","true",
"f.str_s.map",":EMPTY", "f.str_s.map",":EMPTY",
"f.str_s.split","true"); "f.str_s.split","true");
assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']");
@ -263,7 +265,7 @@ public class TestCSVLoader extends SolrTestCaseJ4 {
+"104|a\\\\b\n" // no backslash escaping should be done by default +"104|a\\\\b\n" // no backslash escaping should be done by default
); );
loadLocal("stream.file",filename, "commit","true", loadLocal("commit","true",
"separator","|", "separator","|",
"encapsulator","^", "encapsulator","^",
"f.str_s.map",":EMPTY", "f.str_s.map",":EMPTY",
@ -286,7 +288,7 @@ public class TestCSVLoader extends SolrTestCaseJ4 {
+"101,unquoted \"\" \\ string\n" // double encap shouldn't be an escape outside encap +"101,unquoted \"\" \\ string\n" // double encap shouldn't be an escape outside encap
+"102,end quote \\\n" +"102,end quote \\\n"
); );
loadLocal("stream.file",filename, "commit","true" loadLocal("commit","true"
); );
assertQ(req("id:100"),"//arr[@name='str_s']/str[.='quoted \" \\ string']"); assertQ(req("id:100"),"//arr[@name='str_s']/str[.='quoted \" \\ string']");
assertQ(req("id:101"),"//arr[@name='str_s']/str[.='unquoted \"\" \\ string']"); assertQ(req("id:101"),"//arr[@name='str_s']/str[.='unquoted \"\" \\ string']");
@ -298,7 +300,7 @@ public class TestCSVLoader extends SolrTestCaseJ4 {
+"100,\"quoted \"\" \\\" \\\\ string\"\n" // quotes should be part of value +"100,\"quoted \"\" \\\" \\\\ string\"\n" // quotes should be part of value
+"101,unquoted \"\" \\\" \\, \\\\ string\n" +"101,unquoted \"\" \\\" \\, \\\\ string\n"
); );
loadLocal("stream.file",filename, "commit","true" loadLocal("commit","true"
,"escape","\\" ,"escape","\\"
); );
assertQ(req("id:100"),"//arr[@name='str_s']/str[.='\"quoted \"\" \" \\ string\"']"); assertQ(req("id:100"),"//arr[@name='str_s']/str[.='\"quoted \"\" \" \\ string\"']");

View File

@ -20,6 +20,7 @@ import org.apache.solr.SolrTestCaseJ4;
import org.apache.commons.lang.ObjectUtils; import org.apache.commons.lang.ObjectUtils;
import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.util.ContentStreamBase; import org.apache.solr.common.util.ContentStreamBase;
import org.apache.solr.handler.loader.XMLLoader;
import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.AddUpdateCommand; import org.apache.solr.update.AddUpdateCommand;
@ -40,12 +41,12 @@ import javax.xml.stream.XMLStreamReader;
public class XmlUpdateRequestHandlerTest extends SolrTestCaseJ4 { public class XmlUpdateRequestHandlerTest extends SolrTestCaseJ4 {
private static XMLInputFactory inputFactory = XMLInputFactory.newInstance(); private static XMLInputFactory inputFactory = XMLInputFactory.newInstance();
protected static XmlUpdateRequestHandler handler; protected static UpdateRequestHandler handler;
@BeforeClass @BeforeClass
public static void beforeTests() throws Exception { public static void beforeTests() throws Exception {
initCore("solrconfig.xml","schema.xml"); initCore("solrconfig.xml","schema.xml");
handler = new XmlUpdateRequestHandler(); handler = new UpdateRequestHandler();
} }
@Test @Test
@ -65,7 +66,7 @@ public class XmlUpdateRequestHandlerTest extends SolrTestCaseJ4 {
inputFactory.createXMLStreamReader( new StringReader( xml ) ); inputFactory.createXMLStreamReader( new StringReader( xml ) );
parser.next(); // read the START document... parser.next(); // read the START document...
//null for the processor is all right here //null for the processor is all right here
XMLLoader loader = new XMLLoader(null, inputFactory); XMLLoader loader = new XMLLoader();
SolrInputDocument doc = loader.readDoc( parser ); SolrInputDocument doc = loader.readDoc( parser );
// Read boosts // Read boosts
@ -100,8 +101,8 @@ public class XmlUpdateRequestHandlerTest extends SolrTestCaseJ4 {
SolrQueryResponse rsp = new SolrQueryResponse(); SolrQueryResponse rsp = new SolrQueryResponse();
BufferingRequestProcessor p = new BufferingRequestProcessor(null); BufferingRequestProcessor p = new BufferingRequestProcessor(null);
XMLLoader loader = new XMLLoader(p, inputFactory); XMLLoader loader = new XMLLoader().init(null);
loader.load(req, rsp, new ContentStreamBase.StringStream(xml)); loader.load(req, rsp, new ContentStreamBase.StringStream(xml), p);
AddUpdateCommand add = p.addCommands.get(0); AddUpdateCommand add = p.addCommands.get(0);
assertEquals(100, add.commitWithin); assertEquals(100, add.commitWithin);
@ -135,8 +136,8 @@ public class XmlUpdateRequestHandlerTest extends SolrTestCaseJ4 {
p.expectDelete(null, "id:150", 500); p.expectDelete(null, "id:150", 500);
p.expectDelete("150", null, -1); p.expectDelete("150", null, -1);
XMLLoader loader = new XMLLoader(p, inputFactory); XMLLoader loader = new XMLLoader().init(null);
loader.load(req(), new SolrQueryResponse(), new ContentStreamBase.StringStream(xml)); loader.load(req(), new SolrQueryResponse(), new ContentStreamBase.StringStream(xml), p);
p.assertNoCommandsPending(); p.assertNoCommandsPending();
} }

View File

@ -22,6 +22,7 @@ import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.MapSolrParams; import org.apache.solr.common.params.MapSolrParams;
import org.apache.solr.common.util.ContentStream; import org.apache.solr.common.util.ContentStream;
import org.apache.solr.common.util.ContentStreamBase; import org.apache.solr.common.util.ContentStreamBase;
@ -35,12 +36,10 @@ import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
public class XsltUpdateRequestHandlerTest extends SolrTestCaseJ4 { public class XsltUpdateRequestHandlerTest extends SolrTestCaseJ4 {
protected static XsltUpdateRequestHandler handler;
@BeforeClass @BeforeClass
public static void beforeTests() throws Exception { public static void beforeTests() throws Exception {
initCore("solrconfig.xml","schema.xml"); initCore("solrconfig.xml","schema.xml");
handler = new XsltUpdateRequestHandler();
} }
@Override @Override
@ -65,24 +64,24 @@ public class XsltUpdateRequestHandlerTest extends SolrTestCaseJ4 {
" </document>" + " </document>" +
"</random>"; "</random>";
Map<String,String> args = new HashMap<String, String>(); Map<String,String> args = new HashMap<String, String>();
args.put("tr", "xsl-update-handler-test.xsl"); args.put(CommonParams.TR, "xsl-update-handler-test.xsl");
SolrCore core = h.getCore(); SolrCore core = h.getCore();
LocalSolrQueryRequest req = new LocalSolrQueryRequest( core, new MapSolrParams( args) ); LocalSolrQueryRequest req = new LocalSolrQueryRequest( core, new MapSolrParams( args) );
ArrayList<ContentStream> streams = new ArrayList<ContentStream>(); ArrayList<ContentStream> streams = new ArrayList<ContentStream>();
streams.add(new ContentStreamBase.StringStream(xml)); streams.add(new ContentStreamBase.StringStream(xml));
req.setContentStreams(streams); req.setContentStreams(streams);
SolrQueryResponse rsp = new SolrQueryResponse(); SolrQueryResponse rsp = new SolrQueryResponse();
XsltUpdateRequestHandler handler = new XsltUpdateRequestHandler(); UpdateRequestHandler handler = new UpdateRequestHandler();
handler.init(new NamedList<String>()); handler.init(new NamedList<String>());
handler.handleRequestBody(req, rsp); handler.handleRequestBody(req, rsp);
StringWriter sw = new StringWriter(32000); StringWriter sw = new StringWriter(32000);
QueryResponseWriter responseWriter = core.getQueryResponseWriter(req); QueryResponseWriter responseWriter = core.getQueryResponseWriter(req);
responseWriter.write(sw,req,rsp); responseWriter.write(sw,req,rsp);
req.close(); req.close();
String response = sw.toString(); String response = sw.toString();
assertU(response); assertU(response);
assertU(commit()); assertU(commit());
assertQ("test document was correctly committed", req("q","*:*") assertQ("test document was correctly committed", req("q","*:*")

View File

@ -75,7 +75,7 @@ public abstract class CacheHeaderTestBase extends SolrJettyTestBase {
} }
java.net.URI uri = URIUtils.createURI("http", java.net.URI uri = URIUtils.createURI("http",
new URI(httpserver.getBaseURL()).getHost(), new URI(httpserver.getBaseURL()).getHost(),
new URI(httpserver.getBaseURL()).getPort(), "/solr/update/csv", new URI(httpserver.getBaseURL()).getPort(), "/solr/update",
URLEncodedUtils.format(qparams, "UTF-8"), null); URLEncodedUtils.format(qparams, "UTF-8"), null);
if ("GET".equals(method)) { if ("GET".equals(method)) {

View File

@ -27,7 +27,7 @@ import org.apache.solr.common.util.ContentStreamBase;
import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrCore; import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrEventListener; import org.apache.solr.core.SolrEventListener;
import org.apache.solr.handler.XmlUpdateRequestHandler; import org.apache.solr.handler.UpdateRequestHandler;
import org.apache.solr.request.SolrQueryRequestBase; import org.apache.solr.request.SolrQueryRequestBase;
import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.search.SolrIndexSearcher;
@ -150,7 +150,7 @@ public class AutoCommitTest extends AbstractSolrTestCase {
core.registerNewSearcherListener(trigger); core.registerNewSearcherListener(trigger);
XmlUpdateRequestHandler handler = new XmlUpdateRequestHandler(); UpdateRequestHandler handler = new UpdateRequestHandler();
handler.init( null ); handler.init( null );
MapSolrParams params = new MapSolrParams( new HashMap<String, String>() ); MapSolrParams params = new MapSolrParams( new HashMap<String, String>() );
@ -198,7 +198,7 @@ public class AutoCommitTest extends AbstractSolrTestCase {
tracker.setDocsUpperBound(-1); tracker.setDocsUpperBound(-1);
// updater.commitCallbacks.add(trigger); // updater.commitCallbacks.add(trigger);
XmlUpdateRequestHandler handler = new XmlUpdateRequestHandler(); UpdateRequestHandler handler = new UpdateRequestHandler();
handler.init( null ); handler.init( null );
MapSolrParams params = new MapSolrParams( new HashMap<String, String>() ); MapSolrParams params = new MapSolrParams( new HashMap<String, String>() );
@ -268,7 +268,7 @@ public class AutoCommitTest extends AbstractSolrTestCase {
tracker.setTimeUpperBound(0); tracker.setTimeUpperBound(0);
tracker.setDocsUpperBound(-1); tracker.setDocsUpperBound(-1);
XmlUpdateRequestHandler handler = new XmlUpdateRequestHandler(); UpdateRequestHandler handler = new UpdateRequestHandler();
handler.init( null ); handler.init( null );
MapSolrParams params = new MapSolrParams( new HashMap<String, String>() ); MapSolrParams params = new MapSolrParams( new HashMap<String, String>() );

View File

@ -22,7 +22,7 @@ import java.util.HashMap;
import org.apache.solr.common.params.MapSolrParams; import org.apache.solr.common.params.MapSolrParams;
import org.apache.solr.common.params.UpdateParams; import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.core.*; import org.apache.solr.core.*;
import org.apache.solr.handler.XmlUpdateRequestHandler; import org.apache.solr.handler.UpdateRequestHandler;
import org.apache.solr.request.SolrQueryRequestBase; import org.apache.solr.request.SolrQueryRequestBase;
import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.util.AbstractSolrTestCase; import org.apache.solr.util.AbstractSolrTestCase;
@ -42,7 +42,7 @@ public class UpdateParamsTest extends AbstractSolrTestCase {
public void testUpdateProcessorParamDeprecationRemoved() throws Exception { public void testUpdateProcessorParamDeprecationRemoved() throws Exception {
SolrCore core = h.getCore(); SolrCore core = h.getCore();
XmlUpdateRequestHandler handler = new XmlUpdateRequestHandler(); UpdateRequestHandler handler = new UpdateRequestHandler();
handler.init( null ); handler.init( null );
MapSolrParams params = new MapSolrParams( new HashMap<String, String>() ); MapSolrParams params = new MapSolrParams( new HashMap<String, String>() );

View File

@ -33,7 +33,7 @@ import org.apache.solr.common.util.ContentStream;
import org.apache.solr.common.util.ContentStreamBase; import org.apache.solr.common.util.ContentStreamBase;
import org.apache.solr.core.SolrCore; import org.apache.solr.core.SolrCore;
import org.apache.solr.handler.BinaryUpdateRequestHandler; import org.apache.solr.handler.BinaryUpdateRequestHandler;
import org.apache.solr.handler.XmlUpdateRequestHandler; import org.apache.solr.handler.UpdateRequestHandler;
import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrQueryRequestBase; import org.apache.solr.request.SolrQueryRequestBase;
import org.apache.solr.request.LocalSolrQueryRequest; import org.apache.solr.request.LocalSolrQueryRequest;
@ -328,7 +328,8 @@ public class SignatureUpdateProcessorFactoryTest extends SolrTestCaseJ4 {
LocalSolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), mmparams); LocalSolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), mmparams);
try { try {
req.setContentStreams(streams); req.setContentStreams(streams);
BinaryUpdateRequestHandler h = new BinaryUpdateRequestHandler(); UpdateRequestHandler h = new UpdateRequestHandler();
h.init(new NamedList());
h.handleRequestBody(req, new SolrQueryResponse()); h.handleRequestBody(req, new SolrQueryResponse());
} finally { } finally {
req.close(); req.close();
@ -359,7 +360,7 @@ public class SignatureUpdateProcessorFactoryTest extends SolrTestCaseJ4 {
(SolrParams) mmparams) { (SolrParams) mmparams) {
}; };
XmlUpdateRequestHandler handler = new XmlUpdateRequestHandler(); UpdateRequestHandler handler = new UpdateRequestHandler();
handler.init(null); handler.init(null);
ArrayList<ContentStream> streams = new ArrayList<ContentStream>(2); ArrayList<ContentStream> streams = new ArrayList<ContentStream>(2);
streams.add(new ContentStreamBase.StringStream(doc)); streams.add(new ContentStreamBase.StringStream(doc));

View File

@ -28,7 +28,7 @@ import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.common.util.ContentStream; import org.apache.solr.common.util.ContentStream;
import org.apache.solr.common.util.ContentStreamBase; import org.apache.solr.common.util.ContentStreamBase;
import org.apache.solr.core.SolrCore; import org.apache.solr.core.SolrCore;
import org.apache.solr.handler.XmlUpdateRequestHandler; import org.apache.solr.handler.UpdateRequestHandler;
import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrQueryRequestBase; import org.apache.solr.request.SolrQueryRequestBase;
import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.response.SolrQueryResponse;
@ -112,7 +112,7 @@ public class UniqFieldsUpdateProcessorFactoryTest extends SolrTestCaseJ4 {
(SolrParams) mmparams) { (SolrParams) mmparams) {
}; };
XmlUpdateRequestHandler handler = new XmlUpdateRequestHandler(); UpdateRequestHandler handler = new UpdateRequestHandler();
handler.init(null); handler.init(null);
ArrayList<ContentStream> streams = new ArrayList<ContentStream>(2); ArrayList<ContentStream> streams = new ArrayList<ContentStream>(2);
streams.add(new ContentStreamBase.StringStream(doc)); streams.add(new ContentStreamBase.StringStream(doc));

View File

@ -390,14 +390,7 @@
"update.processor.class" is the class name for the UpdateRequestProcessor. It is initalized "update.processor.class" is the class name for the UpdateRequestProcessor. It is initalized
only once. This can not be changed for each request. only once. This can not be changed for each request.
--> -->
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" > <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<!--
<str name="update.processor.class">org.apache.solr.handler.UpdateRequestProcessor</str>
-->
</requestHandler>
<!-- CSV update handler, loaded on demand -->
<requestHandler name="/update/csv" class="solr.CSVRequestHandler" startup="lazy" />
<!-- <!--

View File

@ -549,14 +549,7 @@
To enable solr1.1 behavior, remove the /update handler or change its path To enable solr1.1 behavior, remove the /update handler or change its path
--> -->
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" /> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<requestHandler name="/update/javabin" class="solr.BinaryUpdateRequestHandler" />
<!-- CSV update handler, loaded on demand -->
<requestHandler name="/update/csv" class="solr.CSVRequestHandler" startup="lazy" />
<!-- <!--

View File

@ -389,14 +389,7 @@
"update.processor.class" is the class name for the UpdateRequestProcessor. It is initalized "update.processor.class" is the class name for the UpdateRequestProcessor. It is initalized
only once. This can not be changed for each request. only once. This can not be changed for each request.
--> -->
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" > <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<!--
<str name="update.processor.class">org.apache.solr.handler.UpdateRequestProcessor</str>
-->
</requestHandler>
<!-- CSV update handler, loaded on demand -->
<requestHandler name="/update/csv" class="solr.CSVRequestHandler" startup="lazy" />
<!-- <!--

View File

@ -388,14 +388,7 @@
"update.processor.class" is the class name for the UpdateRequestProcessor. It is initalized "update.processor.class" is the class name for the UpdateRequestProcessor. It is initalized
only once. This can not be changed for each request. only once. This can not be changed for each request.
--> -->
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" > <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<!--
<str name="update.processor.class">org.apache.solr.handler.UpdateRequestProcessor</str>
-->
</requestHandler>
<!-- CSV update handler, loaded on demand -->
<requestHandler name="/update/csv" class="solr.CSVRequestHandler" startup="lazy" />
<!-- <!--

View File

@ -302,7 +302,7 @@
"update.processor.class" is the class name for the UpdateRequestProcessor. It is initalized "update.processor.class" is the class name for the UpdateRequestProcessor. It is initalized
only once. This can not be changed for each request. only once. This can not be changed for each request.
--> -->
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" > <requestHandler name="/update" class="solr.UpdateRequestHandler" >
<!-- <!--
<str name="update.processor.class">org.apache.solr.handler.UpdateRequestProcessor</str> <str name="update.processor.class">org.apache.solr.handler.UpdateRequestProcessor</str>
--> -->

View File

@ -36,7 +36,7 @@
</requestDispatcher> </requestDispatcher>
<requestHandler name="standard" class="solr.StandardRequestHandler" default="true" /> <requestHandler name="standard" class="solr.StandardRequestHandler" default="true" />
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" /> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<requestHandler name="/admin/" class="org.apache.solr.handler.admin.AdminHandlers" /> <requestHandler name="/admin/" class="org.apache.solr.handler.admin.AdminHandlers" />
<!-- config for the admin interface --> <!-- config for the admin interface -->

View File

@ -36,7 +36,7 @@
</requestDispatcher> </requestDispatcher>
<requestHandler name="standard" class="solr.StandardRequestHandler" default="true" /> <requestHandler name="standard" class="solr.StandardRequestHandler" default="true" />
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" /> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<requestHandler name="/admin/" class="org.apache.solr.handler.admin.AdminHandlers" /> <requestHandler name="/admin/" class="org.apache.solr.handler.admin.AdminHandlers" />
<!-- config for the admin interface --> <!-- config for the admin interface -->

View File

@ -894,19 +894,25 @@
</requestHandler> </requestHandler>
<!-- XML Update Request Handler. <!-- Update Request Handler.
http://wiki.apache.org/solr/UpdateXmlMessages http://wiki.apache.org/solr/UpdateXmlMessages
The canonical Request Handler for Modifying the Index through The canonical Request Handler for Modifying the Index through
commands specified using XML. commands specified using XML, JSON, CSV, or JAVABIN
Note: Since solr1.1 requestHandlers requires a valid content Note: Since solr1.1 requestHandlers requires a valid content
type header if posted in the body. For example, curl now type header if posted in the body. For example, curl now
requires: -H 'Content-type:text/xml; charset=utf-8' requires: -H 'Content-type:text/xml; charset=utf-8'
To override the request content type and force a specific
Content-type, use the request parameter:
?update.contentType=text/csv
This handler will pick a response format to match the input
if the 'wt' parameter is not explicit
--> -->
<requestHandler name="/update" <requestHandler name="/update" class="solr.UpdateRequestHandler">
class="solr.XmlUpdateRequestHandler">
<!-- See below for information on defining <!-- See below for information on defining
updateRequestProcessorChains that can be used by name updateRequestProcessorChains that can be used by name
on each Update Request on each Update Request
@ -916,30 +922,8 @@
<str name="update.chain">dedupe</str> <str name="update.chain">dedupe</str>
</lst> </lst>
--> -->
</requestHandler>
<!-- Binary Update Request Handler
http://wiki.apache.org/solr/javabin
-->
<requestHandler name="/update/javabin"
class="solr.BinaryUpdateRequestHandler" />
<!-- CSV Update Request Handler
http://wiki.apache.org/solr/UpdateCSV
-->
<requestHandler name="/update/csv"
class="solr.CSVRequestHandler"
startup="lazy" />
<!-- JSON Update Request Handler
http://wiki.apache.org/solr/UpdateJSON
-->
<requestHandler name="/update/json" class="solr.JsonUpdateRequestHandler">
<lst name="defaults">
<str name="wt">json</str>
<str name="indent">true</str>
</lst>
</requestHandler> </requestHandler>
<!-- Solr Cell Update Request Handler <!-- Solr Cell Update Request Handler
@ -963,12 +947,6 @@
</lst> </lst>
</requestHandler> </requestHandler>
<!-- XSLT Update Request Handler
Transforms incoming XML with stylesheet identified by tr=
-->
<requestHandler name="/update/xslt"
startup="lazy"
class="solr.XsltUpdateRequestHandler"/>
<!-- Field Analysis Request Handler <!-- Field Analysis Request Handler
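
[Illustrative sketch, not part of this patch] Client-side view of the consolidated handler configured above: a single /update path, with the payload format picked from the Content-Type (or forced with the update.contentType parameter described in the comment). The server URL and file name below are assumptions; the calls mirror the BasicDistributedZkTest change earlier in this commit:

import java.io.File;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;

public class UnifiedUpdateSketch {
  public static void main(String[] args) throws Exception {
    HttpSolrServer server = new HttpSolrServer("http://localhost:8983/solr");
    ContentStreamUpdateRequest up = new ContentStreamUpdateRequest("/update");
    up.addFile(new File("books.csv"), "application/csv");   // CSV payload
    // up.addFile(new File("docs.json"), "application/json"); // JSON goes to the same path
    up.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
    server.request(up);
  }
}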

View File

@ -58,13 +58,14 @@ public class BinaryRequestWriter extends RequestWriter {
@Override @Override
public String getUpdateContentType() { public String getUpdateContentType() {
return "application/octet-stream"; return "application/javabin";
} }
@Override @Override
public ContentStream getContentStream(final UpdateRequest request) throws IOException { public ContentStream getContentStream(final UpdateRequest request) throws IOException {
final BAOS baos = new BAOS(); final BAOS baos = new BAOS();
new JavaBinUpdateRequestCodec().marshal(request, baos); new JavaBinUpdateRequestCodec().marshal(request, baos);
return new ContentStream() { return new ContentStream() {
public String getName() { public String getName() {
return null; return null;
@ -75,7 +76,7 @@ public class BinaryRequestWriter extends RequestWriter {
} }
public String getContentType() { public String getContentType() {
return "application/octet-stream"; return "application/javabin";
} }
public Long getSize() // size if we know it, otherwise null public Long getSize() // size if we know it, otherwise null
@ -99,9 +100,10 @@ public class BinaryRequestWriter extends RequestWriter {
if (request instanceof UpdateRequest) { if (request instanceof UpdateRequest) {
UpdateRequest updateRequest = (UpdateRequest) request; UpdateRequest updateRequest = (UpdateRequest) request;
new JavaBinUpdateRequestCodec().marshal(updateRequest, os); new JavaBinUpdateRequestCodec().marshal(updateRequest, os);
} }
} }
/* /*
* A hack to get access to the protected internal buffer and avoid an additional copy * A hack to get access to the protected internal buffer and avoid an additional copy
*/ */
class BAOS extends ByteArrayOutputStream { class BAOS extends ByteArrayOutputStream {
@ -109,13 +111,4 @@ public class BinaryRequestWriter extends RequestWriter {
return super.buf; return super.buf;
} }
} }
@Override
public String getPath(SolrRequest req) {
if (req instanceof UpdateRequest) {
return "/update/javabin";
} else {
return req.getPath();
}
}
} }
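
[Illustrative sketch, not part of this patch] With the getPath() override removed and the content type switched to application/javabin, a client using BinaryRequestWriter posts javabin updates to the plain /update handler. The server URL and document field below are assumptions, assuming the usual HttpSolrServer#setRequestWriter hook:

import org.apache.solr.client.solrj.impl.BinaryRequestWriter;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.common.SolrInputDocument;

public class JavabinUpdateSketch {
  public static void main(String[] args) throws Exception {
    HttpSolrServer server = new HttpSolrServer("http://localhost:8983/solr");
    // Updates are marshalled as application/javabin and sent to /update;
    // no separate /update/javabin handler is registered anymore.
    server.setRequestWriter(new BinaryRequestWriter());
    SolrInputDocument doc = new SolrInputDocument();
    doc.addField("id", "1");
    server.add(doc);
    server.commit();
  }
}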

View File

@ -129,8 +129,8 @@ public class ConcurrentUpdateSolrServer extends SolrServer {
if (updateRequest == null) if (updateRequest == null)
break; break;
final boolean isXml = ClientUtils.TEXT_XML.equals(server.requestWriter String contentType = server.requestWriter.getUpdateContentType();
.getUpdateContentType()); final boolean isXml = ClientUtils.TEXT_XML.equals(contentType);
final ModifiableSolrParams origParams = new ModifiableSolrParams(updateRequest.getParams()); final ModifiableSolrParams origParams = new ModifiableSolrParams(updateRequest.getParams());
@ -188,12 +188,13 @@ public class ConcurrentUpdateSolrServer extends SolrServer {
requestParams.set(CommonParams.WT, server.parser.getWriterType()); requestParams.set(CommonParams.WT, server.parser.getWriterType());
requestParams.set(CommonParams.VERSION, server.parser.getVersion()); requestParams.set(CommonParams.VERSION, server.parser.getVersion());
final String path = isXml ? "/update" : "/update/javabin"; method = new HttpPost(server.getBaseURL() + "/update"
method = new HttpPost(server.getBaseURL() + path
+ ClientUtils.toQueryString(requestParams, false)); + ClientUtils.toQueryString(requestParams, false));
method.setEntity(template); method.setEntity(template);
method.addHeader("User-Agent", HttpSolrServer.AGENT); method.addHeader("User-Agent", HttpSolrServer.AGENT);
method.addHeader("Content-Type", contentType);
response = server.getHttpClient().execute(method); response = server.getHttpClient().execute(method);
int statusCode = response.getStatusLine().getStatusCode(); int statusCode = response.getStatusLine().getStatusCode();
log.info("Status for: " log.info("Status for: "

View File

@ -61,8 +61,10 @@ public class ContentStreamUpdateRequest extends AbstractUpdateRequest {
* @see #getContentStreams() * @see #getContentStreams()
* @see org.apache.solr.common.util.ContentStreamBase.FileStream * @see org.apache.solr.common.util.ContentStreamBase.FileStream
*/ */
public void addFile(File file) throws IOException { public void addFile(File file, String contentType) throws IOException {
addContentStream(new ContentStreamBase.FileStream(file)); ContentStreamBase cs = new ContentStreamBase.FileStream(file);
cs.setContentType(contentType);
addContentStream(cs);
} }
/** /**
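Callers now state the content type when attaching a file, and every format goes through the same /update path. A minimal usage sketch, assuming a running SolrServer instance and a local books.csv:

import java.io.File;

import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;

public class CsvUploadSketch {
  public static void upload(SolrServer server) throws Exception {
    ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update");
    // The explicit type routes the stream to the CSV loader inside UpdateRequestHandler.
    req.addFile(new File("books.csv"), "application/csv");
    req.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
    server.request(req);
  }
}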

View File

@ -79,6 +79,9 @@ public interface CommonParams {
/** default query field */ /** default query field */
public static final String DF = "df"; public static final String DF = "df";
/** Transformer param -- used with XSLT */
public static final String TR = "tr";
/** whether to include debug data for all components pieces, including doing explains*/ /** whether to include debug data for all components pieces, including doing explains*/
public static final String DEBUG_QUERY = "debugQuery"; public static final String DEBUG_QUERY = "debugQuery";
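The new TR constant is a symbolic name for the existing tr request parameter consumed by the XSLT response writer. A hedged SolrJ sketch; the stylesheet name is an assumption and would have to exist under conf/xslt/:

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.params.CommonParams;

public class XsltQuerySketch {
  public static SolrQuery build() {
    SolrQuery query = new SolrQuery("*:*");
    query.set(CommonParams.WT, "xslt");        // use the XSLT response writer
    query.set(CommonParams.TR, "example.xsl"); // assumed stylesheet under conf/xslt/
    return query;
  }
}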

View File

@ -55,6 +55,9 @@ public interface UpdateParams
/** Select the update processor chain to use. A RequestHandler may or may not respect this parameter */ /** Select the update processor chain to use. A RequestHandler may or may not respect this parameter */
public static final String UPDATE_CHAIN = "update.chain"; public static final String UPDATE_CHAIN = "update.chain";
/** Override the content type used for UpdateLoader **/
public static final String ASSUME_CONTENT_TYPE = "update.contentType";
/** /**
* If optimizing, set the maximum number of segments left in the index after optimization. 1 is the default (and is equivalent to calling IndexWriter.optimize() in Lucene). * If optimizing, set the maximum number of segments left in the index after optimization. 1 is the default (and is equivalent to calling IndexWriter.optimize() in Lucene).
*/ */
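update.contentType lets a client override whatever type the transport reports, so a generically typed stream can still be handed to a specific loader. A hedged sketch; the file name and types are assumptions:

import java.io.File;

import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.apache.solr.common.params.UpdateParams;

public class AssumeContentTypeSketch {
  public static void upload(SolrServer server) throws Exception {
    ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update");
    // The transport says octet-stream, but the parameter forces the CSV loader.
    req.addFile(new File("export.dat"), "application/octet-stream");
    req.setParam(UpdateParams.ASSUME_CONTENT_TYPE, "application/csv");
    server.request(req);
  }
}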

View File

@ -104,6 +104,22 @@ public abstract class ContentStreamBase implements ContentStream
sourceInfo = file.toURI().toString(); sourceInfo = file.toURI().toString();
} }
public String getContentType() {
if(contentType==null) {
try {
char first = (char)new FileInputStream( file ).read();
if(first == '<') {
return "application/xml";
}
if(first == '{') {
return "application/json";
}
}
catch(Exception ex) {}
}
return contentType;
}
public InputStream getStream() throws IOException { public InputStream getStream() throws IOException {
return new FileInputStream( file ); return new FileInputStream( file );
} }
@ -123,7 +139,7 @@ public abstract class ContentStreamBase implements ContentStream
/** /**
* Construct a <code>ContentStream</code> from a <code>File</code> * Construct a <code>ContentStream</code> from a <code>String</code>
*/ */
public static class StringStream extends ContentStreamBase public static class StringStream extends ContentStreamBase
{ {
@ -138,6 +154,20 @@ public abstract class ContentStreamBase implements ContentStream
sourceInfo = "string"; sourceInfo = "string";
} }
public String getContentType() {
if(contentType==null && str.length() > 0) {
char first = str.charAt(0);
if(first == '<') {
return "application/xml";
}
if(first == '{') {
return "application/json";
}
// find a comma? for CSV?
}
return contentType;
}
public InputStream getStream() throws IOException { public InputStream getStream() throws IOException {
return new ByteArrayInputStream( str.getBytes(DEFAULT_CHARSET) ); return new ByteArrayInputStream( str.getBytes(DEFAULT_CHARSET) );
} }
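Both FileStream and StringStream now guess a type from the first character when none was set explicitly: '<' maps to application/xml and '{' to application/json; anything else falls back to the stored contentType, which may be null. A small sketch of the observable behaviour:

import org.apache.solr.common.util.ContentStreamBase;

public class ContentTypeSniffSketch {
  public static void main(String[] args) {
    ContentStreamBase.StringStream xml = new ContentStreamBase.StringStream("<add/>");
    System.out.println(xml.getContentType());   // application/xml
    ContentStreamBase.StringStream json = new ContentStreamBase.StringStream("{}");
    System.out.println(json.getContentType());  // application/json
    ContentStreamBase.StringStream other = new ContentStreamBase.StringStream("id,name");
    System.out.println(other.getContentType()); // null -- CSV is not sniffed
  }
}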

View File

@ -45,10 +45,7 @@
<requestHandler name="lazy" class="solr.StandardRequestHandler" startup="lazy"> <requestHandler name="lazy" class="solr.StandardRequestHandler" startup="lazy">
</requestHandler> </requestHandler>
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler"/> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<requestHandler name="/update/javabin" class="solr.BinaryUpdateRequestHandler"/>
<requestHandler name="/replication" class="solr.ReplicationHandler"> <requestHandler name="/replication" class="solr.ReplicationHandler">

View File

@ -32,7 +32,7 @@
</requestDispatcher> </requestDispatcher>
<requestHandler name="standard" class="solr.StandardRequestHandler" default="true" /> <requestHandler name="standard" class="solr.StandardRequestHandler" default="true" />
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler" /> <requestHandler name="/update" class="solr.UpdateRequestHandler" />
<requestHandler name="/admin/" class="org.apache.solr.handler.admin.AdminHandlers" /> <requestHandler name="/admin/" class="org.apache.solr.handler.admin.AdminHandlers" />
<!-- config for the admin interface --> <!-- config for the admin interface -->

View File

@ -588,8 +588,8 @@ abstract public class SolrExampleTests extends SolrJettyTestBase
QueryResponse rsp = server.query( new SolrQuery( "*:*") ); QueryResponse rsp = server.query( new SolrQuery( "*:*") );
Assert.assertEquals( 0, rsp.getResults().getNumFound() ); Assert.assertEquals( 0, rsp.getResults().getNumFound() );
ContentStreamUpdateRequest up = new ContentStreamUpdateRequest("/update/csv"); ContentStreamUpdateRequest up = new ContentStreamUpdateRequest("/update");
up.addFile(getFile("solrj/books.csv")); up.addFile(getFile("solrj/books.csv"), "application/csv");
up.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); up.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
NamedList<Object> result = server.request(up); NamedList<Object> result = server.request(up);
assertNotNull("Couldn't upload books.csv", result); assertNotNull("Couldn't upload books.csv", result);
@ -606,8 +606,8 @@ abstract public class SolrExampleTests extends SolrJettyTestBase
Assert.assertEquals( 0, rsp.getResults().getNumFound() ); Assert.assertEquals( 0, rsp.getResults().getNumFound() );
ContentStreamUpdateRequest up = new ContentStreamUpdateRequest("/update"); ContentStreamUpdateRequest up = new ContentStreamUpdateRequest("/update");
up.addFile(getFile("solrj/docs1.xml")); // 2 up.addFile(getFile("solrj/docs1.xml"),"application/xml"); // 2
up.addFile(getFile("solrj/docs2.xml")); // 3 up.addFile(getFile("solrj/docs2.xml"),"application/xml"); // 3
up.setParam("a", "\u1234"); up.setParam("a", "\u1234");
up.setParam(CommonParams.HEADER_ECHO_PARAMS, CommonParams.EchoParamStyle.ALL.toString()); up.setParam(CommonParams.HEADER_ECHO_PARAMS, CommonParams.EchoParamStyle.ALL.toString());
up.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); up.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);

View File

@ -332,7 +332,7 @@ public class BasicHttpSolrServerTest extends SolrJettyTestBase {
assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]); assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]);
assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length); assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
assertEquals(server.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]); assertEquals(server.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
assertEquals("application/octet-stream", DebugServlet.headers.get("Content-Type")); assertEquals("application/javabin", DebugServlet.headers.get("Content-Type"));
assertEquals(1, DebugServlet.parameters.get("a").length); assertEquals(1, DebugServlet.parameters.get("a").length);
assertEquals("\u1234", DebugServlet.parameters.get("a")[0]); assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
} }

View File

@ -26,7 +26,7 @@ import org.apache.solr.core.SolrCore;
import org.apache.solr.core.CoreContainer; import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.CoreDescriptor; import org.apache.solr.core.CoreDescriptor;
import org.apache.solr.core.SolrResourceLoader; import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.handler.XmlUpdateRequestHandler; import org.apache.solr.handler.UpdateRequestHandler;
import org.apache.solr.logging.ListenerConfig; import org.apache.solr.logging.ListenerConfig;
import org.apache.solr.logging.LogWatcher; import org.apache.solr.logging.LogWatcher;
import org.apache.solr.logging.jul.JulWatcher; import org.apache.solr.logging.jul.JulWatcher;
@ -74,7 +74,7 @@ public class TestHarness {
private SolrCore core; private SolrCore core;
private final ThreadLocal<DocumentBuilder> builderTL = new ThreadLocal<DocumentBuilder>(); private final ThreadLocal<DocumentBuilder> builderTL = new ThreadLocal<DocumentBuilder>();
private final ThreadLocal<XPath> xpathTL = new ThreadLocal<XPath>(); private final ThreadLocal<XPath> xpathTL = new ThreadLocal<XPath>();
public XmlUpdateRequestHandler updater; public UpdateRequestHandler updater;
public static SolrConfig createConfig(String confFile) { public static SolrConfig createConfig(String confFile) {
// set some system properties for use by tests // set some system properties for use by tests
@ -147,7 +147,7 @@ public class TestHarness {
if (core != null) if (core != null)
core.close(); core.close();
updater = new XmlUpdateRequestHandler(); updater = new UpdateRequestHandler();
updater.init( null ); updater.init( null );
} catch (Exception e) { } catch (Exception e) {
throw new RuntimeException(e); throw new RuntimeException(e);