SOLR-104 - Support for Update Plugins: giving RequestHandlers access to ContentStreams, new URL structure allowing for more RESTful URLs, common baseclass for RequestHandlers, refactoring existing XML based updates into a RequestHandler

git-svn-id: https://svn.apache.org/repos/asf/lucene/solr/trunk@501174 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Chris M. Hostetter 2007-01-29 20:33:21 +00:00
parent 7e877bc49f
commit 3c603b2aea
25 changed files with 1620 additions and 576 deletions

View File

@ -60,6 +60,14 @@ New Features
7. SOLR-107: JAVA API: Change NamedList to use Java5 generics
and implement Iterable<Map.Entry> (Ryan McKinley via yonik)
8. SOLR-104: Support for "Update Plugins" -- RequestHandlers that want
access to streams of data for doing updates. ContentStreams can come
from the raw POST body, multi-part form data, or remote URLs.
Included in this change is a new SolrDispatchFilter that allows
RequestHandlers registered with names that begin with a "/" to be
accessed using a URL structure based on that name.
(Ryan McKinley via hossman)
Changes in runtime behavior
1. Highlighting using DisMax will only pick up terms from the main
user query, not boost or filter queries (klaas).

View File

@ -222,6 +222,8 @@
</query>
<!--Make sure your system has some authentication before enabling remote streaming! -->
<requestParsers enableRemoteStreaming="true" multipartUploadLimitInKB="2048" />
<!-- requestHandler plugins... incoming queries will be dispatched to the
correct handler based on the qt (query type) param matching the
@ -338,6 +340,22 @@
</str>
</requestHandler>
<!-- Standard update plugin. If we put this on /update, it will get all the new goodness -->
<requestHandler name="/update/xml" class="solr.XmlUpdateRequestHandler" >
<!--
<lst name="defaults">
<str name="name">value</str>
</lst>
-->
</requestHandler>
<requestHandler name="/debug/dump" class="solr.DumpRequestHandler" />
<!-- NOTE: /update is mapped to a servlet, but we can still have the filter handle requests on that path! -->
<requestHandler name="/update/commit" class="solr.CommitRequestHandler" />
<!-- queryResponseWriter plugins... query responses will be written using the
writer specified by the 'wt' request parameter matching the name of a registered
writer.
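With the SolrDispatchFilter mapped to /* in web.xml (see the web.xml diff at the end of this commit), any handler registered under a name beginning with "/" -- such as /update/xml or /debug/dump above -- can be reached directly at that path. Below is a rough client-side sketch of posting an XML update that way; the host, port, and /solr context path are assumptions (they match the bundled Jetty example, not anything in this commit):

import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class UpdatePluginClientSketch {
  public static void main(String[] args) throws Exception {
    // The raw POST body becomes a ContentStream handed to XmlUpdateRequestHandler.
    String xml = "<add><doc><field name=\"id\">EXAMPLE-1</field></doc></add>";
    URL url = new URL("http://localhost:8983/solr/update/xml"); // assumed host/port/context
    HttpURLConnection con = (HttpURLConnection) url.openConnection();
    con.setDoOutput(true);
    con.setRequestMethod("POST");
    con.setRequestProperty("Content-Type", "text/xml; charset=UTF-8");
    OutputStream out = con.getOutputStream();
    out.write(xml.getBytes("UTF-8"));
    out.close();
    // Drain the response so the request completes.
    InputStream in = con.getInputStream();
    byte[] buf = new byte[4096];
    StringBuilder rsp = new StringBuilder();
    for (int n = in.read(buf); n != -1; n = in.read(buf)) {
      rsp.append(new String(buf, 0, n, "UTF-8"));
    }
    in.close();
    System.out.println(con.getResponseCode() + " " + rsp);
  }
}

Assuming body/remote streaming stays enabled by the <requestParsers> line above, a GET to /solr/debug/dump?stream.body=hello should likewise echo both the parameters and the stream contents back through DumpRequestHandler.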

View File

@ -0,0 +1,2 @@
AnyObjectId[5058ac0a3df716610375d6401c835aea9c35540f] was removed in git history.
Apache SVN contains full history.

lib/commons-io-1.2.jar Normal file
View File

@ -0,0 +1,2 @@
AnyObjectId[b2867cdde4284228f2adc51e8a0358972bccaaf1] was removed in git history.
Apache SVN contains full history.

View File

@ -181,7 +181,7 @@ public class Config {
private static final String project = "solr";
private static final String base = "org.apache" + "." + project;
private static final String[] packages = {"","analysis.","schema.","search.","update.","core.","request.","util."};
private static final String[] packages = {"","analysis.","schema.","search.","update.","core.","request.","handler.","util."};
public static Class findClass(String cname, String... subpackages) {
ClassLoader loader = getClassLoader();

View File

@ -21,6 +21,7 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.solr.handler.XmlUpdateRequestHandler;
import org.apache.solr.request.*;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
@ -41,7 +42,10 @@ import org.xmlpull.v1.XmlPullParserFactory;
import javax.xml.xpath.XPathConstants;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Reader;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.Writer;
import java.util.*;
import java.util.concurrent.Callable;
@ -241,6 +245,18 @@ public final class SolrCore {
void finalizer() { close(); }
////////////////////////////////////////////////////////////////////////////////
// Update Handler
////////////////////////////////////////////////////////////////////////////////
/**
* RequestHandlers need access to the updateHandler so they can all talk to the
* same RAM indexer.
*/
public UpdateHandler getUpdateHandler()
{
return updateHandler;
}
////////////////////////////////////////////////////////////////////////////////
// Searcher Control
@ -589,14 +605,7 @@ public final class SolrCore {
}
public void execute(SolrRequestHandler handler, SolrQueryRequest req, SolrQueryResponse rsp) {
public void execute(SolrQueryRequest req, SolrQueryResponse rsp) {
SolrRequestHandler handler = getRequestHandler(req.getQueryType());
if (handler==null) {
log.warning("Unknown Request Handler '" + req.getQueryType() +"' :" + req);
throw new SolrException(400,"Unknown Request Handler '" + req.getQueryType() + "'", true);
}
// setup response header and handle request
final NamedList responseHeader = new NamedList();
rsp.add("responseHeader", responseHeader);
@ -607,6 +616,16 @@ public final class SolrCore {
(int)(rsp.getEndTime() - req.getStartTime()));
}
@Deprecated
public void execute(SolrQueryRequest req, SolrQueryResponse rsp) {
SolrRequestHandler handler = getRequestHandler(req.getQueryType());
if (handler==null) {
log.warning("Unknown Request Handler '" + req.getQueryType() +"' :" + req);
throw new SolrException(400,"Unknown Request Handler '" + req.getQueryType() + "'", true);
}
execute(handler, req, rsp);
}
protected void setResponseHeaderValues(NamedList responseHeader,SolrQueryRequest req, SolrQueryResponse rsp) {
// TODO should check that responseHeader has not been replaced by handler
@ -625,321 +644,6 @@ public final class SolrCore {
}
}
XmlPullParserFactory factory;
{
try {
factory = XmlPullParserFactory.newInstance();
} catch (XmlPullParserException e) {
throw new RuntimeException(e);
}
factory.setNamespaceAware(false);
}
private int findNextTag(XmlPullParser xpp, String tag) throws XmlPullParserException, IOException {
int eventType;
while((eventType=xpp.next()) != XmlPullParser.END_DOCUMENT) {
if(eventType == XmlPullParser.START_TAG) {
if (tag.equals(xpp.getName())) break;
}
}
return eventType;
}
public void update(Reader reader, Writer writer) {
// TODO: add param to specify maximum time to commit?
// todo - might be nice to separate command parsing w/ a factory
// then new commands could be added w/o risk to old ones
XmlPullParser xpp = null;
try {
xpp = factory.newPullParser();
} catch (XmlPullParserException e) {
throw new RuntimeException(e);
}
long startTime=System.currentTimeMillis();
try {
xpp.setInput(reader);
xpp.nextTag();
String currTag = xpp.getName();
if ("add".equals(currTag)) {
log.finest("SolrCore.update(add)");
AddUpdateCommand cmd = new AddUpdateCommand();
cmd.allowDups=false; // the default
int status=0;
boolean pendingAttr=false, committedAttr=false;
int attrcount = xpp.getAttributeCount();
for (int i=0; i<attrcount; i++) {
String attrName = xpp.getAttributeName(i);
String attrVal = xpp.getAttributeValue(i);
if ("allowDups".equals(attrName)) {
cmd.allowDups = StrUtils.parseBoolean(attrVal);
} else if ("overwritePending".equals(attrName)) {
cmd.overwritePending = StrUtils.parseBoolean(attrVal);
pendingAttr=true;
} else if ("overwriteCommitted".equals(attrName)) {
cmd.overwriteCommitted = StrUtils.parseBoolean(attrVal);
committedAttr=true;
} else {
log.warning("Unknown attribute id in add:" + attrName);
}
}
//set defaults for committed and pending based on allowDups value
if (!pendingAttr) cmd.overwritePending=!cmd.allowDups;
if (!committedAttr) cmd.overwriteCommitted=!cmd.allowDups;
DocumentBuilder builder = new DocumentBuilder(schema);
SchemaField uniqueKeyField = schema.getUniqueKeyField();
int eventType=0;
// accumulate responses
List<String> added = new ArrayList<String>(10);
while(true) {
// this may be our second time through the loop in the case
// that there are multiple docs in the add... so make sure that
// objects can handle that.
cmd.indexedId = null; // reset the id for this add
if (eventType !=0) {
eventType=xpp.getEventType();
if (eventType==XmlPullParser.END_DOCUMENT) break;
}
// eventType = xpp.next();
eventType = xpp.nextTag();
if (eventType == XmlPullParser.END_TAG || eventType == XmlPullParser.END_DOCUMENT) break; // should match </add>
readDoc(builder,xpp);
builder.endDoc();
cmd.doc = builder.getDoc();
log.finest("adding doc...");
updateHandler.addDoc(cmd);
String docId = null;
if (uniqueKeyField!=null)
docId = schema.printableUniqueKey(cmd.doc);
added.add(docId);
} // end while
// write log and result
StringBuilder out = new StringBuilder();
for (String docId: added)
if(docId != null)
out.append(docId + ",");
String outMsg = out.toString();
if(outMsg.length() > 0)
outMsg = outMsg.substring(0, outMsg.length() - 1);
log.info("added id={" + outMsg + "} in " + (System.currentTimeMillis()-startTime) + "ms");
writer.write("<result status=\"0\"></result>");
} // end add
else if ("commit".equals(currTag) || "optimize".equals(currTag)) {
log.finest("parsing "+currTag);
try {
CommitUpdateCommand cmd = new CommitUpdateCommand("optimize".equals(currTag));
boolean sawWaitSearcher=false, sawWaitFlush=false;
int attrcount = xpp.getAttributeCount();
for (int i=0; i<attrcount; i++) {
String attrName = xpp.getAttributeName(i);
String attrVal = xpp.getAttributeValue(i);
if ("waitFlush".equals(attrName)) {
cmd.waitFlush = StrUtils.parseBoolean(attrVal);
sawWaitFlush=true;
} else if ("waitSearcher".equals(attrName)) {
cmd.waitSearcher = StrUtils.parseBoolean(attrVal);
sawWaitSearcher=true;
} else {
log.warning("unexpected attribute commit/@" + attrName);
}
}
// If waitFlush is specified and waitSearcher wasn't, then
// clear waitSearcher.
if (sawWaitFlush && !sawWaitSearcher) {
cmd.waitSearcher=false;
}
updateHandler.commit(cmd);
if ("optimize".equals(currTag)) {
log.info("optimize 0 "+(System.currentTimeMillis()-startTime));
}
else {
log.info("commit 0 "+(System.currentTimeMillis()-startTime));
}
while (true) {
int eventType = xpp.nextTag();
if (eventType == XmlPullParser.END_TAG) break; // match </commit>
}
writer.write("<result status=\"0\"></result>");
} catch (SolrException e) {
log(e);
if ("optimize".equals(currTag)) {
log.info("optimize "+e.code+" "+
(System.currentTimeMillis()-startTime));
}
else {
log.info("commit "+e.code+" "+
(System.currentTimeMillis()-startTime));
}
writeResult(writer,e);
} catch (Exception e) {
SolrException.log(log, "Exception during commit/optimize",e);
writeResult(writer,e);
}
} // end commit
else if ("delete".equals(currTag)) {
log.finest("parsing delete");
try {
DeleteUpdateCommand cmd = new DeleteUpdateCommand();
cmd.fromPending=true;
cmd.fromCommitted=true;
int attrcount = xpp.getAttributeCount();
for (int i=0; i<attrcount; i++) {
String attrName = xpp.getAttributeName(i);
String attrVal = xpp.getAttributeValue(i);
if ("fromPending".equals(attrName)) {
cmd.fromPending = StrUtils.parseBoolean(attrVal);
} else if ("fromCommitted".equals(attrName)) {
cmd.fromCommitted = StrUtils.parseBoolean(attrVal);
} else {
log.warning("unexpected attribute delete/@" + attrName);
}
}
int eventType = xpp.nextTag();
currTag = xpp.getName();
String val = xpp.nextText();
if ("id".equals(currTag)) {
cmd.id = val;
updateHandler.delete(cmd);
log.info("delete(id " + val + ") 0 " +
(System.currentTimeMillis()-startTime));
} else if ("query".equals(currTag)) {
cmd.query = val;
updateHandler.deleteByQuery(cmd);
log.info("deleteByQuery(query " + val + ") 0 " +
(System.currentTimeMillis()-startTime));
} else {
log.warning("unexpected XML tag /delete/"+currTag);
throw new SolrException(400,"unexpected XML tag /delete/"+currTag);
}
writer.write("<result status=\"0\"></result>");
while (xpp.nextTag() != XmlPullParser.END_TAG);
} catch (SolrException e) {
log(e);
log.info("delete "+e.code+" "+(System.currentTimeMillis()-startTime));
writeResult(writer,e);
} catch (Exception e) {
log(e);
writeResult(writer,e);
}
} // end delete
} catch (XmlPullParserException e) {
log(e);
writeResult(writer,e);
} catch (IOException e) {
log(e);
writeResult(writer,e);
} catch (SolrException e) {
log(e);
log.info("update "+e.code+" "+(System.currentTimeMillis()-startTime));
writeResult(writer,e);
} catch (Throwable e) {
log(e);
writeResult(writer,e);
}
}
private void readDoc(DocumentBuilder builder, XmlPullParser xpp) throws IOException, XmlPullParserException {
// xpp should be at <doc> at this point
builder.startDoc();
int attrcount = xpp.getAttributeCount();
float docBoost = 1.0f;
for (int i=0; i<attrcount; i++) {
String attrName = xpp.getAttributeName(i);
String attrVal = xpp.getAttributeValue(i);
if ("boost".equals(attrName)) {
docBoost = Float.parseFloat(attrVal);
builder.setBoost(docBoost);
} else {
log.warning("Unknown attribute doc/@" + attrName);
}
}
if (docBoost != 1.0f) builder.setBoost(docBoost);
// while (findNextTag(xpp,"field") != XmlPullParser.END_DOCUMENT) {
while(true) {
int eventType = xpp.nextTag();
if (eventType == XmlPullParser.END_TAG) break; // </doc>
String tname=xpp.getName();
// System.out.println("FIELD READER AT TAG " + tname);
if (!"field".equals(tname)) {
log.warning("unexpected XML tag doc/"+tname);
throw new SolrException(400,"unexpected XML tag doc/"+tname);
}
//
// get field name and parse field attributes
//
attrcount = xpp.getAttributeCount();
String name=null;
float boost=1.0f;
boolean isNull=false;
for (int i=0; i<attrcount; i++) {
String attrName = xpp.getAttributeName(i);
String attrVal = xpp.getAttributeValue(i);
if ("name".equals(attrName)) {
name=attrVal;
} else if ("boost".equals(attrName)) {
boost=Float.parseFloat(attrVal);
} else if ("null".equals(attrName)) {
isNull=StrUtils.parseBoolean(attrVal);
} else {
log.warning("Unknown attribute doc/field/@" + attrName);
}
}
// now get the field value
String val = xpp.nextText(); // todo... text event for <field></field>???
// need this line for isNull???
// Don't add fields marked as null (for now at least)
if (!isNull) {
if (boost != 1.0f) {
builder.addField(name,val,boost);
} else {
builder.addField(name,val);
}
}
// do I have to do a nextTag here to read the end_tag?
} // end field loop
}
final public static void log(Throwable e) {
@ -947,23 +651,6 @@ public final class SolrCore {
}
final static void writeResult(Writer out, SolrException e) {
try {
XML.writeXML(out,"result",e.getMessage(),"status",e.code());
} catch (Exception ee) {
log.severe("Error writing to putput stream: "+ee);
}
}
final static void writeResult(Writer out, Throwable e) {
try {
XML.writeXML(out,"result",SolrException.toStr(e),"status","1");
} catch (Exception ee) {
log.severe("Error writing to putput stream: "+ee);
}
}
private QueryResponseWriter defaultResponseWriter;
private final Map<String, QueryResponseWriter> responseWriters = new HashMap<String, QueryResponseWriter>();
@ -1034,3 +721,4 @@ public final class SolrCore {

View File

@ -0,0 +1,71 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler;
import java.io.IOException;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrParams;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrQueryResponse;
import org.apache.solr.update.CommitUpdateCommand;
public class CommitRequestHandler extends RequestHandlerBase
{
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException
{
SolrParams params = req.getParams();
boolean optimize = params.getBool( UpdateParams.OPTIMIZE, false );
CommitUpdateCommand cmd = new CommitUpdateCommand( optimize );
cmd.waitFlush = params.getBool( UpdateParams.WAIT_FLUSH, cmd.waitFlush );
cmd.waitSearcher = params.getBool( UpdateParams.WAIT_SEARCHER, cmd.waitSearcher );
SolrCore.getSolrCore().getUpdateHandler().commit( cmd );
if( optimize ) {
rsp.add( "optimize", "true" );
}
else {
rsp.add( "commit", "true" );
}
}
//////////////////////// SolrInfoMBeans methods //////////////////////
@Override
public String getDescription() {
return "Commit all pending documents";
}
@Override
public String getVersion() {
return "$Revision:$";
}
@Override
public String getSourceId() {
return "$Id:$";
}
@Override
public String getSource() {
return "$URL:$";
}
}

View File

@ -0,0 +1,85 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler;
import java.io.IOException;
import java.util.Map;
import org.apache.commons.io.IOUtils;
import org.apache.solr.request.ContentStream;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrQueryResponse;
import org.apache.solr.util.NamedList;
public class DumpRequestHandler extends RequestHandlerBase
{
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException
{
// Show params
rsp.add( "params", req.getParams().toNamedList() );
// Write the streams...
if( req.getContentStreams() != null ) {
NamedList<Object> streams = new NamedList<Object>();
// Cycle through each stream
for( ContentStream content : req.getContentStreams() ) {
NamedList<Object> stream = new NamedList<Object>();
stream.add( "name", content.getName() );
stream.add( "fieldName", content.getSourceInfo() );
stream.add( "size", content.getSize() );
stream.add( "contentType", content.getContentType() );
stream.add( "stream", IOUtils.toString( content.getStream() ) );
streams.add( "stream", stream );
}
rsp.add( "streams", streams );
}
// Show the context
Map<Object,Object> context = req.getContext();
if( context != null ) {
NamedList ctx = new NamedList();
for( Map.Entry<Object,Object> entry : context.entrySet() ) {
ctx.add( entry.getKey().toString(), entry.getValue() );
}
rsp.add( "context", ctx );
}
}
//////////////////////// SolrInfoMBeans methods //////////////////////
@Override
public String getDescription() {
return "Dump handler (debug)";
}
@Override
public String getVersion() {
return "$Revision:$";
}
@Override
public String getSourceId() {
return "$Id:$";
}
@Override
public String getSource() {
return "$URL:$";
}
}

View File

@ -0,0 +1,113 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler;
import java.net.URL;
import org.apache.solr.core.SolrException;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.request.SolrParams;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrQueryResponse;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.util.NamedList;
import org.apache.solr.util.SolrPluginUtils;
/**
*
*/
public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfoMBean {
// statistics
// TODO: should we bother synchronizing these, or is an off-by-one error
// acceptable every million requests or so?
long numRequests;
long numErrors;
protected SolrParams defaults;
protected SolrParams appends;
protected SolrParams invariants;
/** shorten the class references for utilities */
private static class U extends SolrPluginUtils {
/* :NOOP */
}
public void init(NamedList args) {
// Copied from StandardRequestHandler
if( args != null ) {
Object o = args.get("defaults");
if (o != null && o instanceof NamedList) {
defaults = SolrParams.toSolrParams((NamedList)o);
}
o = args.get("appends");
if (o != null && o instanceof NamedList) {
appends = SolrParams.toSolrParams((NamedList)o);
}
o = args.get("invariants");
if (o != null && o instanceof NamedList) {
invariants = SolrParams.toSolrParams((NamedList)o);
}
}
}
public abstract void handleRequestBody( SolrQueryRequest req, SolrQueryResponse rsp ) throws Exception;
public void handleRequest(SolrQueryRequest req, SolrQueryResponse rsp) {
numRequests++;
try {
U.setDefaults(req,defaults,appends,invariants);
handleRequestBody( req, rsp );
}
catch( SolrException se ) {
numErrors++;
throw se;
}
catch( Exception e) {
numErrors++;
}
}
//////////////////////// SolrInfoMBeans methods //////////////////////
public String getName() {
return this.getClass().getName();
}
public abstract String getDescription();
public abstract String getSourceId();
public abstract String getSource();
public abstract String getVersion();
public Category getCategory() {
return Category.QUERYHANDLER;
}
public URL[] getDocs() {
return null; // this can be overridden, but not required
}
public NamedList getStatistics() {
NamedList lst = new NamedList();
lst.add("requests", numRequests);
lst.add("errors", numErrors);
return lst;
}
}
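For orientation, here is a minimal subclass sketch (not part of this commit, the names are made up): a handler only has to supply handleRequestBody() plus the SolrInfoMBean accessors, while request counting, error counting, and the defaults/appends/invariants handling come from the base class.

package org.apache.solr.handler;

import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrQueryResponse;

// Hypothetical handler, for illustration only.
public class EchoRequestHandler extends RequestHandlerBase
{
  @Override
  public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception
  {
    // defaults/appends/invariants were already applied by handleRequest()
    rsp.add( "echo", req.getParams().toNamedList() );
  }

  //////////////////////// SolrInfoMBeans methods //////////////////////
  @Override
  public String getDescription() {
    return "Echoes request parameters (illustration only)";
  }
  @Override
  public String getVersion() {
    return "$Revision:$";
  }
  @Override
  public String getSourceId() {
    return "$Id:$";
  }
  @Override
  public String getSource() {
    return "$URL:$";
  }
}

Registered with a path-style name, e.g. <requestHandler name="/debug/echo" class="solr.EchoRequestHandler" />, it would be picked up through the new "handler." package added to Config.findClass and reachable through the dispatch filter like the handlers in the solrconfig.xml hunk above.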

View File

@ -0,0 +1,39 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler;
/**
* A collection of params used by Update handlers
*/
public interface UpdateParams
{
/** wait till the command has flushed */
public static String WAIT_FLUSH = "waitFlush";
/** wait for the search to warm up */
public static String WAIT_SEARCHER = "waitSearcher";
/** overwrite indexing fields */
public static String OVERWRITE = "overwrite";
/** Commit everything after the command completes */
public static String COMMIT = "commit";
/** Commit everything after the command completes */
public static String OPTIMIZE = "optimize";
}

View File

@ -0,0 +1,399 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrException;
import org.apache.solr.request.ContentStream;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrQueryResponse;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.servlet.SolrServlet;
import org.apache.solr.update.AddUpdateCommand;
import org.apache.solr.update.CommitUpdateCommand;
import org.apache.solr.update.DeleteUpdateCommand;
import org.apache.solr.update.DocumentBuilder;
import org.apache.solr.update.UpdateHandler;
import org.apache.solr.util.NamedList;
import org.apache.solr.util.StrUtils;
import org.apache.solr.util.XML;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import org.xmlpull.v1.XmlPullParserFactory;
public class XmlUpdateRequestHandler extends RequestHandlerBase
{
public static Logger log = Logger.getLogger(XmlUpdateRequestHandler.class.getName());
private XmlPullParserFactory factory;
// This must be called AFTER solrCore has initialized!
// otherwise you get a big bad error loop
public void init(NamedList args)
{
super.init( args );
try {
factory = XmlPullParserFactory.newInstance();
factory.setNamespaceAware(false);
}
catch (XmlPullParserException e) {
throw new RuntimeException(e);
}
}
// TODO - this should be a general utility in another class
public static String getCharsetFromContentType( String contentType )
{
if( contentType != null ) {
int idx = contentType.toLowerCase().indexOf( "charset=" );
if( idx > 0 ) {
return contentType.substring( idx + "charset=".length() ).trim();
}
}
return null;
}
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception
{
Iterable<ContentStream> streams = req.getContentStreams();
if( streams == null ) {
throw new SolrException( 400, "missing content stream" );
}
// Cycle through each stream
for( ContentStream stream : req.getContentStreams() ) {
String charset = getCharsetFromContentType( stream.getContentType() );
Reader reader = null;
if( charset == null ) {
reader = new InputStreamReader( stream.getStream() );
}
else {
reader = new InputStreamReader( stream.getStream(), charset );
}
rsp.add( "update", this.update( reader ) );
// Make sure its closed
try { reader.close(); } catch( Exception ex ){}
}
}
public NamedList update( Reader reader ) throws Exception
{
SolrCore core = SolrCore.getSolrCore();
IndexSchema schema = core.getSchema();
UpdateHandler updateHandler = core.getUpdateHandler();
// TODO: What results should be returned?
NamedList res = new NamedList();
XmlPullParser xpp = factory.newPullParser();
long startTime=System.currentTimeMillis();
xpp.setInput(reader);
xpp.nextTag();
String currTag = xpp.getName();
if ("add".equals(currTag)) {
log.finest("SolrCore.update(add)");
AddUpdateCommand cmd = new AddUpdateCommand();
cmd.allowDups=false; // the default
int status=0;
boolean pendingAttr=false, committedAttr=false;
int attrcount = xpp.getAttributeCount();
for (int i=0; i<attrcount; i++) {
String attrName = xpp.getAttributeName(i);
String attrVal = xpp.getAttributeValue(i);
if ("allowDups".equals(attrName)) {
cmd.allowDups = StrUtils.parseBoolean(attrVal);
} else if ("overwritePending".equals(attrName)) {
cmd.overwritePending = StrUtils.parseBoolean(attrVal);
pendingAttr=true;
} else if ("overwriteCommitted".equals(attrName)) {
cmd.overwriteCommitted = StrUtils.parseBoolean(attrVal);
committedAttr=true;
} else {
log.warning("Unknown attribute id in add:" + attrName);
}
}
//set defaults for committed and pending based on allowDups value
if (!pendingAttr) cmd.overwritePending=!cmd.allowDups;
if (!committedAttr) cmd.overwriteCommitted=!cmd.allowDups;
DocumentBuilder builder = new DocumentBuilder(schema);
SchemaField uniqueKeyField = schema.getUniqueKeyField();
int eventType=0;
// accumulate responses
List<String> added = new ArrayList<String>(10);
while(true) {
// this may be our second time through the loop in the case
// that there are multiple docs in the add... so make sure that
// objects can handle that.
cmd.indexedId = null; // reset the id for this add
if (eventType !=0) {
eventType=xpp.getEventType();
if (eventType==XmlPullParser.END_DOCUMENT) break;
}
// eventType = xpp.next();
eventType = xpp.nextTag();
if (eventType == XmlPullParser.END_TAG || eventType == XmlPullParser.END_DOCUMENT) break; // should match </add>
readDoc(builder,xpp);
builder.endDoc();
cmd.doc = builder.getDoc();
log.finest("adding doc...");
updateHandler.addDoc(cmd);
String docId = null;
if (uniqueKeyField!=null)
docId = schema.printableUniqueKey(cmd.doc);
added.add(docId);
} // end while
// write log and result
StringBuilder out = new StringBuilder();
for (String docId: added)
if(docId != null)
out.append(docId + ",");
String outMsg = out.toString();
if(outMsg.length() > 0)
outMsg = outMsg.substring(0, outMsg.length() - 1);
log.info("added id={" + outMsg + "} in " + (System.currentTimeMillis()-startTime) + "ms");
// Add output
res.add( "added", outMsg );
} // end add
else if ("commit".equals(currTag) || "optimize".equals(currTag)) {
log.finest("parsing "+currTag);
CommitUpdateCommand cmd = new CommitUpdateCommand("optimize".equals(currTag));
boolean sawWaitSearcher=false, sawWaitFlush=false;
int attrcount = xpp.getAttributeCount();
for (int i=0; i<attrcount; i++) {
String attrName = xpp.getAttributeName(i);
String attrVal = xpp.getAttributeValue(i);
if ("waitFlush".equals(attrName)) {
cmd.waitFlush = StrUtils.parseBoolean(attrVal);
sawWaitFlush=true;
} else if ("waitSearcher".equals(attrName)) {
cmd.waitSearcher = StrUtils.parseBoolean(attrVal);
sawWaitSearcher=true;
} else {
log.warning("unexpected attribute commit/@" + attrName);
}
}
// If waitFlush is specified and waitSearcher wasn't, then
// clear waitSearcher.
if (sawWaitFlush && !sawWaitSearcher) {
cmd.waitSearcher=false;
}
updateHandler.commit(cmd);
if ("optimize".equals(currTag)) {
log.info("optimize 0 "+(System.currentTimeMillis()-startTime));
}
else {
log.info("commit 0 "+(System.currentTimeMillis()-startTime));
}
while (true) {
int eventType = xpp.nextTag();
if (eventType == XmlPullParser.END_TAG) break; // match </commit>
}
// add debug output
res.add( cmd.optimize?"optimize":"commit", "" );
} // end commit
else if ("delete".equals(currTag)) {
log.finest("parsing delete");
DeleteUpdateCommand cmd = new DeleteUpdateCommand();
cmd.fromPending=true;
cmd.fromCommitted=true;
int attrcount = xpp.getAttributeCount();
for (int i=0; i<attrcount; i++) {
String attrName = xpp.getAttributeName(i);
String attrVal = xpp.getAttributeValue(i);
if ("fromPending".equals(attrName)) {
cmd.fromPending = StrUtils.parseBoolean(attrVal);
} else if ("fromCommitted".equals(attrName)) {
cmd.fromCommitted = StrUtils.parseBoolean(attrVal);
} else {
log.warning("unexpected attribute delete/@" + attrName);
}
}
int eventType = xpp.nextTag();
currTag = xpp.getName();
String val = xpp.nextText();
if ("id".equals(currTag)) {
cmd.id = val;
updateHandler.delete(cmd);
log.info("delete(id " + val + ") 0 " +
(System.currentTimeMillis()-startTime));
} else if ("query".equals(currTag)) {
cmd.query = val;
updateHandler.deleteByQuery(cmd);
log.info("deleteByQuery(query " + val + ") 0 " +
(System.currentTimeMillis()-startTime));
} else {
log.warning("unexpected XML tag /delete/"+currTag);
throw new SolrException(400,"unexpected XML tag /delete/"+currTag);
}
res.add( "delete", "" );
while (xpp.nextTag() != XmlPullParser.END_TAG);
} // end delete
return res;
}
private void readDoc(DocumentBuilder builder, XmlPullParser xpp) throws IOException, XmlPullParserException {
// xpp should be at <doc> at this point
builder.startDoc();
int attrcount = xpp.getAttributeCount();
float docBoost = 1.0f;
for (int i=0; i<attrcount; i++) {
String attrName = xpp.getAttributeName(i);
String attrVal = xpp.getAttributeValue(i);
if ("boost".equals(attrName)) {
docBoost = Float.parseFloat(attrVal);
builder.setBoost(docBoost);
} else {
log.warning("Unknown attribute doc/@" + attrName);
}
}
if (docBoost != 1.0f) builder.setBoost(docBoost);
// while (findNextTag(xpp,"field") != XmlPullParser.END_DOCUMENT) {
while(true) {
int eventType = xpp.nextTag();
if (eventType == XmlPullParser.END_TAG) break; // </doc>
String tname=xpp.getName();
// System.out.println("FIELD READER AT TAG " + tname);
if (!"field".equals(tname)) {
log.warning("unexpected XML tag doc/"+tname);
throw new SolrException(400,"unexpected XML tag doc/"+tname);
}
//
// get field name and parse field attributes
//
attrcount = xpp.getAttributeCount();
String name=null;
float boost=1.0f;
boolean isNull=false;
for (int i=0; i<attrcount; i++) {
String attrName = xpp.getAttributeName(i);
String attrVal = xpp.getAttributeValue(i);
if ("name".equals(attrName)) {
name=attrVal;
} else if ("boost".equals(attrName)) {
boost=Float.parseFloat(attrVal);
} else if ("null".equals(attrName)) {
isNull=StrUtils.parseBoolean(attrVal);
} else {
log.warning("Unknown attribute doc/field/@" + attrName);
}
}
// now get the field value
String val = xpp.nextText(); // todo... text event for <field></field>???
// need this line for isNull???
// Don't add fields marked as null (for now at least)
if (!isNull) {
if (boost != 1.0f) {
builder.addField(name,val,boost);
} else {
builder.addField(name,val);
}
}
// do I have to do a nextTag here to read the end_tag?
} // end field loop
}
/**
* A convenience method for getting back a simple XML string indicating
* success or failure from an XML formatted Update (from the Reader)
*/
public void doLegacyUpdate(Reader input, Writer output) {
try {
NamedList ignored = this.update( input );
output.write("<result status=\"0\"></result>");
}
catch( Exception ex ) {
try {
XML.writeXML(output,"result",SolrException.toStr(ex),"status","1");
} catch (Exception ee) {
log.severe("Error writing to output stream: "+ee);
}
}
}
//////////////////////// SolrInfoMBeans methods //////////////////////
@Override
public String getDescription() {
return "Add documents with XML";
}
@Override
public String getVersion() {
return "$Revision:$";
}
@Override
public String getSourceId() {
return "$Id:$";
}
@Override
public String getSource() {
return "$URL:$";
}
}
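For reference, the command vocabulary the pull parser above accepts: <add> (allowDups, overwritePending, overwriteCommitted) wrapping <doc> and <field> elements with optional boost/null attributes, <commit> and <optimize> (waitFlush, waitSearcher), and <delete> with a single <id> or <query> child. The sketch below just prints example bodies; the ids and field names are invented, and actually applying them needs a live core (for instance through the /update/xml URL registered in solrconfig.xml, or TestHarness.update() later in this commit).

public class XmlUpdateCommandExamples {
  public static void main(String[] args) {
    String[] commands = {
      // <add> attribute defaults: overwritePending/overwriteCommitted follow allowDups
      "<add allowDups=\"false\">"
        + "<doc boost=\"2.0\">"
        + "<field name=\"id\">EXAMPLE-1</field>"
        + "<field name=\"name\" boost=\"1.5\">some value</field>"
        + "</doc></add>",
      // commit/optimize take waitFlush and waitSearcher
      "<commit waitFlush=\"true\" waitSearcher=\"true\"/>",
      "<optimize/>",
      // delete takes exactly one <id> or <query> child
      "<delete><id>EXAMPLE-1</id></delete>",
      "<delete fromPending=\"true\" fromCommitted=\"false\"><query>name:value</query></delete>"
    };
    for (String cmd : commands) {
      System.out.println(cmd);
    }
  }
}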

View File

@ -0,0 +1,29 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.request;
import java.io.IOException;
import java.io.InputStream;
public interface ContentStream {
String getName();
String getSourceInfo();
String getContentType();
Long getSize(); // size if we know it, otherwise null
InputStream getStream() throws IOException;
}
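A minimal implementation sketch (hypothetical, not part of this commit) wrapping a String -- roughly the kind of stream a stream.body parameter turns into once SolrRequestParsers builds the request:

package org.apache.solr.request;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;

// Hypothetical helper, for illustration only.
public class StringContentStream implements ContentStream
{
  private final String body;

  public StringContentStream( String body ) {
    this.body = body;
  }

  public String getName()        { return null; }
  public String getSourceInfo()  { return "string"; }
  public String getContentType() { return "text/plain; charset=UTF-8"; }

  public Long getSize() {
    try {
      return new Long( body.getBytes( "UTF-8" ).length );
    }
    catch( UnsupportedEncodingException ex ) {
      return null;
    }
  }

  public InputStream getStream() throws IOException {
    return new ByteArrayInputStream( body.getBytes( "UTF-8" ) );
  }
}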

View File

@ -17,41 +17,34 @@
package org.apache.solr.request;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrException;
import static org.apache.solr.request.SolrParams.FACET;
import static org.apache.solr.request.SolrParams.FQ;
import static org.apache.solr.request.SolrParams.Q;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.DocList;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.DocListAndSet;
import org.apache.solr.search.SolrQueryParser;
import org.apache.solr.search.QueryParsing;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrQueryResponse;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.util.NamedList;
import org.apache.solr.util.HighlightingUtils;
import org.apache.solr.util.SolrPluginUtils;
import org.apache.solr.util.DisMaxParams;
import static org.apache.solr.request.SolrParams.*;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.queryParser.QueryParser;
/* this is the standard logging framework for Solr */
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.net.URL;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrException;
import org.apache.solr.handler.RequestHandlerBase;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.search.DocListAndSet;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.QueryParsing;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.SolrQueryParser;
import org.apache.solr.util.DisMaxParams;
import org.apache.solr.util.HighlightingUtils;
import org.apache.solr.util.NamedList;
import org.apache.solr.util.SolrPluginUtils;
/**
* <p>
@ -122,9 +115,7 @@ import java.net.URL;
* :TODO: make bf,pf,qf multival params now that SolrParams supports them
* </pre>
*/
public class DisMaxRequestHandler
implements SolrRequestHandler, SolrInfoMBean {
public class DisMaxRequestHandler extends RequestHandlerBase {
/**
* A field we can't ever find in any schema, so we can safely tell
@ -133,15 +124,6 @@ public class DisMaxRequestHandler
*/
private static String IMPOSSIBLE_FIELD_NAME = "\uFFFC\uFFFC\uFFFC";
// statistics
// TODO: should we bother synchronizing these, or is an off-by-one error
// acceptable every million requests or so?
long numRequests;
long numErrors;
SolrParams defaults;
SolrParams appends;
SolrParams invariants;
/** shorten the class references for utilities */
private static class U extends SolrPluginUtils {
@ -156,74 +138,24 @@ public class DisMaxRequestHandler
super();
}
/* returns URLs to the Wiki pages */
public URL[] getDocs() {
/* :TODO: need docs */
return new URL[0];
}
public String getName() {
return this.getClass().getName();
}
public NamedList getStatistics() {
NamedList lst = new NamedList();
lst.add("requests", numRequests);
lst.add("errors", numErrors);
return lst;
}
public String getVersion() {
return "$Revision:$";
}
public String getDescription() {
return "DisjunctionMax Request Handler: Does relevancy based queries "
+ "accross a variety of fields using configured boosts";
}
public Category getCategory() {
return Category.QUERYHANDLER;
}
public String getSourceId() {
return "$Id:$";
}
public String getSource() {
return "$URL:$";
}
/** sets the default variables for any usefull info it finds in the config
* if a config option is not inthe format expected, logs an warning
* and ignores it..
*/
public void init(NamedList args) {
// Handle an old format
if (-1 == args.indexOf("defaults",0)) {
// no explict defaults list, use all args implicitly
// indexOf so "<null name="defaults"/> is valid indicator of no defaults
defaults = SolrParams.toSolrParams(args);
} else {
Object o = args.get("defaults");
if (o != null && o instanceof NamedList) {
defaults = SolrParams.toSolrParams((NamedList)o);
}
o = args.get("appends");
if (o != null && o instanceof NamedList) {
appends = SolrParams.toSolrParams((NamedList)o);
}
o = args.get("invariants");
if (o != null && o instanceof NamedList) {
invariants = SolrParams.toSolrParams((NamedList)o);
}
// otherwise use the new one.
super.init( args );
}
}
public void handleRequest(SolrQueryRequest req, SolrQueryResponse rsp) {
numRequests++;
try {
U.setDefaults(req,defaults,appends,invariants);
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception
{
SolrParams params = req.getParams();
int flags = 0;
@ -385,12 +317,6 @@ public class DisMaxRequestHandler
if(sumData != null)
rsp.add("highlighting", sumData);
}
} catch (Exception e) {
SolrException.log(SolrCore.log,e);
rsp.setException(e);
numErrors++;
}
}
/**
@ -411,4 +337,34 @@ public class DisMaxRequestHandler
}
//////////////////////// SolrInfoMBeans methods //////////////////////
@Override
public String getDescription() {
return "DisjunctionMax Request Handler: Does relevancy based queries "
+ "accross a variety of fields using configured boosts";
}
@Override
public String getVersion() {
return "$Revision:$";
}
@Override
public String getSourceId() {
return "$Id:$";
}
@Override
public String getSource() {
return "$URL:$";
}
@Override
public URL[] getDocs() {
try {
return new URL[] { new URL("http://wiki.apache.org/solr/DisMaxRequestHandler") };
}
catch( MalformedURLException ex ) { return null; }
}
}

View File

@ -127,6 +127,13 @@ public abstract class SolrParams {
*/
public static final String FACET_PREFIX = "facet.prefix";
/** If the content stream should come from a URL */
public static final String STREAM_URL = "stream.url";
/** If the content stream should come directly from a field */
public static final String STREAM_BODY = "stream.body";
/** returns the String value of a param, or null if not set */
public abstract String get(String param);

View File

@ -40,6 +40,10 @@ public interface SolrQueryRequest {
*/
public void setParams(SolrParams params);
/** A Collection of ContentStreams passed to the request
*/
public Iterable<ContentStream> getContentStreams();
/** Returns the original request parameters. As this
* does not normally include configured defaults
* it's more suitable for logging.
@ -117,3 +121,5 @@ public interface SolrQueryRequest {
******/
}

View File

@ -23,6 +23,7 @@ import org.apache.solr.schema.IndexSchema;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrException;
import java.util.Iterator;
import java.util.Map;
import java.util.HashMap;
@ -56,6 +57,7 @@ public abstract class SolrQueryRequestBase implements SolrQueryRequest {
protected final SolrParams origParams;
protected SolrParams params;
protected Map<Object,Object> context;
protected Iterable<ContentStream> streams;
public SolrQueryRequestBase(SolrCore core, SolrParams params) {
this.core = core;
@ -115,15 +117,18 @@ public abstract class SolrQueryRequestBase implements SolrQueryRequest {
return s==null ? defval : s;
}
@Deprecated
public String getQueryString() {
return params.get(SolrParams.Q);
}
@Deprecated
public String getQueryType() {
return params.get(SolrParams.QT);
}
// starting position in matches to return to client
@Deprecated
public int getStart() {
return params.getInt(SolrParams.START, 0);
}
@ -175,6 +180,15 @@ public abstract class SolrQueryRequestBase implements SolrQueryRequest {
}
}
/** A Collection of ContentStreams passed to the request
*/
public Iterable<ContentStream> getContentStreams() {
return streams;
}
public void setContentStreams( Iterable<ContentStream> s ) {
streams = s;
}
public String getParamString() {
return origParams.toString();

View File

@ -19,9 +19,10 @@ package org.apache.solr.request;
import org.apache.lucene.search.*;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.net.URL;
import org.apache.solr.util.StrUtils;
import org.apache.solr.util.NamedList;
@ -29,8 +30,11 @@ import org.apache.solr.util.HighlightingUtils;
import org.apache.solr.util.SolrPluginUtils;
import org.apache.solr.search.*;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrException;
import org.apache.solr.handler.RequestHandlerBase;
import com.sun.org.apache.xerces.internal.util.URI.MalformedURIException;
import static org.apache.solr.request.SolrParams.*;
/**
@ -53,43 +57,17 @@ import static org.apache.solr.request.SolrParams.*;
* </ul>
*
*/
public class StandardRequestHandler implements SolrRequestHandler, SolrInfoMBean {
public class StandardRequestHandler extends RequestHandlerBase {
// statistics
// TODO: should we bother synchronizing these, or is an off-by-one error
// acceptable every million requests or so?
long numRequests;
long numErrors;
SolrParams defaults;
SolrParams appends;
SolrParams invariants;
/** shorten the class references for utilities */
private static class U extends SolrPluginUtils {
/* :NOOP */
}
public void init(NamedList args) {
Object o = args.get("defaults");
if (o != null && o instanceof NamedList) {
defaults = SolrParams.toSolrParams((NamedList)o);
}
o = args.get("appends");
if (o != null && o instanceof NamedList) {
appends = SolrParams.toSolrParams((NamedList)o);
}
o = args.get("invariants");
if (o != null && o instanceof NamedList) {
invariants = SolrParams.toSolrParams((NamedList)o);
}
}
public void handleRequest(SolrQueryRequest req, SolrQueryResponse rsp) {
numRequests++;
try {
U.setDefaults(req,defaults,appends,invariants);
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception
{
SolrParams p = req.getParams();
String sreq = p.get(Q);
@ -165,17 +143,6 @@ public class StandardRequestHandler implements SolrRequestHandler, SolrInfoMBean
results.docList, query, req, new String[]{defaultField});
if(sumData != null)
rsp.add("highlighting", sumData);
} catch (SolrException e) {
rsp.setException(e);
numErrors++;
return;
} catch (Exception e) {
SolrException.log(SolrCore.log,e);
rsp.setException(e);
numErrors++;
return;
}
}
/**
@ -199,23 +166,14 @@ public class StandardRequestHandler implements SolrRequestHandler, SolrInfoMBean
//////////////////////// SolrInfoMBeans methods //////////////////////
public String getName() {
return StandardRequestHandler.class.getName();
}
public String getVersion() {
return SolrCore.version;
return "$Revision$";
}
public String getDescription() {
return "The standard Solr request handler";
}
public Category getCategory() {
return Category.QUERYHANDLER;
}
public String getSourceId() {
return "$Id$";
}
@ -225,14 +183,10 @@ public class StandardRequestHandler implements SolrRequestHandler, SolrInfoMBean
}
public URL[] getDocs() {
return null;
}
try {
return new URL[] { new URL("http://wiki.apache.org/solr/StandardRequestHandler") };
}
catch( MalformedURLException ex ) { return null; }
public NamedList getStatistics() {
NamedList lst = new NamedList();
lst.add("requests", numRequests);
lst.add("errors", numErrors);
return lst;
}
}

View File

@ -19,6 +19,7 @@ package org.apache.solr.util;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.handler.XmlUpdateRequestHandler;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.QueryResponseWriter;
import org.apache.solr.request.SolrQueryRequest;
@ -36,9 +37,12 @@ import javax.xml.xpath.XPathFactory;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Reader;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.io.Writer;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
@ -62,6 +66,7 @@ public class TestHarness {
private SolrCore core;
private XPath xpath = XPathFactory.newInstance().newXPath();
private DocumentBuilder builder;
XmlUpdateRequestHandler updater;
/**
* Assumes "solrconfig.xml" is the config file to use, and
@ -93,26 +98,34 @@ public class TestHarness {
SolrConfig.initConfig(confFile);
core = new SolrCore(dataDirectory, new IndexSchema(schemaFile));
builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
updater = new XmlUpdateRequestHandler();
updater.init( null );
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* Processes an "update" (add, commit or optimize) and
* returns the response as a String.
*
* The better approach is to instantiate an UpdateHandler directly
*
* @param xml The XML of the update
* @return The XML response to the update
*/
@Deprecated
public String update(String xml) {
StringReader req = new StringReader(xml);
StringWriter writer = new StringWriter(32000);
core.update(req, writer);
return writer.toString();
updater.doLegacyUpdate(req, writer);
return writer.toString();
}

View File

@ -0,0 +1,113 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.servlet;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequestWrapper;
import org.apache.commons.io.IOUtils;
import org.apache.solr.core.Config;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.ContentStream;
import org.apache.solr.request.MapSolrParams;
import org.apache.solr.request.MultiMapSolrParams;
import org.apache.solr.request.SolrParams;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.util.AbstractSolrTestCase;
import junit.framework.TestCase;
public class SolrRequestParserTest extends AbstractSolrTestCase {
public String getSchemaFile() { return "schema.xml"; }
public String getSolrConfigFile() { return "solrconfig.xml"; }
SolrRequestParsers parser;
public void setUp() throws Exception {
super.setUp();
parser = new SolrRequestParsers( SolrCore.getSolrCore(), SolrConfig.config );
}
public void testStreamBody() throws Exception
{
String body1 = "AMANAPLANPANAMA";
String body2 = "qwertasdfgzxcvb";
String body3 = "1234567890";
Map<String,String[]> args = new HashMap<String, String[]>();
args.put( SolrParams.STREAM_BODY, new String[] {body1} );
// Make sure it got a single stream in and out ok
List<ContentStream> streams = new ArrayList<ContentStream>();
parser.buildRequestFrom( new MultiMapSolrParams( args ), streams );
assertEquals( 1, streams.size() );
assertEquals( body1, IOUtils.toString( streams.get(0).getStream() ) );
// Now add three and make sure they come out ok
streams = new ArrayList<ContentStream>();
args.put( SolrParams.STREAM_BODY, new String[] {body1,body2,body3} );
parser.buildRequestFrom( new MultiMapSolrParams( args ), streams );
assertEquals( 3, streams.size() );
ArrayList<String> input = new ArrayList<String>();
ArrayList<String> output = new ArrayList<String>();
input.add( body1 );
input.add( body2 );
input.add( body3 );
output.add( IOUtils.toString( streams.get(0).getStream() ) );
output.add( IOUtils.toString( streams.get(1).getStream() ) );
output.add( IOUtils.toString( streams.get(2).getStream() ) );
// sort them so the output is consistent
Collections.sort( input );
Collections.sort( output );
assertEquals( input.toString(), output.toString() );
}
public void testStreamURL() throws Exception
{
boolean ok = false;
String url = "http://svn.apache.org/repos/asf/lucene/solr/trunk/";
String txt = null;
try {
txt = IOUtils.toString( new URL(url).openStream() );
}
catch( Exception ex ) {
// TODO - should it fail/skip?
fail( "this test only works if you have a network connection." );
return;
}
Map<String,String[]> args = new HashMap<String, String[]>();
args.put( SolrParams.STREAM_URL, new String[] {url} );
// Make sure it got a single stream in and out ok
List<ContentStream> streams = new ArrayList<ContentStream>();
parser.buildRequestFrom( new MultiMapSolrParams( args ), streams );
assertEquals( 1, streams.size() );
assertEquals( txt, IOUtils.toString( streams.get(0).getStream() ) );
}
}

View File

@ -255,6 +255,8 @@
</lst>
</requestHandler>
<!-- enable streaming for testing... -->
<requestParsers enableRemoteStreaming="true" multipartUploadLimitInKB="2048" />
<admin>
<defaultQuery>solr</defaultQuery>
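The <requestParsers> element added to the test config above is what SolrRequestParserTest exercises: content can reach a handler inline via the stream.body (SolrParams.STREAM_BODY) parameter, and, because enableRemoteStreaming="true", also by reference via stream.url. A hedged client-side sketch of the inline case; the localhost:8983 address and the "/update/xml" handler name are assumptions about the deployment, not part of this patch:

import java.io.InputStream;
import java.net.URL;
import java.net.URLEncoder;
import org.apache.commons.io.IOUtils;
import org.apache.solr.request.SolrParams;

public class StreamBodyExample {
  public static void main(String[] args) throws Exception {
    // stream.body carries the content inline; stream.url would name a remote source instead,
    // and only works when enableRemoteStreaming="true" as configured above.
    String query = SolrParams.STREAM_BODY + "=" + URLEncoder.encode("<commit/>", "UTF-8");
    URL url = new URL("http://localhost:8983/solr/update/xml?" + query);
    InputStream in = url.openStream();           // plain GET; the parser turns stream.body into a ContentStream
    System.out.println(IOUtils.toString(in));    // handler's XML response
    in.close();
  }
}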

View File

@ -30,6 +30,17 @@
"com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl"/> "com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl"/>
--> -->
<!-- Any path (name) registered in solrconfig.xml will be sent to this filter -->
<filter>
<filter-name>SolrRequestFilter</filter-name>
<filter-class>org.apache.solr.servlet.SolrDispatchFilter</filter-class>
</filter>
<filter-mapping>
<filter-name>SolrRequestFilter</filter-name>
<url-pattern>/*</url-pattern>
</filter-mapping>
<!-- Otherwise it will continue to the old servlets -->
<servlet>
<servlet-name>SolrServer</servlet-name>

View File

@ -0,0 +1,160 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.servlet;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.logging.Logger;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.naming.NoInitialContextException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.solr.core.Config;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrException;
import org.apache.solr.request.QueryResponseWriter;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrQueryResponse;
import org.apache.solr.request.SolrRequestHandler;
/**
* This filter looks at the incoming URL and maps it to handlers defined in solrconfig.xml
*/
public class SolrDispatchFilter implements Filter
{
final Logger log = Logger.getLogger(SolrDispatchFilter.class.getName());
protected SolrCore core;
protected SolrRequestParsers parsers;
public void init(FilterConfig config) throws ServletException
{
log.info("SolrDispatchFilter.init()");
try {
Context c = new InitialContext();
/***
System.out.println("Enumerating JNDI Context=" + c);
NamingEnumeration<NameClassPair> en = c.list("java:comp/env");
while (en.hasMore()) {
NameClassPair ncp = en.next();
System.out.println(" ENTRY:" + ncp);
}
System.out.println("JNDI lookup=" + c.lookup("java:comp/env/solr/home"));
***/
String home = (String)c.lookup("java:comp/env/solr/home");
if (home!=null) Config.setInstanceDir(home);
} catch (NoInitialContextException e) {
log.info("JNDI not configured for Solr (NoInitialContextEx)");
} catch (NamingException e) {
log.info("No /solr/home in JNDI");
}
log.info("user.dir=" + System.getProperty("user.dir"));
core = SolrCore.getSolrCore();
parsers = new SolrRequestParsers( core, SolrConfig.config );
log.info("SolrDispatchFilter.init() done");
}
public void destroy() {
core.close();
}
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException
{
if( request instanceof HttpServletRequest) {
HttpServletRequest req = (HttpServletRequest)request;
try {
String path = req.getServletPath();
if( req.getPathInfo() != null ) {
// this lets you handle /update/commit when /update is a servlet
path += req.getPathInfo();
}
int idx = path.indexOf( ':' );
if( idx > 0 ) {
// save the portion after the ':' for a 'handler' path parameter
path = path.substring( 0, idx );
}
SolrRequestHandler handler = core.getRequestHandler( path );
if( handler != null ) {
SolrQueryRequest solrReq = parsers.parse( path, req );
SolrQueryResponse solrRsp = new SolrQueryResponse();
core.execute( handler, solrReq, solrRsp );
if( solrRsp.getException() != null ) {
sendError( (HttpServletResponse)response, solrRsp.getException() );
return;
}
// Now write it out
QueryResponseWriter responseWriter = core.getQueryResponseWriter(solrReq);
response.setContentType(responseWriter.getContentType(solrReq, solrRsp));
PrintWriter out = response.getWriter();
responseWriter.write(out, solrReq, solrRsp);
return;
}
}
catch( Throwable ex ) {
sendError( (HttpServletResponse)response, ex );
return;
}
}
// Otherwise let the webapp handle the request
chain.doFilter(request, response);
}
protected void sendError(HttpServletResponse res, Throwable ex) throws IOException
{
int code=500;
String trace = "";
if( ex instanceof SolrException ) {
code = ((SolrException)ex).code();
}
// For any regular code, don't include the stack trace
if( code == 500 || code < 100 ) {
StringWriter sw = new StringWriter();
ex.printStackTrace(new PrintWriter(sw));
trace = "\n\n"+sw.toString();
SolrException.logOnce(log,null,ex );
// non-standard codes have undefined results with various servers
if( code < 100 ) {
log.warning( "invalid return code: "+code );
code = 500;
}
}
res.sendError( code, ex.getMessage() + trace );
}
}
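To make the dispatch rule above concrete: the handler name is everything up to the first ':' in the servlet path (plus path info), the lookup uses that name, and the remainder ends up under the "path" key of the request context via SolrRequestParsers.parse(). A small illustrative sketch; the class name and sample path are hypothetical, not part of the patch:

public class DispatchPathExample {
  public static void main(String[] args) {
    // e.g. a request to http://host/solr/update/commit:mydata
    String path = "/update/commit:mydata";     // servletPath + pathInfo as assembled by the filter
    int idx = path.indexOf(':');
    String handlerName = (idx > 0) ? path.substring(0, idx) : path;   // "/update/commit" -> core.getRequestHandler(...)
    String extra = (idx > 0) ? path.substring(idx + 1) : null;        // "mydata" -> request context key "path"
    System.out.println(handlerName + " | " + extra);
  }
}

If no handler is registered under that name, the filter simply calls chain.doFilter() and the request falls through to the legacy servlets.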

View File

@ -0,0 +1,383 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.servlet;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import javax.servlet.http.HttpServletRequest;
import javax.xml.xpath.XPathConstants;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.solr.core.Config;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrException;
import org.apache.solr.request.ContentStream;
import org.apache.solr.request.MultiMapSolrParams;
import org.apache.solr.request.ServletSolrParams;
import org.apache.solr.request.SolrParams;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrQueryRequestBase;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
public class SolrRequestParsers
{
final Logger log = Logger.getLogger(SolrRequestParsers.class.getName());
// Should these constants be in a more public place?
public static final String MULTIPART = "multipart";
public static final String RAW = "raw";
public static final String SIMPLE = "simple";
public static final String STANDARD = "standard";
private HashMap<String, SolrRequestParser> parsers;
private SolrCore core;
private boolean enableRemoteStreams = false;
private StandardRequestParser standard;
public SolrRequestParsers( SolrCore core, Config config )
{
this.core = core;
long uploadLimitKB = 2000; // 2MB default
NodeList nodes = (NodeList)config.evaluate("requestParsers", XPathConstants.NODESET);
if( nodes!=null && nodes.getLength()>0 ) {
// only look at the first node.
NamedNodeMap attrs = nodes.item(0).getAttributes();
Node node = attrs.getNamedItem( "enableRemoteStreaming" );
if( node != null ) {
enableRemoteStreams = Boolean.parseBoolean( node.getTextContent() );
}
node = attrs.getNamedItem( "multipartUploadLimitInKB" );
if( node != null ) {
uploadLimitKB = Long.parseLong( node.getTextContent() );
}
}
MultipartRequestParser multi = new MultipartRequestParser( uploadLimitKB );
RawRequestParser raw = new RawRequestParser();
standard = new StandardRequestParser( multi, raw );
// I don't see a need to have this publicly configured just yet
// adding it is trivial
parsers = new HashMap<String, SolrRequestParser>();
parsers.put( MULTIPART, multi );
parsers.put( RAW, raw );
parsers.put( SIMPLE, new SimpleRequestParser() );
parsers.put( STANDARD, standard );
parsers.put( "", standard );
}
public SolrQueryRequest parse( String path, HttpServletRequest req ) throws Exception
{
SolrRequestParser parser = standard;
// TODO -- in the future, we could pick a different parser based on the request
// Pick the parser from the request...
ArrayList<ContentStream> streams = new ArrayList<ContentStream>(1);
SolrParams params = parser.parseParamsAndFillStreams( req, streams );
SolrQueryRequest sreq = buildRequestFrom( params, streams );
// If there is some path left over, add it to the context
int idx = req.getServletPath().indexOf( ':' );
if( idx > 0 ) {
sreq.getContext().put( "path", req.getServletPath().substring( idx+1 ) );
}
return sreq;
}
SolrQueryRequest buildRequestFrom( SolrParams params, List<ContentStream> streams ) throws Exception
{
// Handle anything with a remote URL
String[] strs = params.getParams( SolrParams.STREAM_URL );
if( strs != null ) {
if( !enableRemoteStreams ) {
throw new SolrException( 400, "Remote Streaming is disabled." );
}
for( final String url : strs ) {
final URLConnection conn = new URL(url).openConnection();
streams.add( new ContentStream() {
public String getContentType() { return conn.getContentType(); }
public String getName() { return url; }
public Long getSize() { return new Long( conn.getContentLength() ); }
public String getSourceInfo() {
return SolrParams.STREAM_URL;
}
public InputStream getStream() throws IOException {
return conn.getInputStream();
}
});
}
}
// Check for streams in the request parameters
strs = params.getParams( SolrParams.STREAM_BODY );
if( strs != null ) {
for( final String body : strs ) {
streams.add( new ContentStream() {
public String getContentType() { return null; } // Is there anything meaningful?
public String getName() { return null; }
public Long getSize() { return null; }
public String getSourceInfo() {
return SolrParams.STREAM_BODY;
}
public InputStream getStream() throws IOException {
return new ByteArrayInputStream( body.getBytes() );
}
});
}
}
SolrQueryRequestBase q = new SolrQueryRequestBase( core, params ) { };
if( streams != null && streams.size() > 0 ) {
q.setContentStreams( streams );
}
return q;
}
/**
* Given a standard query string, map it into Solr params
*/
public static MultiMapSolrParams parseQueryString(String queryString)
{
Map<String,String[]> map = new HashMap<String, String[]>();
if( queryString != null && queryString.length() > 0 ) {
for( String kv : queryString.split( "&" ) ) {
int idx = kv.indexOf( '=' );
if( idx > 0 ) {
String name = URLDecoder.decode( kv.substring( 0, idx ));
String value = URLDecoder.decode( kv.substring( idx+1 ));
MultiMapSolrParams.addParam( name, value, map );
}
else {
String name = URLDecoder.decode( kv );
MultiMapSolrParams.addParam( name, "", map );
}
}
}
return new MultiMapSolrParams( map );
}
}
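A quick illustration of parseQueryString above (parameter names and values are made up for the example): repeated keys accumulate into multi-valued params, and a bare key becomes an empty-string value.

import org.apache.solr.request.MultiMapSolrParams;
import org.apache.solr.servlet.SolrRequestParsers;

public class QueryStringExample {
  public static void main(String[] args) {
    MultiMapSolrParams p =
        SolrRequestParsers.parseQueryString("q=solr&fq=inStock:true&fq=cat:book&facet");
    System.out.println(p.get("q"));                 // solr
    System.out.println(p.getParams("fq").length);   // 2
    System.out.println("[" + p.get("facet") + "]"); // [] -- bare key, empty value
  }
}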
//-----------------------------------------------------------------
//-----------------------------------------------------------------
// I guess we don't really even need the interface, but I'll keep it here just for kicks
interface SolrRequestParser
{
public SolrParams parseParamsAndFillStreams(
final HttpServletRequest req, ArrayList<ContentStream> streams ) throws Exception;
}
//-----------------------------------------------------------------
//-----------------------------------------------------------------
/**
* The simple parser just uses the params directly
*/
class SimpleRequestParser implements SolrRequestParser
{
public SolrParams parseParamsAndFillStreams(
final HttpServletRequest req, ArrayList<ContentStream> streams ) throws Exception
{
return new ServletSolrParams(req);
}
}
/**
* The raw parser wraps the request body in a single ContentStream; params come from the query string
*/
class RawRequestParser implements SolrRequestParser
{
public SolrParams parseParamsAndFillStreams(
final HttpServletRequest req, ArrayList<ContentStream> streams ) throws Exception
{
streams.add( new ContentStream() {
public String getContentType() {
return req.getContentType();
}
public String getName() {
return null; // Is there any meaningful name?
}
public String getSourceInfo() {
return null; // Is there any meaningful source info?
}
public Long getSize() {
String v = req.getHeader( "Content-Length" );
if( v != null ) {
return Long.valueOf( v );
}
return null;
}
public InputStream getStream() throws IOException {
return req.getInputStream();
}
});
return SolrRequestParsers.parseQueryString( req.getQueryString() );
}
}
/**
* Extract Multipart streams
*/
class MultipartRequestParser implements SolrRequestParser
{
private long uploadLimitKB;
public MultipartRequestParser( long limit )
{
uploadLimitKB = limit;
}
public SolrParams parseParamsAndFillStreams(
final HttpServletRequest req, ArrayList<ContentStream> streams ) throws Exception
{
if( !ServletFileUpload.isMultipartContent(req) ) {
throw new SolrException( 400, "Not multipart content! "+req.getContentType() );
}
MultiMapSolrParams params = SolrRequestParsers.parseQueryString( req.getQueryString() );
// Create a factory for disk-based file items
DiskFileItemFactory factory = new DiskFileItemFactory();
// Set factory constraints
// TODO - configure factory.setSizeThreshold(yourMaxMemorySize);
// TODO - configure factory.setRepository(yourTempDirectory);
// Create a new file upload handler
ServletFileUpload upload = new ServletFileUpload(factory);
upload.setSizeMax( uploadLimitKB*1024 );
// Parse the request
List items = upload.parseRequest(req);
Iterator iter = items.iterator();
while (iter.hasNext()) {
FileItem item = (FileItem) iter.next();
// If its a form field, put it in our parameter map
if (item.isFormField()) {
MultiMapSolrParams.addParam(
item.getFieldName(),
item.getString(), params.getMap() );
}
// Only add it if it actually has something...
else if( item.getSize() > 0 ) {
streams.add( new FileItemContentStream( item ) );
}
}
return params;
}
/**
* Wrap a FileItem as a ContentStream
*/
private static class FileItemContentStream implements ContentStream
{
FileItem item;
public FileItemContentStream( FileItem f )
{
item = f;
}
public String getContentType() {
return item.getContentType();
}
public String getName() {
return item.getName();
}
public InputStream getStream() throws IOException {
return item.getInputStream();
}
public String getSourceInfo() {
return item.getFieldName();
}
public Long getSize()
{
return item.getSize();
}
}
}
/**
* The default logic: picks a parser based on the HTTP method and content type
*/
class StandardRequestParser implements SolrRequestParser
{
MultipartRequestParser multipart;
RawRequestParser raw;
StandardRequestParser( MultipartRequestParser multi, RawRequestParser raw )
{
this.multipart = multi;
this.raw = raw;
}
public SolrParams parseParamsAndFillStreams(
final HttpServletRequest req, ArrayList<ContentStream> streams ) throws Exception
{
String method = req.getMethod().toUpperCase();
if( "GET".equals( method ) ) {
return new ServletSolrParams(req);
}
if( "POST".equals( method ) ) {
String contentType = req.getContentType();
if( contentType != null ) {
if( "application/x-www-form-urlencoded".equals( contentType.toLowerCase() ) ) {
return new ServletSolrParams(req); // just get the params from parameterMap
}
if( ServletFileUpload.isMultipartContent(req) ) {
return multipart.parseParamsAndFillStreams(req, streams);
}
}
return raw.parseParamsAndFillStreams(req, streams);
}
throw new SolrException( 400, "Unsuported method: "+method );
}
}
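Put together, the standard parser answers a plain GET or a form-encoded POST from request parameters, splits a multipart POST into form fields plus file streams, and hands any other POST body to the handler as one raw ContentStream. A hedged client-side sketch of the raw case, assuming a Solr instance at localhost:8983 with a handler registered as "/update/xml" behind the dispatch filter (deployment details are assumptions, not part of this patch):

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class RawPostExample {
  public static void main(String[] args) throws Exception {
    URL url = new URL("http://localhost:8983/solr/update/xml");
    HttpURLConnection con = (HttpURLConnection) url.openConnection();
    con.setRequestMethod("POST");
    con.setDoOutput(true);
    // Not form-encoded and not multipart, so StandardRequestParser delegates to RawRequestParser
    // and the handler receives the body as a single ContentStream.
    con.setRequestProperty("Content-Type", "text/xml; charset=UTF-8");
    OutputStream out = con.getOutputStream();
    out.write("<commit/>".getBytes("UTF-8"));
    out.close();
    System.out.println("HTTP " + con.getResponseCode());
  }
}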

View File

@ -17,29 +17,27 @@
package org.apache.solr.servlet;
import org.apache.solr.core.Config;
import java.io.IOException;
import org.apache.solr.core.SolrCore;
import java.io.PrintWriter;
import org.apache.solr.core.SolrException;
import java.util.logging.Logger;
import org.apache.solr.request.SolrQueryResponse;
import org.apache.solr.request.QueryResponseWriter;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.naming.NoInitialContextException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
import org.apache.solr.core.SolrCore;
import java.util.logging.Logger;
import org.apache.solr.core.SolrException;
import org.apache.solr.request.QueryResponseWriter;
import org.apache.solr.request.SolrQueryResponse;
import org.apache.solr.request.SolrRequestHandler;
/**
* @author yonik
* @author <a href='mailto:mbaranczak@epublishing.com'> Mike Baranczak </a>
*/
@Deprecated
public class SolrServlet extends HttpServlet {
final Logger log = Logger.getLogger(SolrServlet.class.getName());
@ -47,38 +45,10 @@ public class SolrServlet extends HttpServlet {
public void init() throws ServletException {
log.info("SolrServlet.init()");
try {
Context c = new InitialContext();
/***
System.out.println("Enumerating JNDI Context=" + c);
NamingEnumeration<NameClassPair> en = c.list("java:comp/env");
while (en.hasMore()) {
NameClassPair ncp = en.next();
System.out.println(" ENTRY:" + ncp);
}
System.out.println("JNDI lookup=" + c.lookup("java:comp/env/solr/home"));
***/
String home = (String)c.lookup("java:comp/env/solr/home");
if (home!=null) Config.setInstanceDir(home);
} catch (NoInitialContextException e) {
log.info("JNDI not configured for Solr (NoInitialContextEx)");
} catch (NamingException e) {
log.info("No /solr/home in JNDI");
}
log.info("user.dir=" + System.getProperty("user.dir"));
core = SolrCore.getSolrCore();
log.info("SolrServlet.init() done");
}
public void destroy() {
core.close();
super.destroy();
}
public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
doGet(request,response);
}
@ -87,7 +57,13 @@ public class SolrServlet extends HttpServlet {
SolrServletRequest solrReq = new SolrServletRequest(core, request);;
SolrQueryResponse solrRsp = new SolrQueryResponse();
try {
core.execute(solrReq, solrRsp);
SolrRequestHandler handler = core.getRequestHandler(solrReq.getQueryType());
if (handler==null) {
log.warning("Unknown Request Handler '" + solrReq.getQueryType() +"' :" + solrReq);
throw new SolrException(400,"Unknown Request Handler '" + solrReq.getQueryType() + "'", true);
}
core.execute(handler, solrReq, solrRsp );
if (solrRsp.getException() == null) {
QueryResponseWriter responseWriter = core.getQueryResponseWriter(solrReq);
response.setContentType(responseWriter.getContentType(solrReq, solrRsp));
@ -127,14 +103,4 @@ public class SolrServlet extends HttpServlet {
SolrException.log(log,e);
}
}
final int getParam(HttpServletRequest request, String param, int defval) {
final String pval = request.getParameter(param);
return (pval==null) ? defval : Integer.parseInt(pval);
}
final boolean paramExists(HttpServletRequest request, String param) {
return request.getParameter(param)!=null ? true : false;
}
}

View File

@ -15,41 +15,46 @@ package org.apache.solr.servlet;/**
* limitations under the License.
*/
import org.apache.solr.core.SolrCore;
import java.io.BufferedReader;
import org.apache.solr.core.SolrException;
import java.io.IOException;
import org.apache.solr.request.XMLResponseWriter;
import java.io.PrintWriter;
import org.apache.solr.request.SolrQueryResponse;
import java.util.logging.Logger;
import org.apache.solr.request.QueryResponseWriter;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.ServletException;
import java.util.logging.Logger;
import org.apache.solr.core.SolrException;
import java.io.IOException;
import org.apache.solr.handler.XmlUpdateRequestHandler;
import java.io.BufferedReader;
import org.apache.solr.request.QueryResponseWriter;
import java.io.PrintWriter;
import org.apache.solr.request.XMLResponseWriter;
import org.apache.solr.util.XML;
/**
* @author yonik
* @version $Id$
*/
@Deprecated
public class SolrUpdateServlet extends HttpServlet {
final Logger log = Logger.getLogger(SolrUpdateServlet.class.getName());
private SolrCore core;
XmlUpdateRequestHandler legacyUpdateHandler;
XMLResponseWriter xmlResponseWriter;
public void init() throws ServletException
{
core = SolrCore.getSolrCore();
legacyUpdateHandler = new XmlUpdateRequestHandler();
legacyUpdateHandler.init( null );
log.info("SolrUpdateServlet.init() done"); log.info("SolrUpdateServlet.init() done");
} }
public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
BufferedReader requestReader = request.getReader(); BufferedReader requestReader = request.getReader();
response.setContentType(QueryResponseWriter.CONTENT_TYPE_XML_UTF8); response.setContentType(QueryResponseWriter.CONTENT_TYPE_XML_UTF8);
PrintWriter responseWriter = response.getWriter();
core.update(requestReader, responseWriter);
PrintWriter writer = response.getWriter();
legacyUpdateHandler.doLegacyUpdate(requestReader, writer);
}
}