SOLR-1930: add SolrQueryRequest to UpdateCommand

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1053965 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Yonik Seeley 2010-12-30 18:27:27 +00:00
parent 390b5ff14b
commit 582fc07370
26 changed files with 195 additions and 239 deletions

View File

@ -188,7 +188,7 @@ public class TestMailEntityProcessor extends AbstractDataImportHandlerTestCase {
Boolean commitCalled;
public SolrWriterImpl() {
super(null, ".");
super(null, ".", null);
}
public boolean upload(SolrInputDocument doc) {

View File

@ -194,7 +194,7 @@ public class DataImportHandler extends RequestHandlerBase implements
req.getCore().getUpdateProcessingChain(params.get(UpdateParams.UPDATE_PROCESSOR));
UpdateRequestProcessor processor = processorChain.createProcessor(req, rsp);
SolrResourceLoader loader = req.getCore().getResourceLoader();
SolrWriter sw = getSolrWriter(processor, loader, requestParams);
SolrWriter sw = getSolrWriter(processor, loader, requestParams, req);
if (requestParams.debug) {
if (debugEnabled) {
@ -276,9 +276,9 @@ public class DataImportHandler extends RequestHandlerBase implements
}
private SolrWriter getSolrWriter(final UpdateRequestProcessor processor,
final SolrResourceLoader loader, final DataImporter.RequestParams requestParams) {
final SolrResourceLoader loader, final DataImporter.RequestParams requestParams, SolrQueryRequest req) {
return new SolrWriter(processor, loader.getConfigDir(), myName) {
return new SolrWriter(processor, loader.getConfigDir(), myName, req) {
@Override
public boolean upload(SolrInputDocument document) {

View File

@ -17,6 +17,7 @@
package org.apache.solr.handler.dataimport;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.update.AddUpdateCommand;
import org.apache.solr.update.CommitUpdateCommand;
import org.apache.solr.update.DeleteUpdateCommand;
@ -51,21 +52,25 @@ public class SolrWriter {
DebugLogger debugLogger;
public SolrWriter(UpdateRequestProcessor processor, String confDir) {
SolrQueryRequest req;
public SolrWriter(UpdateRequestProcessor processor, String confDir, SolrQueryRequest req) {
this.processor = processor;
configDir = confDir;
this.req = req;
}
public SolrWriter(UpdateRequestProcessor processor, String confDir, String filePrefix) {
public SolrWriter(UpdateRequestProcessor processor, String confDir, String filePrefix, SolrQueryRequest req) {
this.processor = processor;
configDir = confDir;
if(filePrefix != null){
persistFilename = filePrefix+".properties";
}
this.req = req;
}
public boolean upload(SolrInputDocument d) {
try {
AddUpdateCommand command = new AddUpdateCommand();
AddUpdateCommand command = new AddUpdateCommand(req);
command.solrDoc = d;
processor.processAdd(command);
} catch (Exception e) {
@ -79,7 +84,7 @@ public class SolrWriter {
public void deleteDoc(Object id) {
try {
log.info("Deleting document: " + id);
DeleteUpdateCommand delCmd = new DeleteUpdateCommand();
DeleteUpdateCommand delCmd = new DeleteUpdateCommand(req);
delCmd.id = id.toString();
processor.processDelete(delCmd);
} catch (IOException e) {
@ -153,7 +158,7 @@ public class SolrWriter {
public void deleteByQuery(String query) {
try {
log.info("Deleting documents from Solr with query: " + query);
DeleteUpdateCommand delCmd = new DeleteUpdateCommand();
DeleteUpdateCommand delCmd = new DeleteUpdateCommand(req);
delCmd.query = query;
processor.processDelete(delCmd);
} catch (IOException e) {
@ -163,7 +168,7 @@ public class SolrWriter {
public void commit(boolean optimize) {
try {
CommitUpdateCommand commit = new CommitUpdateCommand(optimize);
CommitUpdateCommand commit = new CommitUpdateCommand(req,optimize);
processor.processCommit(commit);
} catch (Throwable t) {
log.error("Exception while solr commit.", t);
@ -172,7 +177,7 @@ public class SolrWriter {
public void rollback() {
try {
RollbackUpdateCommand rollback = new RollbackUpdateCommand();
RollbackUpdateCommand rollback = new RollbackUpdateCommand(req);
processor.processRollback(rollback);
} catch (Throwable t) {
log.error("Exception while solr rollback.", t);
@ -181,7 +186,7 @@ public class SolrWriter {
public void doDeleteAll() {
try {
DeleteUpdateCommand deleteCommand = new DeleteUpdateCommand();
DeleteUpdateCommand deleteCommand = new DeleteUpdateCommand(req);
deleteCommand.query = "*:*";
processor.processDelete(deleteCommand);
} catch (IOException e) {

View File

@ -197,7 +197,7 @@ public class TestDocBuilder extends AbstractDataImportHandlerTestCase {
Boolean finishCalled = Boolean.FALSE;
public SolrWriterImpl() {
super(null, ".");
super(null, ".",null);
}
public boolean upload(SolrInputDocument doc) {

View File

@ -89,7 +89,7 @@ public class ExtractingDocumentLoader extends ContentStreamLoader {
this.config = config;
this.processor = processor;
templateAdd = new AddUpdateCommand();
templateAdd = new AddUpdateCommand(req);
templateAdd.overwrite = params.getBool(UpdateParams.OVERWRITE, true);
//this is lightweight

View File

@ -62,7 +62,7 @@ public class BinaryUpdateRequestHandler extends ContentStreamHandlerBase {
};
}
private void parseAndLoadDocs(SolrQueryRequest req, SolrQueryResponse rsp, InputStream stream,
private void parseAndLoadDocs(final SolrQueryRequest req, SolrQueryResponse rsp, InputStream stream,
final UpdateRequestProcessor processor) throws IOException {
UpdateRequest update = null;
update = new JavaBinUpdateRequestCodec().unmarshal(stream,
@ -71,7 +71,7 @@ public class BinaryUpdateRequestHandler extends ContentStreamHandlerBase {
public void document(SolrInputDocument document, UpdateRequest updateRequest) {
if (addCmd == null) {
addCmd = getAddCommand(updateRequest.getParams());
addCmd = getAddCommand(req, updateRequest.getParams());
}
addCmd.solrDoc = document;
try {
@ -83,25 +83,25 @@ public class BinaryUpdateRequestHandler extends ContentStreamHandlerBase {
}
});
if (update.getDeleteById() != null) {
delete(update.getDeleteById(), processor, true);
delete(req, update.getDeleteById(), processor, true);
}
if (update.getDeleteQuery() != null) {
delete(update.getDeleteQuery(), processor, false);
delete(req, update.getDeleteQuery(), processor, false);
}
}
private AddUpdateCommand getAddCommand(SolrParams params) {
AddUpdateCommand addCmd = new AddUpdateCommand();
private AddUpdateCommand getAddCommand(SolrQueryRequest req, SolrParams params) {
AddUpdateCommand addCmd = new AddUpdateCommand(req);
addCmd.overwrite = params.getBool(UpdateParams.OVERWRITE, true);
addCmd.commitWithin = params.getInt(COMMIT_WITHIN, -1);
return addCmd;
}
private void delete(List<String> l, UpdateRequestProcessor processor, boolean isId) throws IOException {
private void delete(SolrQueryRequest req, List<String> l, UpdateRequestProcessor processor, boolean isId) throws IOException {
for (String s : l) {
DeleteUpdateCommand delcmd = new DeleteUpdateCommand();
DeleteUpdateCommand delcmd = new DeleteUpdateCommand(req);
if (isId) {
delcmd.id = s;
} else {

View File

@ -185,7 +185,7 @@ abstract class CSVLoader extends ContentStreamLoader {
this.params = req.getParams();
schema = req.getSchema();
templateAdd = new AddUpdateCommand();
templateAdd = new AddUpdateCommand(req);
templateAdd.overwrite=params.getBool(OVERWRITE,true);
strategy = new CSVStrategy(',', '"', CSVStrategy.COMMENTS_DISABLED, CSVStrategy.ESCAPE_DISABLED, false, false, false, true);

View File

@ -45,7 +45,7 @@ public abstract class ContentStreamHandlerBase extends RequestHandlerBase {
Iterable<ContentStream> streams = req.getContentStreams();
if (streams == null) {
if (!RequestHandlerUtils.handleCommit(processor, params, false) && !RequestHandlerUtils.handleRollback(processor, params, false)) {
if (!RequestHandlerUtils.handleCommit(req, processor, params, false) && !RequestHandlerUtils.handleRollback(req, processor, params, false)) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "missing content stream");
}
} else {
@ -55,8 +55,8 @@ public abstract class ContentStreamHandlerBase extends RequestHandlerBase {
}
// Perhaps commit from the parameters
RequestHandlerUtils.handleCommit(processor, params, false);
RequestHandlerUtils.handleRollback(processor, params, false);
RequestHandlerUtils.handleCommit(req, processor, params, false);
RequestHandlerUtils.handleRollback(req, processor, params, false);
}
} finally {
// finish the request

View File

@ -62,14 +62,14 @@ class JsonLoader extends ContentStreamLoader {
}
JSONParser parser = new JSONParser(reader);
this.processUpdate(processor, parser);
this.processUpdate(req, processor, parser);
}
finally {
IOUtils.closeQuietly(reader);
}
}
void processUpdate(UpdateRequestProcessor processor, JSONParser parser) throws IOException
void processUpdate(SolrQueryRequest req, UpdateRequestProcessor processor, JSONParser parser) throws IOException
{
int ev = parser.nextEvent();
while( ev != JSONParser.EOF ) {
@ -80,25 +80,25 @@ class JsonLoader extends ContentStreamLoader {
if( parser.wasKey() ) {
String v = parser.getString();
if( v.equals( XmlUpdateRequestHandler.ADD ) ) {
processor.processAdd( parseAdd( parser ) );
processor.processAdd( parseAdd(req, parser ) );
}
else if( v.equals( XmlUpdateRequestHandler.COMMIT ) ) {
CommitUpdateCommand cmd = new CommitUpdateCommand( false );
CommitUpdateCommand cmd = new CommitUpdateCommand(req, false );
cmd.waitFlush = cmd.waitSearcher = true;
parseCommitOptions( parser, cmd );
processor.processCommit( cmd );
}
else if( v.equals( XmlUpdateRequestHandler.OPTIMIZE ) ) {
CommitUpdateCommand cmd = new CommitUpdateCommand( true );
CommitUpdateCommand cmd = new CommitUpdateCommand(req, true );
cmd.waitFlush = cmd.waitSearcher = true;
parseCommitOptions( parser, cmd );
processor.processCommit( cmd );
}
else if( v.equals( XmlUpdateRequestHandler.DELETE ) ) {
processor.processDelete( parseDelete( parser ) );
processor.processDelete( parseDelete(req, parser ) );
}
else if( v.equals( XmlUpdateRequestHandler.ROLLBACK ) ) {
processor.processRollback( parseRollback( parser ) );
processor.processRollback( parseRollback(req, parser ) );
}
else {
throw new IOException( "Unknown command: "+v+" ["+parser.getPosition()+"]" );
@ -129,10 +129,10 @@ class JsonLoader extends ContentStreamLoader {
}
}
DeleteUpdateCommand parseDelete(JSONParser js) throws IOException {
DeleteUpdateCommand parseDelete(SolrQueryRequest req, JSONParser js) throws IOException {
assertNextEvent( js, JSONParser.OBJECT_START );
DeleteUpdateCommand cmd = new DeleteUpdateCommand();
DeleteUpdateCommand cmd = new DeleteUpdateCommand(req);
while( true ) {
int ev = js.nextEvent();
@ -169,10 +169,10 @@ class JsonLoader extends ContentStreamLoader {
}
}
RollbackUpdateCommand parseRollback(JSONParser js) throws IOException {
RollbackUpdateCommand parseRollback(SolrQueryRequest req, JSONParser js) throws IOException {
assertNextEvent( js, JSONParser.OBJECT_START );
assertNextEvent( js, JSONParser.OBJECT_END );
return new RollbackUpdateCommand();
return new RollbackUpdateCommand(req);
}
void parseCommitOptions( JSONParser js, CommitUpdateCommand cmd ) throws IOException
@ -211,10 +211,10 @@ class JsonLoader extends ContentStreamLoader {
}
}
AddUpdateCommand parseAdd( JSONParser js ) throws IOException
AddUpdateCommand parseAdd(SolrQueryRequest req, JSONParser js ) throws IOException
{
assertNextEvent( js, JSONParser.OBJECT_START );
AddUpdateCommand cmd = new AddUpdateCommand();
AddUpdateCommand cmd = new AddUpdateCommand(req);
float boost = 1.0f;
while( true ) {

View File

@ -65,7 +65,7 @@ public class RequestHandlerUtils
boolean commit = params.getBool( UpdateParams.COMMIT, false );
if( optimize || commit || force ) {
CommitUpdateCommand cmd = new CommitUpdateCommand( optimize );
CommitUpdateCommand cmd = new CommitUpdateCommand(req, optimize );
cmd.waitFlush = params.getBool( UpdateParams.WAIT_FLUSH, cmd.waitFlush );
cmd.waitSearcher = params.getBool( UpdateParams.WAIT_SEARCHER, cmd.waitSearcher );
cmd.expungeDeletes = params.getBool( UpdateParams.EXPUNGE_DELETES, cmd.expungeDeletes);
@ -89,7 +89,7 @@ public class RequestHandlerUtils
* Check the request parameters and decide if it should commit or optimize.
* If it does, it will check parameters for "waitFlush" and "waitSearcher"
*/
public static boolean handleCommit( UpdateRequestProcessor processor, SolrParams params, boolean force ) throws IOException
public static boolean handleCommit(SolrQueryRequest req, UpdateRequestProcessor processor, SolrParams params, boolean force ) throws IOException
{
if( params == null ) {
params = new MapSolrParams( new HashMap<String, String>() );
@ -99,7 +99,7 @@ public class RequestHandlerUtils
boolean commit = params.getBool( UpdateParams.COMMIT, false );
if( optimize || commit || force ) {
CommitUpdateCommand cmd = new CommitUpdateCommand( optimize );
CommitUpdateCommand cmd = new CommitUpdateCommand(req, optimize );
cmd.waitFlush = params.getBool( UpdateParams.WAIT_FLUSH, cmd.waitFlush );
cmd.waitSearcher = params.getBool( UpdateParams.WAIT_SEARCHER, cmd.waitSearcher );
cmd.expungeDeletes = params.getBool( UpdateParams.EXPUNGE_DELETES, cmd.expungeDeletes);
@ -113,7 +113,7 @@ public class RequestHandlerUtils
/**
* @since Solr 1.4
*/
public static boolean handleRollback( UpdateRequestProcessor processor, SolrParams params, boolean force ) throws IOException
public static boolean handleRollback(SolrQueryRequest req, UpdateRequestProcessor processor, SolrParams params, boolean force ) throws IOException
{
if( params == null ) {
params = new MapSolrParams( new HashMap<String, String>() );
@ -122,7 +122,7 @@ public class RequestHandlerUtils
boolean rollback = params.getBool( UpdateParams.ROLLBACK, false );
if( rollback || force ) {
RollbackUpdateCommand cmd = new RollbackUpdateCommand();
RollbackUpdateCommand cmd = new RollbackUpdateCommand(req);
processor.processRollback( cmd );
return true;
}

View File

@ -22,12 +22,16 @@ import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.io.IOUtils;
import org.apache.lucene.index.IndexCommit;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.FastInputStream;
import org.apache.solr.common.util.JavaBinCodec;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.FileUtils;
import org.apache.solr.core.SolrCore;
import static org.apache.solr.handler.ReplicationHandler.*;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.update.CommitUpdateCommand;
import org.apache.solr.update.DirectUpdateHandler2;
@ -461,18 +465,23 @@ public class SnapPuller {
}
private void doCommit() throws IOException {
CommitUpdateCommand cmd = new CommitUpdateCommand(false);
cmd.waitFlush = true;
cmd.waitSearcher = true;
solrCore.getUpdateHandler().commit(cmd);
if (solrCore.getUpdateHandler() instanceof DirectUpdateHandler2) {
LOG.info("Force open index writer to make sure older index files get deleted");
DirectUpdateHandler2 handler = (DirectUpdateHandler2) solrCore.getUpdateHandler();
handler.forceOpenWriter();
replicationHandler.refreshCommitpoint();
} else {
LOG.warn("The update handler is not an instance or sub-class of DirectUpdateHandler2. " +
"ReplicationHandler may not be able to cleanup un-used index files.");
SolrQueryRequest req = new LocalSolrQueryRequest(solrCore, new ModifiableSolrParams());
try {
CommitUpdateCommand cmd = new CommitUpdateCommand(req, false);
cmd.waitFlush = true;
cmd.waitSearcher = true;
solrCore.getUpdateHandler().commit(cmd);
if (solrCore.getUpdateHandler() instanceof DirectUpdateHandler2) {
LOG.info("Force open index writer to make sure older index files get deleted");
DirectUpdateHandler2 handler = (DirectUpdateHandler2) solrCore.getUpdateHandler();
handler.forceOpenWriter();
replicationHandler.refreshCommitpoint();
} else {
LOG.warn("The update handler is not an instance or sub-class of DirectUpdateHandler2. " +
"ReplicationHandler may not be able to cleanup un-used index files.");
}
} finally {
req.close();
}
}

View File

@ -66,7 +66,7 @@ class XMLLoader extends ContentStreamLoader {
}
XMLStreamReader parser = inputFactory.createXMLStreamReader(reader);
this.processUpdate(processor, parser);
this.processUpdate(req, processor, parser);
}
catch (XMLStreamException e) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e.getMessage(), e);
@ -81,7 +81,7 @@ class XMLLoader extends ContentStreamLoader {
/**
* @since solr 1.2
*/
void processUpdate(UpdateRequestProcessor processor, XMLStreamReader parser)
void processUpdate(SolrQueryRequest req, UpdateRequestProcessor processor, XMLStreamReader parser)
throws XMLStreamException, IOException, FactoryConfigurationError,
InstantiationException, IllegalAccessException,
TransformerConfigurationException {
@ -98,7 +98,7 @@ class XMLLoader extends ContentStreamLoader {
if (currTag.equals(XmlUpdateRequestHandler.ADD)) {
XmlUpdateRequestHandler.log.trace("SolrCore.update(add)");
addCmd = new AddUpdateCommand();
addCmd = new AddUpdateCommand(req);
for (int i = 0; i < parser.getAttributeCount(); i++) {
String attrName = parser.getAttributeLocalName(i);
@ -120,7 +120,7 @@ class XMLLoader extends ContentStreamLoader {
} else if (XmlUpdateRequestHandler.COMMIT.equals(currTag) || XmlUpdateRequestHandler.OPTIMIZE.equals(currTag)) {
XmlUpdateRequestHandler.log.trace("parsing " + currTag);
CommitUpdateCommand cmd = new CommitUpdateCommand(XmlUpdateRequestHandler.OPTIMIZE.equals(currTag));
CommitUpdateCommand cmd = new CommitUpdateCommand(req, XmlUpdateRequestHandler.OPTIMIZE.equals(currTag));
boolean sawWaitSearcher = false, sawWaitFlush = false;
for (int i = 0; i < parser.getAttributeCount(); i++) {
@ -151,13 +151,13 @@ class XMLLoader extends ContentStreamLoader {
else if (XmlUpdateRequestHandler.ROLLBACK.equals(currTag)) {
XmlUpdateRequestHandler.log.trace("parsing " + currTag);
RollbackUpdateCommand cmd = new RollbackUpdateCommand();
RollbackUpdateCommand cmd = new RollbackUpdateCommand(req);
processor.processRollback(cmd);
} // end rollback
else if (XmlUpdateRequestHandler.DELETE.equals(currTag)) {
XmlUpdateRequestHandler.log.trace("parsing delete");
processDelete(processor, parser);
processDelete(req, processor, parser);
} // end delete
break;
}
@ -167,9 +167,9 @@ class XMLLoader extends ContentStreamLoader {
/**
* @since solr 1.3
*/
void processDelete(UpdateRequestProcessor processor, XMLStreamReader parser) throws XMLStreamException, IOException {
void processDelete(SolrQueryRequest req, UpdateRequestProcessor processor, XMLStreamReader parser) throws XMLStreamException, IOException {
// Parse the command
DeleteUpdateCommand deleteCmd = new DeleteUpdateCommand();
DeleteUpdateCommand deleteCmd = new DeleteUpdateCommand(req);
for (int i = 0; i < parser.getAttributeCount(); i++) {
String attrName = parser.getAttributeLocalName(i);

View File

@ -109,7 +109,7 @@ public class XmlUpdateRequestHandler extends ContentStreamHandlerBase {
XMLStreamReader parser = inputFactory.createXMLStreamReader(input);
UpdateRequestProcessor processor = processorFactory.createProcessor(req, rsp);
XMLLoader loader = (XMLLoader) newLoader(req, processor);
loader.processUpdate(processor, parser);
loader.processUpdate(req, processor, parser);
processor.finish();
output.write("<result status=\"0\"></result>");
}

View File

@ -194,7 +194,7 @@ public class CoreAdminHandler extends RequestHandlerBase {
wrappedReq = new LocalSolrQueryRequest(core, req.getParams());
UpdateRequestProcessor processor =
processorChain.createProcessor(wrappedReq, rsp);
processor.processMergeIndexes(new MergeIndexesCommand(dirs));
processor.processMergeIndexes(new MergeIndexesCommand(dirs, req));
} finally {
core.close();
wrappedReq.close();

View File

@ -22,6 +22,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.index.Term;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.SolrInputField;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
@ -45,6 +46,9 @@ public class AddUpdateCommand extends UpdateCommand {
public Term updateTerm;
public int commitWithin = -1;
public AddUpdateCommand(SolrQueryRequest req) {
super("add", req);
}
/** Reset state to reuse this object with a different document in the same request */
public void clear() {
@ -103,10 +107,6 @@ public class AddUpdateCommand extends UpdateCommand {
return "(null)";
}
public AddUpdateCommand() {
super("add");
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder(commandName);

View File

@ -16,6 +16,9 @@
*/
package org.apache.solr.update;
import org.apache.solr.request.SolrQueryRequest;
/**
* @version $Id$
*/
@ -32,8 +35,8 @@ public class CommitUpdateCommand extends UpdateCommand {
*/
public int maxOptimizeSegments = 1;
public CommitUpdateCommand(boolean optimize) {
super("commit");
public CommitUpdateCommand(SolrQueryRequest req, boolean optimize) {
super("commit", req);
this.optimize=optimize;
}
public String toString() {

View File

@ -16,6 +16,9 @@
*/
package org.apache.solr.update;
import org.apache.solr.request.SolrQueryRequest;
/**
* @version $Id$
*/
@ -23,8 +26,8 @@ public class DeleteUpdateCommand extends UpdateCommand {
public String id; // external (printable) id, for delete-by-id
public String query; // query string for delete-by-query
public DeleteUpdateCommand() {
super("delete");
public DeleteUpdateCommand(SolrQueryRequest req) {
super("delete", req);
}
public String toString() {

View File

@ -43,6 +43,10 @@ import java.util.concurrent.atomic.AtomicLong;
import java.io.IOException;
import java.net.URL;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.QueryParsing;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
@ -502,8 +506,9 @@ public class DirectUpdateHandler2 extends UpdateHandler {
/** This is the worker part for the ScheduledFuture **/
public synchronized void run() {
long started = System.currentTimeMillis();
SolrQueryRequest req = new LocalSolrQueryRequest(core, new ModifiableSolrParams());
try {
CommitUpdateCommand command = new CommitUpdateCommand( false );
CommitUpdateCommand command = new CommitUpdateCommand(req, false );
command.waitFlush = true;
command.waitSearcher = true;
//no need for command.maxOptimizeSegments = 1; since it is not optimizing
@ -516,6 +521,7 @@ public class DirectUpdateHandler2 extends UpdateHandler {
}
finally {
pending = null;
req.close();
}
// check if docs have been submitted since the commit started

View File

@ -18,6 +18,7 @@
package org.apache.solr.update;
import org.apache.lucene.store.Directory;
import org.apache.solr.request.SolrQueryRequest;
/**
* A merge indexes command encapsulated in an object.
@ -28,12 +29,12 @@ import org.apache.lucene.store.Directory;
public class MergeIndexesCommand extends UpdateCommand {
public Directory[] dirs;
public MergeIndexesCommand() {
this(null);
public MergeIndexesCommand(SolrQueryRequest req) {
this(null, req);
}
public MergeIndexesCommand(Directory[] dirs) {
super("mergeIndexes");
public MergeIndexesCommand(Directory[] dirs, SolrQueryRequest req) {
super("mergeIndexes", req);
this.dirs = dirs;
}

View File

@ -17,14 +17,16 @@
package org.apache.solr.update;
import org.apache.solr.request.SolrQueryRequest;
/**
* @version $Id$
* @since Solr 1.4
*/
public class RollbackUpdateCommand extends UpdateCommand {
public RollbackUpdateCommand() {
super("rollback");
public RollbackUpdateCommand(SolrQueryRequest req) {
super("rollback", req);
}
}

View File

@ -17,15 +17,19 @@
package org.apache.solr.update;
import org.apache.solr.request.SolrQueryRequest;
/** An index update command encapsulated in an object (Command pattern)
*
* @version $Id$
*/
public class UpdateCommand {
protected String commandName;
protected final SolrQueryRequest req;
protected final String commandName;
public UpdateCommand(String commandName) {
public UpdateCommand(String commandName, SolrQueryRequest req) {
this.req = req;
this.commandName = commandName;
}

View File

@ -23,16 +23,23 @@ import java.util.List;
import org.apache.noggit.JSONParser;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.SolrInputField;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.update.AddUpdateCommand;
import org.apache.solr.update.CommitUpdateCommand;
import org.apache.solr.update.DeleteUpdateCommand;
import org.apache.solr.update.RollbackUpdateCommand;
import org.apache.solr.update.processor.UpdateRequestProcessor;
import org.junit.BeforeClass;
public class JsonLoaderTest extends LuceneTestCase {
public class JsonLoaderTest extends SolrTestCaseJ4 {
@BeforeClass
public static void beforeTests() throws Exception {
initCore("solrconfig.xml","schema.xml");
}
static String input = ("{\n" +
"\n" +
"'add': {\n" +
@ -73,12 +80,13 @@ public class JsonLoaderTest extends LuceneTestCase {
public void testParsing() throws Exception
{
SolrQueryRequest req = req();
Reader reader = new StringReader(input);
BufferingRequestProcessor p = new BufferingRequestProcessor(null);
JsonLoader loader = new JsonLoader( p );
loader.processUpdate( p, new JSONParser(reader) );
loader.processUpdate(req, p, new JSONParser(reader) );
assertEquals( 2, p.addCommands.size() );
@ -122,6 +130,8 @@ public class JsonLoaderTest extends LuceneTestCase {
// ROLLBACK COMMANDS
assertEquals( 1, p.rollbackCommands.size() );
req.close();
}
}

View File

@ -111,7 +111,7 @@ public class TestSearchPerf extends AbstractSolrTestCase {
doc.addField("t10_100_ws", sb.toString());
}
AddUpdateCommand cmd = new AddUpdateCommand();
AddUpdateCommand cmd = new AddUpdateCommand(req);
cmd.solrDoc = doc;
processor.processAdd(cmd);
}

View File

@ -19,6 +19,7 @@ package org.apache.solr.update;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.util.AbstractSolrTestCase;
import java.io.File;
@ -45,7 +46,8 @@ public class DirectUpdateHandlerOptimizeTest extends AbstractSolrTestCase {
SolrCore core = h.getCore();
UpdateHandler updater = core.getUpdateHandler();
AddUpdateCommand cmd = new AddUpdateCommand();
SolrQueryRequest req = req();
AddUpdateCommand cmd = new AddUpdateCommand(req);
//add just under the merge factor, so no segments are merged
//the merge factor is 100 and the maxBufferedDocs is 2, so there should be 50 segments
@ -57,7 +59,7 @@ public class DirectUpdateHandlerOptimizeTest extends AbstractSolrTestCase {
updater.addDoc(cmd);
}
CommitUpdateCommand cmtCmd = new CommitUpdateCommand(false);
CommitUpdateCommand cmtCmd = new CommitUpdateCommand(req, false);
updater.commit(cmtCmd);
updater.commit(cmtCmd); // commit twice to give systems such as windows a chance to delete the old files
@ -65,7 +67,7 @@ public class DirectUpdateHandlerOptimizeTest extends AbstractSolrTestCase {
assertNumSegments(indexDir, 50);
//now do an optimize
cmtCmd = new CommitUpdateCommand(true);
cmtCmd = new CommitUpdateCommand(req, true);
cmtCmd.maxOptimizeSegments = 25;
updater.commit(cmtCmd);
updater.commit(cmtCmd);
@ -81,6 +83,8 @@ public class DirectUpdateHandlerOptimizeTest extends AbstractSolrTestCase {
updater.commit(cmtCmd);
updater.commit(cmtCmd);
assertNumSegments(indexDir, 1);
req.close();
}
private void assertNumSegments(String indexDir, int numSegs) {

View File

@ -53,123 +53,46 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
clearIndex();
assertU(commit());
}
@Test
public void testRequireUniqueKey() throws Exception
{
SolrCore core = h.getCore();
UpdateHandler updater = core.getUpdateHandler();
AddUpdateCommand cmd = new AddUpdateCommand();
public void testRequireUniqueKey() throws Exception {
// Add a valid document
cmd.doc = new Document();
cmd.doc.add( new Field( "id", "AAA", Store.YES, Index.NOT_ANALYZED ) );
cmd.doc.add( new Field( "subject", "xxxxx", Store.YES, Index.NOT_ANALYZED ) );
updater.addDoc( cmd );
assertU(adoc("id","1"));
// More than one id should fail
assertFailedU(adoc("id","2", "id","3", "text","ignore_exception"));
// No id should fail
assertFailedU(adoc("text","ignore_exception"));
}
@Test
public void testBasics() throws Exception {
assertU(adoc("id","5"));
// search - not committed - "5" should not be found.
assertQ(req("q","id:5"), "//*[@numFound='0']");
assertU(commit());
// now it should be there
assertQ(req("q","id:5"), "//*[@numFound='1']");
// now delete it
assertU(delI("5"));
// not committed yet
assertQ(req("q","id:5"), "//*[@numFound='1']");
assertU(commit());
// Add a document with multiple ids
cmd.indexedId = null; // reset the id for this add
cmd.doc = new Document();
cmd.doc.add( new Field( "id", "AAA", Store.YES, Index.NOT_ANALYZED ) );
cmd.doc.add( new Field( "id", "BBB", Store.YES, Index.NOT_ANALYZED ) );
cmd.doc.add( new Field( "subject", "xxxxx", Store.YES, Index.NOT_ANALYZED ) );
try {
updater.addDoc( cmd );
fail( "added a document with multiple ids" );
}
catch( SolrException ex ) { } // expected
// should be gone
assertQ(req("q","id:5"), "//*[@numFound='0']");
// Add a document without an id
cmd.indexedId = null; // reset the id for this add
cmd.doc = new Document();
cmd.doc.add( new Field( "subject", "xxxxx", Store.YES, Index.NOT_ANALYZED ) );
try {
updater.addDoc( cmd );
fail( "added a document without an ids" );
}
catch( SolrException ex ) { } // expected
}
@Test
public void testUncommit() throws Exception {
addSimpleDoc("A");
// search - not committed - "A" should not be found.
Map<String,String> args = new HashMap<String, String>();
args.put( CommonParams.Q, "id:A" );
args.put( "indent", "true" );
SolrQueryRequest req = new LocalSolrQueryRequest( h.getCore(), new MapSolrParams( args) );
assertQ("\"A\" should not be found.", req
,"//*[@numFound='0']"
);
}
@Test
public void testAddCommit() throws Exception {
addSimpleDoc("A");
// commit "A"
SolrCore core = h.getCore();
UpdateHandler updater = core.getUpdateHandler();
CommitUpdateCommand cmtCmd = new CommitUpdateCommand(false);
cmtCmd.waitSearcher = true;
updater.commit(cmtCmd);
// search - "A" should be found.
Map<String,String> args = new HashMap<String, String>();
args.put( CommonParams.Q, "id:A" );
args.put( "indent", "true" );
SolrQueryRequest req = new LocalSolrQueryRequest( core, new MapSolrParams( args) );
assertQ("\"A\" should be found.", req
,"//*[@numFound='1']"
,"//result/doc[1]/str[@name='id'][.='A']"
);
}
@Test
public void testDeleteCommit() throws Exception {
addSimpleDoc("A");
addSimpleDoc("B");
// commit "A", "B"
SolrCore core = h.getCore();
UpdateHandler updater = core.getUpdateHandler();
CommitUpdateCommand cmtCmd = new CommitUpdateCommand(false);
cmtCmd.waitSearcher = true;
updater.commit(cmtCmd);
// search - "A","B" should be found.
Map<String,String> args = new HashMap<String, String>();
args.put( CommonParams.Q, "id:A OR id:B" );
args.put( "indent", "true" );
SolrQueryRequest req = new LocalSolrQueryRequest( core, new MapSolrParams( args) );
assertQ("\"A\" and \"B\" should be found.", req
,"//*[@numFound='2']"
,"//result/doc[1]/str[@name='id'][.='A']"
,"//result/doc[2]/str[@name='id'][.='B']"
);
// delete "B"
deleteSimpleDoc("B");
// search - "A","B" should be found.
assertQ("\"A\" and \"B\" should be found.", req
,"//*[@numFound='2']"
,"//result/doc[1]/str[@name='id'][.='A']"
,"//result/doc[2]/str[@name='id'][.='B']"
);
// commit
updater.commit(cmtCmd);
// search - "B" should not be found.
assertQ("\"B\" should not be found.", req
,"//*[@numFound='1']"
,"//result/doc[1]/str[@name='id'][.='A']"
);
}
@Test
public void testAddRollback() throws Exception {
@ -177,14 +100,15 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
deleteCore();
initCore("solrconfig.xml", "schema12.xml");
addSimpleDoc("A");
assertU(adoc("id","A"));
// commit "A"
SolrCore core = h.getCore();
UpdateHandler updater = core.getUpdateHandler();
assertTrue( updater instanceof DirectUpdateHandler2 );
DirectUpdateHandler2 duh2 = (DirectUpdateHandler2)updater;
CommitUpdateCommand cmtCmd = new CommitUpdateCommand(false);
SolrQueryRequest ureq = req();
CommitUpdateCommand cmtCmd = new CommitUpdateCommand(ureq, false);
cmtCmd.waitSearcher = true;
assertEquals( 1, duh2.addCommands.get() );
assertEquals( 1, duh2.addCommandsCumulative.get() );
@ -193,11 +117,13 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
assertEquals( 0, duh2.addCommands.get() );
assertEquals( 1, duh2.addCommandsCumulative.get() );
assertEquals( 1, duh2.commitCommands.get() );
ureq.close();
addSimpleDoc("B");
assertU(adoc("id","B"));
// rollback "B"
RollbackUpdateCommand rbkCmd = new RollbackUpdateCommand();
ureq = req();
RollbackUpdateCommand rbkCmd = new RollbackUpdateCommand(ureq);
assertEquals( 1, duh2.addCommands.get() );
assertEquals( 2, duh2.addCommandsCumulative.get() );
assertEquals( 0, duh2.rollbackCommands.get() );
@ -205,6 +131,7 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
assertEquals( 0, duh2.addCommands.get() );
assertEquals( 1, duh2.addCommandsCumulative.get() );
assertEquals( 1, duh2.rollbackCommands.get() );
ureq.close();
// search - "B" should not be found.
Map<String,String> args = new HashMap<String, String>();
@ -218,7 +145,7 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
// Add a doc after the rollback to make sure we can continue to add/delete documents
// after a rollback as normal
addSimpleDoc("ZZZ");
assertU(adoc("id","ZZZ"));
assertU(commit());
assertQ("\"ZZZ\" must be found.", req("q", "id:ZZZ")
,"//*[@numFound='1']"
@ -232,15 +159,16 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
deleteCore();
initCore("solrconfig.xml", "schema12.xml");
addSimpleDoc("A");
addSimpleDoc("B");
assertU(adoc("id","A"));
assertU(adoc("id","B"));
// commit "A", "B"
SolrCore core = h.getCore();
UpdateHandler updater = core.getUpdateHandler();
assertTrue( updater instanceof DirectUpdateHandler2 );
DirectUpdateHandler2 duh2 = (DirectUpdateHandler2)updater;
CommitUpdateCommand cmtCmd = new CommitUpdateCommand(false);
SolrQueryRequest ureq = req();
CommitUpdateCommand cmtCmd = new CommitUpdateCommand(ureq, false);
cmtCmd.waitSearcher = true;
assertEquals( 2, duh2.addCommands.get() );
assertEquals( 2, duh2.addCommandsCumulative.get() );
@ -249,6 +177,7 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
assertEquals( 0, duh2.addCommands.get() );
assertEquals( 2, duh2.addCommandsCumulative.get() );
assertEquals( 1, duh2.commitCommands.get() );
ureq.close();
// search - "A","B" should be found.
Map<String,String> args = new HashMap<String, String>();
@ -262,8 +191,8 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
);
// delete "B"
deleteSimpleDoc("B");
assertU(delI("B"));
// search - "A","B" should be found.
assertQ("\"A\" and \"B\" should be found.", req
,"//*[@numFound='2']"
@ -272,11 +201,13 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
);
// rollback "B"
RollbackUpdateCommand rbkCmd = new RollbackUpdateCommand();
ureq = req();
RollbackUpdateCommand rbkCmd = new RollbackUpdateCommand(ureq);
assertEquals( 1, duh2.deleteByIdCommands.get() );
assertEquals( 1, duh2.deleteByIdCommandsCumulative.get() );
assertEquals( 0, duh2.rollbackCommands.get() );
updater.rollback(rbkCmd);
ureq.close();
assertEquals( 0, duh2.deleteByIdCommands.get() );
assertEquals( 0, duh2.deleteByIdCommandsCumulative.get() );
assertEquals( 1, duh2.rollbackCommands.get() );
@ -290,7 +221,7 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
// Add a doc after the rollback to make sure we can continue to add/delete documents
// after a rollback as normal
addSimpleDoc("ZZZ");
assertU(adoc("id","ZZZ"));
assertU(commit());
assertQ("\"ZZZ\" must be found.", req("q", "id:ZZZ")
,"//*[@numFound='1']"
@ -325,28 +256,6 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
sr.close();
}
/** Sends a single document with the given id through the core's update handler (no commit). */
private void addSimpleDoc(String id) throws Exception {
  UpdateHandler handler = h.getCore().getUpdateHandler();

  // Build a minimal Lucene document carrying only the id field.
  Document doc = new Document();
  doc.add( new Field( "id", id, Store.YES, Index.NOT_ANALYZED ) );

  AddUpdateCommand addCmd = new AddUpdateCommand();
  addCmd.doc = doc;
  handler.addDoc( addCmd );
}
/** Issues a delete-by-id for the given id through the core's update handler (no commit). */
private void deleteSimpleDoc(String id) throws Exception {
  UpdateHandler handler = h.getCore().getUpdateHandler();

  DeleteUpdateCommand delCmd = new DeleteUpdateCommand();
  delCmd.id = id;
  handler.delete(delCmd);
}
}

View File

@ -87,7 +87,7 @@ public class TestIndexingPerformance extends AbstractSolrTestCase {
long start = System.currentTimeMillis();
AddUpdateCommand add = new AddUpdateCommand();
AddUpdateCommand add = new AddUpdateCommand(req);
Field idField=null;
@ -111,7 +111,7 @@ public class TestIndexingPerformance extends AbstractSolrTestCase {
log.info("iter="+iter +" time=" + (end-start) + " throughput=" + ((long)iter*1000)/(end-start));
//discard all the changes
updateHandler.rollback(new RollbackUpdateCommand());
updateHandler.rollback(new RollbackUpdateCommand(req));
req.close();
}