From cca29c44c29c15792f3a88ab6cbb05c960689f15 Mon Sep 17 00:00:00 2001
From: Yonik Seeley
Date: Wed, 27 Jul 2011 16:27:40 +0000
Subject: [PATCH] tests: write a lucene version of the NRT stress test

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1151526 13f79535-47bb-0310-9956-ffa450edef68
---
 .../apache/solr/search/TestRealTimeGet.java | 244 +++++++++++++++++-
 1 file changed, 237 insertions(+), 7 deletions(-)

diff --git a/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java b/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java
index d778b5d39f5..ce2eff669fa 100644
--- a/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java
+++ b/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java
@@ -16,18 +16,31 @@
  */
 package org.apache.solr.search;
 
+import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader.ReaderContext;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.Term;
 import org.apache.lucene.queries.function.DocValues;
 import org.apache.lucene.queries.function.ValueSource;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.ReaderUtil;
+import org.apache.lucene.util.Version;
 import org.apache.noggit.ObjectBuilder;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.schema.SchemaField;
+import org.apache.solr.schema.StrField;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -84,6 +97,22 @@ public class TestRealTimeGet extends SolrTestCaseJ4 {
   long committedModelClock;
   volatile int lastId;
   final String field = "val_l";
+  Object[] syncArr;
+
+  private void initModel(int ndocs) {
+    snapshotCount = 0;
+    committedModelClock = 0;
+    lastId = 0;
+
+    syncArr = new Object[ndocs];
+
+    for (int i=0; i<ndocs; i++) {
+      model.put(i, -1L);
+      syncArr[i] = new Object();
+    }
+    committedModel.putAll(model);
+  }
+
+    List<Thread> threads = new ArrayList<Thread>();
+
+    RAMDirectory dir = new RAMDirectory();
+    final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_40, new WhitespaceAnalyzer(Version.LUCENE_40)));
+    writer.commit();
+    reader = IndexReader.open(dir);
+
+    for (int i=0; i<nWriteThreads; i++) {
+      Thread thread = new Thread("WRITER"+i) {
+        Random rand = new Random(random.nextInt());
+
+        @Override
+        public void run() {
+          try {
+            while (operations.decrementAndGet() > 0) {
+              int oper = rand.nextInt(100);
+
+              if (oper < commitPercent) {
+                if (numCommitting.incrementAndGet() <= maxConcurrentCommits) {
+                  Map<Integer,Long> newCommittedModel;
+                  long version;
+
+                  synchronized(TestRealTimeGet.this) {
+                    newCommittedModel = new HashMap<Integer,Long>(model);  // take a snapshot
+                    version = snapshotCount++;
+                  }
+
+                  IndexReader newReader;
+                  if (rand.nextInt(100) < softCommitPercent) {
+                    // assertU(h.commit("softCommit","true"));
+                    newReader = reader.reopen(writer, true);
+                  } else {
+                    // assertU(commit());
+                    writer.commit();
+                    newReader = reader.reopen();
+                  }
+
+                  synchronized(TestRealTimeGet.this) {
+                    // install the new reader if it's newest
+                    if (newReader.getVersion() > reader.getVersion()) {
+                      reader.decRef();
+                      reader = newReader;
+                    } else if (newReader != reader) {
+                      newReader.decRef();
+                    }
+
+                    // install this snapshot only if it's newer than the current one
+                    if (version >= committedModelClock) {
+                      committedModel = newCommittedModel;
+                      committedModelClock = version;
+                    }
+
+                  }
+                }
+                numCommitting.decrementAndGet();
+                continue;
+              }
+
+
+              int id = rand.nextInt(ndocs);
+              Object sync = syncArr[id];
+
+              // set the lastId before we actually change it sometimes to try and
+              // uncover more race conditions between writing and reading
+              boolean before = random.nextBoolean();
+              if (before) {
+                lastId = id;
+              }
+
+              // We can't concurrently update the same document and retain our invariants of increasing values
+              // since we can't guarantee what order the updates will be executed.
+              synchronized (sync) {
+                Long val = model.get(id);
+                long nextVal = Math.abs(val)+1;
+
+                if (oper < commitPercent + deletePercent) {
+                  // assertU("<delete><id>" + id + "</id></delete>");
+                  writer.deleteDocuments(new Term("id",Integer.toString(id)));
+                  model.put(id, -nextVal);
+                } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) {
+                  //assertU("<delete><query>id:" + id + "</query></delete>");
+                  writer.deleteDocuments(new TermQuery(new Term("id", Integer.toString(id))));
+                  model.put(id, -nextVal);
+                } else {
+                  // assertU(adoc("id",Integer.toString(id), field, Long.toString(nextVal)));
+                  Document d = new Document();
+                  d.add(new Field("id",Integer.toString(id), Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS));
+                  d.add(new Field(field, Long.toString(nextVal), Field.Store.YES, Field.Index.NO));
+                  writer.updateDocument(new Term("id", Integer.toString(id)), d);
+                  model.put(id, nextVal);
+                }
+              }
+
+              if (!before) {
+                lastId = id;
+              }
+            }
+          } catch (Exception ex) {
+            throw new RuntimeException(ex);
+          }
+        }
+      };
+
+      threads.add(thread);
+    }
+
+
+    for (int i=0; i<nReadThreads; i++) {
+      Thread thread = new Thread("READER"+i) {
+        Random rand = new Random(random.nextInt());
+
+        @Override
+        public void run() {
+          try {
+            while (operations.decrementAndGet() >= 0) {
+              int oper = rand.nextInt(100);
+              // bias toward a recently changed doc
+              int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs);
+
+              // when indexing, we update the index, then the model
+              // so when querying, we should first check the model, and then the index
+
+              long val;
+
+              synchronized(TestRealTimeGet.this) {
+                val = committedModel.get(id);
+              }
+
+
+              IndexReader r;
+              synchronized(TestRealTimeGet.this) {
+                r = reader;
+                r.incRef();
+              }
+
+              // sreq = req("wt","json", "q","id:"+Integer.toString(id), "omitHeader","true");
+              IndexSearcher searcher = new IndexSearcher(r);
+              Query q = new TermQuery(new Term("id",Integer.toString(id)));
+              TopDocs results = searcher.search(q, 10);
+
+              if (results.totalHits == 0) {
+                // there's no info we can get back with a delete, so not much we can check without further synchronization
+              } else {
+                assertEquals(1, results.totalHits);
+                Document doc = searcher.doc(results.scoreDocs[0].doc);
+                long foundVal = Long.parseLong(doc.get(field));
+                if (foundVal < Math.abs(val)) {
+                  System.out.println("model_val="+val+" foundVal="+foundVal);
+                }
+                assertTrue(foundVal >= Math.abs(val));
+              }
+
+              r.decRef();
+            }
+          }
+          catch (Throwable e) {
+            operations.set(-1L);
+            SolrException.log(log,e);
+            fail(e.toString());
+          }
+        }
+      };
+
+      threads.add(thread);
+    }
+
+
+    for (Thread thread : threads) {
+      thread.start();
+    }
+
+    for (Thread thread : threads) {
+      thread.join();
+    }
+
+    writer.close();
+    reader.close();
+
+  }
+
+
 }
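
The patch text above picks up the new Lucene stress test partway through its setup: the writer and reader loops rely on randomized tuning knobs (commitPercent, softCommitPercent, deletePercent, deleteByQueryPercent, ndocs, maxConcurrentCommits), on the shared operations budget and numCommitting counter, and on initModel(ndocs) having been called. A minimal sketch of that setup, using illustrative ranges and names inferred from the loop bodies rather than the commit's actual values, could look like this:

import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

// Hypothetical sketch of the randomized test parameters; the ranges below are
// illustrative, not the values from this commit.
class StressTestParams {
  final Random random = new Random();

  final int commitPercent = 5 + random.nextInt(20);        // % of operations that commit
  final int softCommitPercent = 30 + random.nextInt(75);   // % of commits that only reopen the NRT reader
  final int deletePercent = 4 + random.nextInt(25);        // % of operations that delete by term
  final int deleteByQueryPercent = 1 + random.nextInt(5);  // % of operations that delete by query
  final int ndocs = 5 + random.nextInt(100);               // size of the id space being updated
  final int nWriteThreads = 5 + random.nextInt(10);        // writer thread count
  final int nReadThreads = 5 + random.nextInt(10);         // reader thread count
  final int maxConcurrentCommits = nWriteThreads;          // cap on committers in flight at once

  // shared state consumed by the writer and reader loops
  final AtomicLong operations = new AtomicLong(10000);     // total operations left to perform
  final AtomicInteger numCommitting = new AtomicInteger(); // committers currently running
}

Randomizing the ratios per run is what gives the stress test its coverage: some runs are commit-heavy and churn the NRT reader, others are update-heavy and lean on the per-document synchronization.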
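The commit branch of the writer thread follows a snapshot-then-install pattern: it copies the model and takes a version number under the test's lock, performs the commit or NRT reopen outside the lock, and then installs the snapshot (and the reopened reader) only if nothing newer has been installed in the meantime. A stripped-down, Lucene-free sketch of that pattern, with illustrative names:

import java.util.HashMap;
import java.util.Map;

// Stand-alone sketch of the "install only if newer" snapshot logic used by the
// committing writer threads in the patch; names here are illustrative.
class SnapshotInstaller {
  private final Map<Integer, Long> model = new HashMap<Integer, Long>();
  private Map<Integer, Long> committedModel = new HashMap<Integer, Long>();
  private long snapshotCount;
  private long committedModelClock;

  void commitLikeOperation() {
    Map<Integer, Long> newCommittedModel;
    long version;

    synchronized (this) {
      newCommittedModel = new HashMap<Integer, Long>(model); // take a snapshot
      version = snapshotCount++;
    }

    // ...the real test commits and/or reopens the IndexReader here, outside the lock...

    synchronized (this) {
      // another committer may have installed a newer snapshot while we were busy
      if (version >= committedModelClock) {
        committedModel = newCommittedModel;
        committedModelClock = version;
      }
    }
  }
}

Doing the expensive commit outside the lock keeps committers from serializing, and the version check makes the install safe under races, which is the same rule the test applies to the IndexReader via getVersion() before swapping it in.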