mirror of https://github.com/apache/lucene.git
SOLR-4909: Use DirectoryReader.openIfChanged in non-NRT mode
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1521556 13f79535-47bb-0310-9956-ffa450edef68
parent 54427466aa
commit d545fdfd1e
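For background: with nrtMode=true Solr opens searchers from the IndexWriter, so flushed but uncommitted changes (soft commits) are searchable; with nrtMode=false readers come from the Directory and only advance on hard commits. Before this change Solr reopened from the IndexWriter whenever one was available; with this change the non-NRT path reopens the existing Directory-based reader with DirectoryReader.openIfChanged. A rough sketch of the two open paths in plain Lucene 4.x API terms (the helper class and method names are illustrative only, not Solr's actual code path, which routes through IndexReaderFactory and SolrCoreState):

import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;

// Hypothetical helper contrasting the two modes this commit distinguishes.
class ReaderOpenSketch {

  // nrtMode=true: the reader is opened from the IndexWriter, so flushed but
  // uncommitted changes (soft commits) become visible to searchers.
  static DirectoryReader openNrt(IndexWriter writer) throws IOException {
    return DirectoryReader.open(writer, true); // applyAllDeletes = true
  }

  // nrtMode=false: the reader is opened from the Directory and reflects only
  // the most recent commit point.
  static DirectoryReader openNonNrt(Directory dir) throws IOException {
    return DirectoryReader.open(dir);
  }
}

A non-NRT reader is attractive for hosts that never see local updates, such as slaves in a master/slave replication setup, which is why the example solrconfig.xml comments later in this diff recommend nrtMode=false for them.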
@@ -198,6 +198,9 @@ Bug Fixes
 * SOLR-5215: Fix possibility of deadlock in ZooKeeper ConnectionManager.
   (Mark Miller, Ricardo Merizalde)
 
+* SOLR-4909: Use DirectoryReader.openIfChanged in non-NRT mode.
+  (Michael Garski via Robert Muir)
+
 Optimizations
 ----------------------
 
@@ -60,7 +60,7 @@ public abstract class IndexReaderFactory implements NamedListInitializedPlugin {
   /**
    * Creates a new IndexReader instance using the given IndexWriter.
    * <p>
-   * This is used for opening the initial reader in NRT mode ({@code reopenReaders=true}
+   * This is used for opening the initial reader in NRT mode ({@code nrtMode=true}
    * in solrconfig.xml)
    *
    * @param writer IndexWriter
@@ -152,7 +152,7 @@ public class SolrConfig extends Config {
       defaultIndexConfig = mainIndexConfig = null;
       indexConfigPrefix = "indexConfig";
     }
-    reopenReaders = getBool(indexConfigPrefix+"/reopenReaders", true);
+    nrtMode = getBool(indexConfigPrefix+"/nrtMode", true);
     // Parse indexConfig section, using mainIndex as backup in case old config is used
     indexConfig = new SolrIndexConfig(this, "indexConfig", mainIndexConfig);
 
@@ -316,7 +316,7 @@ public class SolrConfig extends Config {
   public final int queryResultWindowSize;
   public final int queryResultMaxDocsCached;
   public final boolean enableLazyFieldLoading;
-  public final boolean reopenReaders;
+  public final boolean nrtMode;
   // DocSet
   public final float hashSetInverseLoadFactor;
   public final int hashDocSetMaxSize;
@@ -19,6 +19,7 @@ package org.apache.solr.core;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.lucene.codecs.Codec;
+import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexDeletionPolicy;
 import org.apache.lucene.index.IndexWriter;
@@ -786,10 +787,19 @@ public final class SolrCore implements SolrInfoMBean {
         iwRef = prev.getUpdateHandler().getSolrCoreState().getIndexWriter(null);
         if (iwRef != null) {
           final IndexWriter iw = iwRef.get();
+          final SolrCore core = this;
           newReaderCreator = new Callable<DirectoryReader>() {
+            // this is used during a core reload
+
             @Override
             public DirectoryReader call() throws Exception {
-              return DirectoryReader.open(iw, true);
+              if (getSolrConfig().nrtMode) {
+                // if in NRT mode, need to open from the previous writer
+                return indexReaderFactory.newReader(iw, core);
+              } else {
+                // if not NRT, need to create a new reader from the directory
+                return indexReaderFactory.newReader(iw.getDirectory(), core);
+              }
             }
           };
         }
@@ -1355,7 +1365,7 @@ public final class SolrCore implements SolrInfoMBean {
 
     SolrIndexSearcher tmp;
     RefCounted<SolrIndexSearcher> newestSearcher = null;
-    boolean nrt = solrConfig.reopenReaders && updateHandlerReopens;
+    boolean nrt = solrConfig.nrtMode && updateHandlerReopens;
 
     openSearcherLock.lock();
     try {
@@ -1376,8 +1386,7 @@ public final class SolrCore implements SolrInfoMBean {
       }
     }
 
-    if (newestSearcher != null && solrConfig.reopenReaders
-        && (nrt || indexDirFile.equals(newIndexDirFile))) {
+    if (newestSearcher != null && (nrt || indexDirFile.equals(newIndexDirFile))) {
 
       DirectoryReader newReader;
       DirectoryReader currentReader = newestSearcher.get().getIndexReader();
@@ -1387,13 +1396,13 @@ public final class SolrCore implements SolrInfoMBean {
       RefCounted<IndexWriter> writer = getUpdateHandler().getSolrCoreState()
           .getIndexWriter(null);
       try {
-        if (writer != null) {
-          newReader = DirectoryReader.openIfChanged(currentReader,
-              writer.get(), true);
+        if (writer != null && solrConfig.nrtMode) {
+          // if in NRT mode, open from the writer
+          newReader = DirectoryReader.openIfChanged(currentReader, writer.get(), true);
         } else {
           // verbose("start reopen without writer, reader=", currentReader);
+          // if not in NRT mode, just re-open the reader
           newReader = DirectoryReader.openIfChanged(currentReader);
 
           // verbose("reopen result", newReader);
         }
       } finally {
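As an aside on the openIfChanged contract relied on in the hunk above: both overloads return null when the index has not changed, in which case the existing reader stays valid and the caller keeps using it. A minimal, self-contained sketch of that calling pattern, with illustrative names (not Solr's actual code):

import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;

class OpenIfChangedSketch {
  // Returns a reader reflecting the latest changes; may be the same instance
  // that was passed in when nothing has changed.
  static DirectoryReader refresh(DirectoryReader current) throws IOException {
    DirectoryReader next = DirectoryReader.openIfChanged(current);
    if (next == null) {
      return current; // unchanged: keep (and keep referencing) the old reader
    }
    // a new reader was opened over the changed index; the caller is
    // responsible for eventually closing or decRef'ing the old one
    return next;
  }
}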
@@ -1427,7 +1436,7 @@ public final class SolrCore implements SolrInfoMBean {
       DirectoryReader newReader = newReaderCreator.call();
       tmp = new SolrIndexSearcher(this, newIndexDir, getLatestSchema(), getSolrConfig().indexConfig,
           (realtime ? "realtime":"main"), newReader, true, !realtime, true, directoryFactory);
-    } else if (solrConfig.reopenReaders) {
+    } else if (solrConfig.nrtMode) {
       RefCounted<IndexWriter> writer = getUpdateHandler().getSolrCoreState().getIndexWriter(this);
       DirectoryReader newReader = null;
       try {
@@ -17,7 +17,6 @@
 
 package org.apache.solr.update;
 
-import org.apache.commons.io.FileUtils;
 import org.apache.lucene.index.*;
 import org.apache.lucene.index.IndexWriter.IndexReaderWarmer;
 import org.apache.lucene.util.InfoStream;
@@ -34,7 +33,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.PrintStream;
 import java.util.List;
@@ -166,8 +164,8 @@ public class SolrIndexConfig {
       }
     }
     mergedSegmentWarmerInfo = getPluginInfo(prefix + "/mergedSegmentWarmer", solrConfig, def.mergedSegmentWarmerInfo);
-    if (mergedSegmentWarmerInfo != null && solrConfig.reopenReaders == false) {
-      throw new IllegalArgumentException("Supplying a mergedSegmentWarmer will do nothing since reopenReaders is false");
+    if (mergedSegmentWarmerInfo != null && solrConfig.nrtMode == false) {
+      throw new IllegalArgumentException("Supplying a mergedSegmentWarmer will do nothing since nrtMode is false");
     }
   }
 
@@ -22,6 +22,6 @@
   <luceneMatchVersion>${tests.luceneMatchVersion:LUCENE_CURRENT}</luceneMatchVersion>
   <indexConfig>
     <mergedSegmentWarmer class="org.apache.lucene.index.SimpleMergedSegmentWarmer"/>
-    <reopenReaders>false</reopenReaders> <!-- BAD -->
+    <nrtMode>false</nrtMode> <!-- BAD -->
   </indexConfig>
 </config>
@@ -35,7 +35,7 @@ A solrconfig.xml snippet containing indexConfig settings for randomized testing.
   <ramBufferSizeMB>${solr.tests.ramBufferSizeMB}</ramBufferSizeMB>
 
   <mergeScheduler class="${solr.tests.mergeScheduler}" />
-  <reopenReaders>${solr.tests.reopenReaders:true}</reopenReaders>
+  <nrtMode>${solr.tests.nrtMode:true}</nrtMode>
 
   <writeLockTimeout>1000</writeLockTimeout>
   <commitLockTimeout>10000</commitLockTimeout>
@@ -54,14 +54,14 @@ public class TestArbitraryIndexDir extends AbstractSolrTestCase{
   @BeforeClass
   public static void beforeClass() {
     // this test wants to start solr, and then open a separate indexwriter of its own on the same dir.
-    System.setProperty("solr.tests.reopenReaders", "false");
+    System.setProperty("solr.tests.nrtMode", "false");
     System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_
     savedFactory = System.getProperty("solr.DirectoryFactory");
     System.setProperty("solr.directoryFactory", "org.apache.solr.core.MockFSDirectoryFactory");
   }
 
   @AfterClass
   public static void afterClass() {
-    System.clearProperty("solr.tests.reopenReaders");
+    System.clearProperty("solr.tests.nrtMode");
     if (savedFactory == null) {
       System.clearProperty("solr.directoryFactory");
     } else {
@@ -18,7 +18,12 @@ package org.apache.solr.core;
  */
 
 import java.io.File;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.IdentityHashMap;
+import java.util.Set;
 
+import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.search.SolrIndexSearcher;
@@ -34,6 +39,9 @@ public class TestNRTOpen extends SolrTestCaseJ4 {
     System.setProperty("solr.directoryFactory", "solr.StandardDirectoryFactory");
     // and dont delete it initially
     System.setProperty("solr.test.leavedatadir", "true");
+    // set these so that merges won't break the test
+    System.setProperty("solr.tests.maxBufferedDocs", "100000");
+    System.setProperty("solr.tests.mergePolicy", "org.apache.lucene.index.LogDocMergePolicy");
     initCore("solrconfig-basic.xml", "schema-minimal.xml");
     // add a doc
     assertU(adoc("foo", "bar"));
@@ -43,6 +51,8 @@ public class TestNRTOpen extends SolrTestCaseJ4 {
     // boot up again over the same index
     dataDir = myDir;
     initCore("solrconfig-basic.xml", "schema-minimal.xml");
+    // startup
+    assertNRT(1);
   }
 
   @AfterClass
@@ -50,23 +60,94 @@ public class TestNRTOpen extends SolrTestCaseJ4 {
     // ensure we clean up after ourselves, this will fire before superclass...
     System.clearProperty("solr.test.leavedatadir");
     System.clearProperty("solr.directoryFactory");
+    System.clearProperty("solr.tests.maxBufferedDocs");
+    System.clearProperty("solr.tests.mergePolicy");
+  }
+
+  public void setUp() throws Exception {
+    super.setUp();
+    // delete all, then add initial doc
+    assertU(delQ("*:*"));
+    assertU(adoc("foo", "bar"));
+    assertU(commit());
   }
 
   public void testReaderIsNRT() {
-    assertNRT();
+    // core reload
     String core = h.getCore().getName();
     h.getCoreContainer().reload(core);
-    assertNRT();
+    assertNRT(1);
+
+    // add a doc and soft commit
+    assertU(adoc("baz", "doc"));
+    assertU(commit("softCommit", "true"));
+    assertNRT(2);
+
+    // add a doc and hard commit
+    assertU(adoc("bazz", "doc"));
+    assertU(commit());
+    assertNRT(3);
+
+    // add a doc and core reload
+    assertU(adoc("bazzz", "doc2"));
+    h.getCoreContainer().reload(core);
+    assertNRT(4);
   }
 
-  private void assertNRT() {
+  public void testSharedCores() {
+    // clear out any junk
+    assertU(optimize());
+
+    Set<Object> s1 = getCoreCacheKeys();
+    assertEquals(1, s1.size());
+
+    // add a doc, will go in a new segment
+    assertU(adoc("baz", "doc"));
+    assertU(commit("softCommit", "true"));
+
+    Set<Object> s2 = getCoreCacheKeys();
+    assertEquals(2, s2.size());
+    assertTrue(s2.containsAll(s1));
+
+    // add two docs, will go in a new segment
+    assertU(adoc("foo", "doc"));
+    assertU(adoc("foo2", "doc"));
+    assertU(commit());
+
+    Set<Object> s3 = getCoreCacheKeys();
+    assertEquals(3, s3.size());
+    assertTrue(s3.containsAll(s2));
+
+    // delete a doc
+    assertU(delQ("foo2:doc"));
+    assertU(commit());
+
+    // same cores
+    assertEquals(s3, getCoreCacheKeys());
+  }
+
+  static void assertNRT(int maxDoc) {
     RefCounted<SolrIndexSearcher> searcher = h.getCore().getSearcher();
     try {
       DirectoryReader ir = searcher.get().getIndexReader();
-      assertEquals(1, ir.maxDoc());
+      assertEquals(maxDoc, ir.maxDoc());
       assertTrue("expected NRT reader, got: " + ir, ir.toString().contains(":nrt"));
     } finally {
       searcher.decref();
     }
   }
+
+  private Set<Object> getCoreCacheKeys() {
+    RefCounted<SolrIndexSearcher> searcher = h.getCore().getSearcher();
+    Set<Object> set = Collections.newSetFromMap(new IdentityHashMap<Object,Boolean>());
+    try {
+      DirectoryReader ir = searcher.get().getIndexReader();
+      for (AtomicReaderContext context : ir.leaves()) {
+        set.add(context.reader().getCoreCacheKey());
+      }
+    } finally {
+      searcher.decref();
+    }
+    return set;
+  }
 }
@@ -0,0 +1,154 @@
+package org.apache.solr.core;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.File;
+import java.util.Collections;
+import java.util.IdentityHashMap;
+import java.util.Set;
+
+import org.apache.lucene.index.AtomicReaderContext;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.search.SolrIndexSearcher;
+import org.apache.solr.util.RefCounted;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+public class TestNonNRTOpen extends SolrTestCaseJ4 {
+
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    // use a filesystem, because we need to create an index, then "start up solr"
+    System.setProperty("solr.directoryFactory", "solr.StandardDirectoryFactory");
+    // and dont delete it initially
+    System.setProperty("solr.test.leavedatadir", "true");
+    // turn off nrt
+    System.setProperty("solr.tests.nrtMode", "false");
+    // set these so that merges won't break the test
+    System.setProperty("solr.tests.maxBufferedDocs", "100000");
+    System.setProperty("solr.tests.mergePolicy", "org.apache.lucene.index.LogDocMergePolicy");
+    initCore("solrconfig-basic.xml", "schema-minimal.xml");
+    // add a doc
+    assertU(adoc("foo", "bar"));
+    assertU(commit());
+    File myDir = dataDir;
+    deleteCore();
+    // boot up again over the same index
+    dataDir = myDir;
+    initCore("solrconfig-basic.xml", "schema-minimal.xml");
+    // startup
+    assertNotNRT(1);
+  }
+
+  public void setUp() throws Exception {
+    super.setUp();
+    // delete all, then add initial doc
+    assertU(delQ("*:*"));
+    assertU(adoc("foo", "bar"));
+    assertU(commit());
+  }
+
+  @AfterClass
+  public static void afterClass() throws Exception {
+    // ensure we clean up after ourselves, this will fire before superclass...
+    System.clearProperty("solr.test.leavedatadir");
+    System.clearProperty("solr.directoryFactory");
+    System.clearProperty("solr.tests.maxBufferedDocs");
+    System.clearProperty("solr.tests.mergePolicy");
+    System.clearProperty("solr.tests.nrtMode");
+  }
+
+  public void testReaderIsNotNRT() {
+    // startup
+    assertNotNRT(1);
+
+    // core reload
+    String core = h.getCore().getName();
+    h.getCoreContainer().reload(core);
+    assertNotNRT(1);
+
+    // add a doc and commit
+    assertU(adoc("baz", "doc"));
+    assertU(commit());
+    assertNotNRT(2);
+
+    // add a doc and core reload
+    assertU(adoc("bazz", "doc2"));
+    h.getCoreContainer().reload(core);
+    assertNotNRT(3);
+  }
+
+  public void testSharedCores() {
+    // clear out any junk
+    assertU(optimize());
+
+    Set<Object> s1 = getCoreCacheKeys();
+    assertEquals(1, s1.size());
+
+    // add a doc, will go in a new segment
+    assertU(adoc("baz", "doc"));
+    assertU(commit());
+
+    Set<Object> s2 = getCoreCacheKeys();
+    assertEquals(2, s2.size());
+    assertTrue(s2.containsAll(s1));
+
+    // add two docs, will go in a new segment
+    assertU(adoc("foo", "doc"));
+    assertU(adoc("foo2", "doc"));
+    assertU(commit());
+
+    Set<Object> s3 = getCoreCacheKeys();
+    assertEquals(3, s3.size());
+    assertTrue(s3.containsAll(s2));
+
+    // delete a doc
+    assertU(delQ("foo2:doc"));
+    assertU(commit());
+
+    // same cores
+    assertEquals(s3, getCoreCacheKeys());
+  }
+
+  static void assertNotNRT(int maxDoc) {
+    RefCounted<SolrIndexSearcher> searcher = h.getCore().getSearcher();
+    try {
+      DirectoryReader ir = searcher.get().getIndexReader();
+      assertEquals(maxDoc, ir.maxDoc());
+      assertFalse("expected non-NRT reader, got: " + ir, ir.toString().contains(":nrt"));
+    } finally {
+      searcher.decref();
+    }
+  }
+
+  private Set<Object> getCoreCacheKeys() {
+    RefCounted<SolrIndexSearcher> searcher = h.getCore().getSearcher();
+    Set<Object> set = Collections.newSetFromMap(new IdentityHashMap<Object,Boolean>());
+    try {
+      DirectoryReader ir = searcher.get().getIndexReader();
+      for (AtomicReaderContext context : ir.leaves()) {
+        set.add(context.reader().getCoreCacheKey());
+      }
+    } finally {
+      searcher.decref();
+    }
+    return set;
+  }
+}
@@ -348,7 +348,7 @@ public class TestReplicationHandler extends SolrTestCaseJ4 {
     useFactory(null); // force a persistent directory
 
     // read-only setting (no opening from indexwriter)
-    System.setProperty("solr.tests.reopenReaders", "false");
+    System.setProperty("solr.tests.nrtMode", "false");
     try {
       // stop and start so they see the new directory setting
       slaveJetty.stop();
@@ -361,7 +361,7 @@ public class TestReplicationHandler extends SolrTestCaseJ4 {
       slaveJetty.stop();
       slaveJetty.start(true);
     } finally {
-      System.clearProperty("solr.tests.reopenReaders"); // dont mess with other tests
+      System.clearProperty("solr.tests.nrtMode"); // dont mess with other tests
     }
 
     // Currently we open a writer on-demand. This is to test that we are correctly testing
@@ -257,11 +257,13 @@
        <unlockOnStartup>false</unlockOnStartup>
       -->
 
-    <!-- If true, IndexReaders will be reopened (often more efficient)
-         instead of closed and then opened. Default: true
+    <!-- If true, IndexReaders will be opened/reopened from the IndexWriter
+         instead of from the Directory. Hosts in a master/slave setup
+         should have this set to false while those in a SolrCloud
+         cluster need to be set to true. Default: true
       -->
     <!--
-    <reopenReaders>true</reopenReaders>
+    <nrtMode>true</nrtMode>
      -->
 
     <!-- Commit Deletion Policy
@@ -258,11 +258,13 @@
        <unlockOnStartup>false</unlockOnStartup>
       -->
 
-    <!-- If true, IndexReaders will be reopened (often more efficient)
-         instead of closed and then opened. Default: true
+    <!-- If true, IndexReaders will be opened/reopened from the IndexWriter
+         instead of from the Directory. Hosts in a master/slave setup
+         should have this set to false while those in a SolrCloud
+         cluster need to be set to true. Default: true
      -->
     <!--
-    <reopenReaders>true</reopenReaders>
+    <nrtMode>true</nrtMode>
      -->
 
     <!-- Commit Deletion Policy