commit c669127dc7
parent f6d9b703d6

    LUCENE-2804: port to trunk

    git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1044008 13f79535-47bb-0310-9956-ffa450edef68
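The diff below replaces direct FSDirectory.open(...) calls throughout the tests with a new LuceneTestCase.newFSDirectory(...) helper, which wraps a randomly chosen FSDirectory implementation in a MockDirectoryWrapper so that a directory left open fails the test. As a minimal sketch (not part of this commit; the test body and "example" names are hypothetical), a test written against the new helper looks roughly like this:

    import java.io.File;
    import org.apache.lucene.analysis.MockAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.util.LuceneTestCase;
    import org.apache.lucene.util._TestUtil;

    public class ExampleTest extends LuceneTestCase {
      public void testExample() throws Exception {
        File path = _TestUtil.getTempDir("example");  // hypothetical temp index location
        Directory dir = newFSDirectory(path);         // MockDirectoryWrapper over a random FSDirectory
        IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
        w.addDocument(new Document());
        w.close();
        dir.close();  // forgetting this now fails the test via the open-directory tracking
      }
    }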
@@ -26,7 +26,7 @@ import org.apache.lucene.queryParser.QueryParser;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Searcher;
-import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.Directory;
 import org.apache.tools.ant.Project;
 import org.apache.tools.ant.types.FileSet;
 import org.apache.lucene.util.LuceneTestCase;
@@ -41,7 +41,7 @@ public class IndexTaskTest extends LuceneTestCase {
 
   private Searcher searcher;
   private Analyzer analyzer;
-  private FSDirectory dir;
+  private Directory dir;
 
 
   /**
@@ -68,7 +68,7 @@ public class IndexTaskTest extends LuceneTestCase {
     task.setProject(project);
     task.execute();
 
-    dir = FSDirectory.open(indexDir);
+    dir = newFSDirectory(indexDir);
     searcher = new IndexSearcher(dir, true);
     analyzer = new StopAnalyzer(TEST_VERSION_CURRENT);
   }
 
@@ -23,7 +23,7 @@ import org.apache.lucene.benchmark.quality.trec.TrecTopicsReader;
 import org.apache.lucene.benchmark.quality.utils.SimpleQQParser;
 import org.apache.lucene.benchmark.quality.utils.SubmissionReport;
 import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.Directory;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -67,7 +67,8 @@ public class TestQualityRun extends BenchmarkTestCase {
     // validate topics & judgments match each other
     judge.validateData(qqs, logger);
 
-    IndexSearcher searcher = new IndexSearcher(FSDirectory.open(new File(getWorkDir(),"index")), true);
+    Directory dir = newFSDirectory(new File(getWorkDir(),"index"));
+    IndexSearcher searcher = new IndexSearcher(dir, true);
 
     QualityQueryParser qqParser = new SimpleQQParser("title","body");
     QualityBenchmark qrun = new QualityBenchmark(qqs, qqParser, searcher, docNameField);
@@ -131,8 +132,9 @@ public class TestQualityRun extends BenchmarkTestCase {
     for (int j = 1; j <= QualityStats.MAX_POINTS; j++) {
       assertTrue("avg p_at_"+j+" should be hurt: "+avg.getPrecisionAt(j), 1.0 > avg.getPrecisionAt(j));
     }
 
     searcher.close();
+    dir.close();
   }
 
   public void testTrecTopicsReader() throws Exception {
 
@@ -21,7 +21,7 @@ import java.io.File;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
-import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
 
@@ -33,7 +33,7 @@ public class TestIndexSplitter extends LuceneTestCase {
     File destDir = new File(TEMP_DIR, "testfilesplitterdest");
     _TestUtil.rmDir(destDir);
     destDir.mkdirs();
-    FSDirectory fsDir = FSDirectory.open(dir);
+    Directory fsDir = newFSDirectory(dir);
 
     LogMergePolicy mergePolicy = new LogByteSizeMergePolicy();
     mergePolicy.setNoCFSRatio(1);
@@ -58,14 +58,19 @@ public class TestIndexSplitter extends LuceneTestCase {
       iw.addDocument(doc);
     }
     iw.commit();
-    assertEquals(3, iw.getReader().getSequentialSubReaders().length);
+    IndexReader iwReader = iw.getReader();
+    assertEquals(3, iwReader.getSequentialSubReaders().length);
+    iwReader.close();
     iw.close();
     // we should have 2 segments now
     IndexSplitter is = new IndexSplitter(dir);
     String splitSegName = is.infos.info(1).name;
     is.split(destDir, new String[] {splitSegName});
-    IndexReader r = IndexReader.open(FSDirectory.open(destDir), true);
+    Directory fsDirDest = newFSDirectory(destDir);
+    IndexReader r = IndexReader.open(fsDirDest, true);
     assertEquals(50, r.maxDoc());
     r.close();
+    fsDirDest.close();
 
     // now test cmdline
     File destDir2 = new File(TEMP_DIR, "testfilesplitterdest2");
@@ -73,12 +78,17 @@ public class TestIndexSplitter extends LuceneTestCase {
     destDir2.mkdirs();
     IndexSplitter.main(new String[] {dir.getAbsolutePath(), destDir2.getAbsolutePath(), splitSegName});
     assertEquals(3, destDir2.listFiles().length);
-    r = IndexReader.open(FSDirectory.open(destDir2), true);
+    Directory fsDirDest2 = newFSDirectory(destDir2);
+    r = IndexReader.open(fsDirDest2, true);
     assertEquals(50, r.maxDoc());
     r.close();
+    fsDirDest2.close();
 
     // now remove the copied segment from src
     IndexSplitter.main(new String[] {dir.getAbsolutePath(), "-d", splitSegName});
-    r = IndexReader.open(FSDirectory.open(dir), true);
+    r = IndexReader.open(fsDir, true);
     assertEquals(2, r.getSequentialSubReaders().length);
     r.close();
+    fsDir.close();
   }
 }
 
@@ -28,14 +28,14 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Searcher;
 import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
 
 public class TestWordnet extends LuceneTestCase {
   private Searcher searcher;
+  private Directory dir;
 
-  String storePathName =
-    new File(TEMP_DIR,"testLuceneWordnet").getAbsolutePath();
+  String storePathName = new File(TEMP_DIR,"testLuceneWordnet").getAbsolutePath();
 
   @Override
   public void setUp() throws Exception {
@@ -48,7 +48,8 @@ public class TestWordnet extends LuceneTestCase {
       Syns2Index.main(commandLineArgs);
     } catch (Throwable t) { throw new RuntimeException(t); }
 
-    searcher = new IndexSearcher(FSDirectory.open(new File(storePathName)), true);
+    dir = newFSDirectory(new File(storePathName));
+    searcher = new IndexSearcher(dir, true);
   }
 
   public void testExpansion() throws IOException {
@@ -72,6 +73,7 @@ public class TestWordnet extends LuceneTestCase {
   @Override
   public void tearDown() throws Exception {
     searcher.close();
+    dir.close();
     rmDir(storePathName); // delete our temporary synonym index
     super.tearDown();
   }
 
@@ -153,7 +153,7 @@ public abstract class Directory implements Closeable {
    *
    * @param lockFactory instance of {@link LockFactory}.
    */
-  public void setLockFactory(LockFactory lockFactory) {
+  public void setLockFactory(LockFactory lockFactory) throws IOException {
     assert lockFactory != null;
     this.lockFactory = lockFactory;
     lockFactory.setLockPrefix(this.getLockID());
 
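Directory.setLockFactory now declares throws IOException, because computing the lock ID can touch the filesystem in FSDirectory subclasses. Callers outside an IOException-throwing context have to absorb it. A minimal sketch of an adapted caller (the RAMDirectory constructor further down in this diff does exactly this; NoLockFactory.getNoLockFactory() is an API this diff itself uses):

    Directory d = new RAMDirectory();
    try {
      d.setLockFactory(NoLockFactory.getNoLockFactory());
    } catch (IOException e) {
      // cannot actually happen here: RAMDirectory computes its lock ID without I/O
      throw new RuntimeException(e);
    }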
@@ -156,20 +156,6 @@ public abstract class FSDirectory extends Directory {
       throw new NoSuchDirectoryException("file '" + directory + "' exists but is not a directory");
 
     setLockFactory(lockFactory);
-
-    // for filesystem based LockFactory, delete the lockPrefix, if the locks are placed
-    // in index dir. If no index dir is given, set ourselves
-    if (lockFactory instanceof FSLockFactory) {
-      final FSLockFactory lf = (FSLockFactory) lockFactory;
-      final File dir = lf.getLockDir();
-      // if the lock factory has no lockDir set, use the this directory as lockDir
-      if (dir == null) {
-        lf.setLockDir(directory);
-        lf.setLockPrefix(null);
-      } else if (dir.getCanonicalPath().equals(directory.getCanonicalPath())) {
-        lf.setLockPrefix(null);
-      }
-    }
   }
 
   /** Creates an FSDirectory instance, trying to pick the
@@ -209,6 +195,26 @@ public abstract class FSDirectory extends Directory {
     }
   }
 
+  @Override
+  public void setLockFactory(LockFactory lockFactory) throws IOException {
+    super.setLockFactory(lockFactory);
+
+    // for filesystem based LockFactory, delete the lockPrefix, if the locks are placed
+    // in index dir. If no index dir is given, set ourselves
+    if (lockFactory instanceof FSLockFactory) {
+      final FSLockFactory lf = (FSLockFactory) lockFactory;
+      final File dir = lf.getLockDir();
+      // if the lock factory has no lockDir set, use the this directory as lockDir
+      if (dir == null) {
+        lf.setLockDir(directory);
+        lf.setLockPrefix(null);
+      } else if (dir.getCanonicalPath().equals(directory.getCanonicalPath())) {
+        lf.setLockPrefix(null);
+      }
+    }
+
+  }
+
   /** Lists all files (not subdirectories) in the
    * directory. This method never returns null (throws
    * {@link IOException} instead).
 
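The net effect of the two FSDirectory hunks above: the lock-prefix fixup that used to run only inside the constructor now lives in an overridden setLockFactory, so it is applied whenever a lock factory is installed on an existing FSDirectory (as newFSDirectoryImpl at the bottom of this diff does), not just at construction time.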
@@ -48,7 +48,11 @@ public class RAMDirectory extends Directory implements Serializable {
 
   /** Constructs an empty {@link Directory}. */
   public RAMDirectory() {
-    setLockFactory(new SingleInstanceLockFactory());
+    try {
+      setLockFactory(new SingleInstanceLockFactory());
+    } catch (IOException e) {
+      // Cannot happen
+    }
   }
 
   /**
 
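The empty catch is safe here: RAMDirectory does not override getLockID with anything that performs I/O, so setLockFactory cannot actually throw for an in-memory directory; the try/catch exists only to satisfy the newly checked signature.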
@@ -58,6 +58,7 @@ public class Test2BTerms extends LuceneTestCase {
       bytes.length = TOKEN_LEN;
     }
 
+    @Override
     public boolean incrementToken() {
       if (tokenCount >= tokensPerDoc) {
         return false;
@@ -67,6 +68,7 @@ public class Test2BTerms extends LuceneTestCase {
       return true;
     }
 
+    @Override
     public void reset() {
       tokenCount = 0;
     }
@@ -131,7 +133,7 @@ public class Test2BTerms extends LuceneTestCase {
 
     int TERMS_PER_DOC = 1000000;
 
-    Directory dir = FSDirectory.open(_TestUtil.getTempDir("2BTerms"));
+    Directory dir = newFSDirectory(_TestUtil.getTempDir("2BTerms"));
     IndexWriter w = new IndexWriter(
       dir,
       new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
 
@@ -195,7 +195,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
 
     // Second in an FSDirectory:
     File dirPath = _TestUtil.getTempDir("lucene.test.atomic");
-    directory = FSDirectory.open(dirPath);
+    directory = newFSDirectory(dirPath);
     runTest(directory);
     directory.close();
     _TestUtil.rmDir(dirPath);
 
@@ -45,7 +45,6 @@ import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.NumericRangeQuery;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.util.BytesRef;
@@ -152,7 +151,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
       unzip(getDataFile("unsupported." + unsupportedNames[i] + ".zip"), unsupportedNames[i]);
 
       String fullPath = fullDir(unsupportedNames[i]);
-      Directory dir = FSDirectory.open(new File(fullPath));
+      Directory dir = newFSDirectory(new File(fullPath));
 
       IndexReader reader = null;
       IndexWriter writer = null;
@@ -212,7 +211,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
       unzip(getDataFile("index." + oldNames[i] + ".zip"), oldNames[i]);
 
       String fullPath = fullDir(oldNames[i]);
-      Directory dir = FSDirectory.open(new File(fullPath));
+      Directory dir = newFSDirectory(new File(fullPath));
 
       IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
           TEST_VERSION_CURRENT, new MockAnalyzer()));
@@ -230,7 +229,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     for (String name : oldNames) {
       unzip(getDataFile("index." + name + ".zip"), name);
       String fullPath = fullDir(name);
-      Directory dir = FSDirectory.open(new File(fullPath));
+      Directory dir = newFSDirectory(new File(fullPath));
 
       Directory targetDir = newDirectory();
       IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(
@@ -250,7 +249,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     for (String name : oldNames) {
       unzip(getDataFile("index." + name + ".zip"), name);
       String fullPath = fullDir(name);
-      Directory dir = FSDirectory.open(new File(fullPath));
+      Directory dir = newFSDirectory(new File(fullPath));
       IndexReader reader = IndexReader.open(dir);
 
       Directory targetDir = newDirectory();
@@ -307,7 +306,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
 
     dirName = fullDir(dirName);
 
-    Directory dir = FSDirectory.open(new File(dirName));
+    Directory dir = newFSDirectory(new File(dirName));
     IndexSearcher searcher = new IndexSearcher(dir, true);
     IndexReader reader = searcher.getIndexReader();
 
@@ -372,7 +371,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     String origDirName = dirName;
     dirName = fullDir(dirName);
 
-    Directory dir = FSDirectory.open(new File(dirName));
+    Directory dir = newFSDirectory(new File(dirName));
     // open writer
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
     // add 10 docs
@@ -437,7 +436,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
 
     dirName = fullDir(dirName);
 
-    Directory dir = FSDirectory.open(new File(dirName));
+    Directory dir = newFSDirectory(new File(dirName));
 
     // make sure searching sees right # hits
     IndexSearcher searcher = new IndexSearcher(dir, true);
@@ -486,7 +485,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
 
     dirName = fullDir(dirName);
 
-    Directory dir = FSDirectory.open(new File(dirName));
+    Directory dir = newFSDirectory(new File(dirName));
     IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10);
     ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(doCFS);
     ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundDocStore(doCFS);
@@ -525,7 +524,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     rmDir(outputDir);
 
     try {
-      Directory dir = FSDirectory.open(new File(fullDir(outputDir)));
+      Directory dir = newFSDirectory(new File(fullDir(outputDir)));
 
       LogMergePolicy mergePolicy = newLogMergePolicy(true, 10);
       mergePolicy.setNoCFSRatio(1); // This test expects all of its segments to be in CFS
@@ -658,7 +657,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     for(int i=0;i<oldNames.length;i++) {
       unzip(getDataFile("index." + oldNames[i] + ".zip"), oldNames[i]);
       String fullPath = fullDir(oldNames[i]);
-      Directory dir = FSDirectory.open(new File(fullPath));
+      Directory dir = newFSDirectory(new File(fullPath));
       IndexReader r = IndexReader.open(dir);
       TermsEnum terms = MultiFields.getFields(r).terms("content").iterator();
       BytesRef t = terms.next();
@@ -704,7 +703,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
 
       unzip(getDataFile("index." + oldNames[i] + ".zip"), oldNames[i]);
       String fullPath = fullDir(oldNames[i]);
-      Directory dir = FSDirectory.open(new File(fullPath));
+      Directory dir = newFSDirectory(new File(fullPath));
       IndexSearcher searcher = new IndexSearcher(dir, true);
 
       for (int id=10; id<15; id++) {
 
@@ -33,8 +33,8 @@ import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.index.codecs.CodecProvider;
 
@@ -47,12 +47,10 @@ public class TestDoc extends LuceneTestCase {
       TestRunner.run (new TestSuite(TestDoc.class));
   }
 
-
   private File workDir;
   private File indexDir;
   private LinkedList<File> files;
 
-
   /** Set the test case. This test case needs
    * a few text files created in the current working directory.
    */
@@ -65,7 +63,7 @@ public class TestDoc extends LuceneTestCase {
     indexDir = new File(workDir, "testIndex");
     indexDir.mkdirs();
 
-    Directory directory = FSDirectory.open(indexDir);
+    Directory directory = newFSDirectory(indexDir);
     directory.close();
 
     files = new LinkedList<File>();
@@ -110,7 +108,7 @@ public class TestDoc extends LuceneTestCase {
     StringWriter sw = new StringWriter();
     PrintWriter out = new PrintWriter(sw, true);
 
-    Directory directory = FSDirectory.open(indexDir);
+    Directory directory = newFSDirectory(indexDir);
     IndexWriter writer = new IndexWriter(
       directory,
       newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
@@ -144,7 +142,7 @@ public class TestDoc extends LuceneTestCase {
     sw = new StringWriter();
     out = new PrintWriter(sw, true);
 
-    directory = FSDirectory.open(indexDir);
+    directory = newFSDirectory(indexDir);
     writer = new IndexWriter(
       directory,
       newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
@@ -233,7 +231,7 @@ public class TestDoc extends LuceneTestCase {
 
     DocsAndPositionsEnum positions = tis.docsAndPositions(reader.getDeletedDocs(), null);
 
-    while (positions.nextDoc() != positions.NO_MORE_DOCS) {
+    while (positions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
       out.print(" doc=" + positions.docID());
       out.print(" TF=" + positions.freq());
       out.print(" pos=");
 
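Several hunks in this commit swap DocsEnum.NO_MORE_DOCS (and the even more fragile instance reference positions.NO_MORE_DOCS above) for DocIdSetIterator.NO_MORE_DOCS, the constant's actual declaring class. A minimal sketch of the iteration idiom the diff standardizes on, using calls that appear in this diff; "field" and "term" are placeholder values:

    DocsEnum docs = MultiFields.getTermDocsEnum(reader,
                                                MultiFields.getDeletedDocs(reader),
                                                "field",
                                                new BytesRef("term"));
    // getTermDocsEnum may return null if the term does not exist
    while (docs != null && docs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
      int docID = docs.docID();  // per-document work goes here
    }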
@@ -33,7 +33,6 @@ import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.BufferedIndexInput;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.LuceneTestCase;
@@ -160,7 +159,6 @@ public class TestFieldsReader extends LuceneTestCase {
     assertTrue(dir != null);
     assertTrue(fieldInfos != null);
     FieldsReader reader = new FieldsReader(dir, TEST_SEGMENT_NAME, fieldInfos);
-    assertTrue(reader != null);
     assertTrue(reader.size() == 1);
     Set<String> loadFieldNames = new HashSet<String>();
     loadFieldNames.add(DocHelper.TEXT_FIELD_1_KEY);
@@ -174,6 +172,7 @@ public class TestFieldsReader extends LuceneTestCase {
 
     // Use LATENT instead of LAZY
     SetBasedFieldSelector fieldSelector = new SetBasedFieldSelector(loadFieldNames, lazyFieldNames) {
+        @Override
         public FieldSelectorResult accept(String fieldName) {
           final FieldSelectorResult result = super.accept(fieldName);
           if (result == FieldSelectorResult.LAZY_LOAD) {
@@ -290,7 +289,7 @@ public class TestFieldsReader extends LuceneTestCase {
     String userName = System.getProperty("user.name");
     File file = new File(TEMP_DIR, "lazyDir" + userName);
     _TestUtil.rmDir(file);
-    FSDirectory tmpDir = FSDirectory.open(file);
+    Directory tmpDir = newFSDirectory(file);
     assertTrue(tmpDir != null);
 
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE);
@@ -345,6 +344,7 @@ public class TestFieldsReader extends LuceneTestCase {
       reader.close();
 
     }
+    tmpDir.close();
     if (VERBOSE) {
       System.out.println("Average Non-lazy time (should be very close to zero): " + regularTime / length + " ms for " + length + " reads");
       System.out.println("Average Lazy Time (should be greater than zero): " + lazyTime / length + " ms for " + length + " reads");
@@ -388,9 +388,10 @@ public class TestFieldsReader extends LuceneTestCase {
 
   public static class FaultyFSDirectory extends Directory {
 
-    FSDirectory fsDir;
+    Directory fsDir;
+
     public FaultyFSDirectory(File dir) throws IOException {
-      fsDir = FSDirectory.open(dir);
+      fsDir = newFSDirectory(dir);
       lockFactory = fsDir.getLockFactory();
     }
     @Override
 
@@ -39,13 +39,13 @@ import org.apache.lucene.document.SetBasedFieldSelector;
 import org.apache.lucene.index.IndexReader.FieldOption;
 import org.apache.lucene.index.codecs.CodecProvider;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.LockObtainFailedException;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.store.NoSuchDirectoryException;
@@ -306,15 +306,14 @@ public class TestIndexReader extends LuceneTestCase
                                      IndexReader reader,
                                      Term term,
                                      int expected)
-    throws IOException
-    {
+    throws IOException {
       DocsEnum tdocs = MultiFields.getTermDocsEnum(reader,
                                                    MultiFields.getDeletedDocs(reader),
                                                    term.field(),
                                                    new BytesRef(term.text()));
       int count = 0;
       if (tdocs != null) {
-        while(tdocs.nextDoc()!= tdocs.NO_MORE_DOCS) {
+        while(tdocs.nextDoc()!= DocIdSetIterator.NO_MORE_DOCS) {
           count++;
         }
       }
@@ -524,19 +523,16 @@ public class TestIndexReader extends LuceneTestCase
     // Make sure you can set norms & commit even if a reader
     // is open against the index:
     public void testWritingNorms() throws IOException {
-        File indexDir = new File(TEMP_DIR, "lucenetestnormwriter");
-        Directory dir = FSDirectory.open(indexDir);
-        IndexWriter writer;
-        IndexReader reader;
+        Directory dir = newDirectory();
         Term searchTerm = new Term("content", "aaa");
 
         // add 1 documents with term : aaa
-        writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
         addDoc(writer, searchTerm.text());
         writer.close();
 
         // now open reader & set norm for doc 0
-        reader = IndexReader.open(dir, false);
+        IndexReader reader = IndexReader.open(dir, false);
         reader.setNorm(0, "content", (float) 2.0);
 
         // we should be holding the write lock now:
@@ -561,8 +557,6 @@ public class TestIndexReader extends LuceneTestCase
 
         reader2.close();
         dir.close();
-
-        rmDir(indexDir);
     }
 
 
@@ -707,7 +701,7 @@ public class TestIndexReader extends LuceneTestCase
     public void testFilesOpenClose() throws IOException {
       // Create initial data set
       File dirFile = _TestUtil.getTempDir("TestIndexReader.testFilesOpenClose");
-      Directory dir = FSDirectory.open(dirFile);
+      Directory dir = newFSDirectory(dirFile);
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
       addDoc(writer, "test");
       writer.close();
@@ -715,7 +709,7 @@ public class TestIndexReader extends LuceneTestCase
 
       // Try to erase the data - this ensures that the writer closed all files
      _TestUtil.rmDir(dirFile);
-      dir = FSDirectory.open(dirFile);
+      dir = newFSDirectory(dirFile);
 
       // Now create the data set again, just as before
       writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
@@ -724,7 +718,7 @@ public class TestIndexReader extends LuceneTestCase
       dir.close();
 
       // Now open existing directory and test that reader closes all files
-      dir = FSDirectory.open(dirFile);
+      dir = newFSDirectory(dirFile);
       IndexReader reader1 = IndexReader.open(dir, false);
       reader1.close();
       dir.close();
@@ -1131,7 +1125,7 @@ public class TestIndexReader extends LuceneTestCase
 
     public void testOpenReaderAfterDelete() throws IOException {
       File dirFile = new File(TEMP_DIR, "deletetest");
-      Directory dir = FSDirectory.open(dirFile);
+      Directory dir = newFSDirectory(dirFile);
       try {
         IndexReader.open(dir, false);
         fail("expected FileNotFoundException");
@@ -1313,19 +1307,11 @@ public class TestIndexReader extends LuceneTestCase
         writer.addDocument(doc);
     }
 
-    private void addDoc(IndexWriter writer, String value) throws IOException
-    {
+    private void addDoc(IndexWriter writer, String value) throws IOException {
         Document doc = new Document();
         doc.add(newField("content", value, Field.Store.NO, Field.Index.ANALYZED));
         writer.addDocument(doc);
     }
-    private void rmDir(File dir) {
-        File[] files = dir.listFiles();
-        for (int i = 0; i < files.length; i++) {
-            files[i].delete();
-        }
-        dir.delete();
-    }
 
     public static void assertIndexEquals(IndexReader index1, IndexReader index2) throws IOException {
         assertEquals("IndexReaders have different values for numDocs.", index1.numDocs(), index2.numDocs());
@@ -1404,8 +1390,8 @@ public class TestIndexReader extends LuceneTestCase
             DocsAndPositionsEnum tp1 = enum1.docsAndPositions(delDocs, null);
             DocsAndPositionsEnum tp2 = enum2.docsAndPositions(delDocs, null);
 
-            while(tp1.nextDoc() != DocsEnum.NO_MORE_DOCS) {
-              assertTrue(tp2.nextDoc() != DocsEnum.NO_MORE_DOCS);
+            while(tp1.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
+              assertTrue(tp2.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
               assertEquals("Different doc id in postinglist of term " + enum1.term() + ".", tp1.docID(), tp2.docID());
               assertEquals("Different term frequence in postinglist of term " + enum1.term() + ".", tp1.freq(), tp2.freq());
               for (int i = 0; i < tp1.freq(); i++) {
@@ -1590,7 +1576,7 @@ public class TestIndexReader extends LuceneTestCase
   // IndexReader on a non-existent directory, you get a
   // good exception
   public void testNoDir() throws Throwable {
-    Directory dir = FSDirectory.open(_TestUtil.getTempDir("doesnotexist"));
+    Directory dir = newFSDirectory(_TestUtil.getTempDir("doesnotexist"));
     try {
       IndexReader.open(dir, true);
       fail("did not hit expected exception");
 
@@ -17,7 +17,6 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
-import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Random;
@@ -34,7 +33,6 @@ import org.apache.lucene.index.SegmentReader.Norm;
 import org.apache.lucene.search.DefaultSimilarity;
 import org.apache.lucene.search.Similarity;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 
 /**
@@ -79,8 +77,7 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
    */
   public void testNorms() throws IOException {
     // test with a single index: index1
-    File indexDir1 = new File(TEMP_DIR, "lucenetestindex1");
-    Directory dir1 = FSDirectory.open(indexDir1);
+    Directory dir1 = newDirectory();
     IndexWriter.unlock(dir1);
 
     norms = new ArrayList<Float>();
@@ -98,15 +95,13 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
     modifiedNorms = new ArrayList<Float>();
     numDocNorms = 0;
 
-    File indexDir2 = new File(TEMP_DIR, "lucenetestindex2");
-    Directory dir2 = FSDirectory.open(indexDir2);
+    Directory dir2 = newDirectory();
 
     createIndex(random, dir2);
     doTestNorms(random, dir2);
 
     // add index1 and index2 to a third index: index3
-    File indexDir3 = new File(TEMP_DIR, "lucenetestindex3");
-    Directory dir3 = FSDirectory.open(indexDir3);
+    Directory dir3 = newDirectory();
 
     createIndex(random, dir3);
     IndexWriter iw = new IndexWriter(
@@ -163,6 +158,9 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
     modifyNormsForF1(irc3);
     verifyIndex(irc3);
     irc3.flush();
+
+    ir.close();
+    irc.close();
     irc3.close();
   }
 
@@ -40,7 +40,6 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.BitVector;
@@ -149,12 +148,12 @@ public class TestIndexReaderReopen extends LuceneTestCase {
   // in each iteration verify the work of previous iteration.
   // try this once with reopen once recreate, on both RAMDir and FSDir.
   public void testCommitReopenFS () throws IOException {
-    Directory dir = FSDirectory.open(indexDir);
+    Directory dir = newFSDirectory(indexDir);
     doTestReopenWithCommit(random, dir, true);
     dir.close();
   }
   public void testCommitRecreateFS () throws IOException {
-    Directory dir = FSDirectory.open(indexDir);
+    Directory dir = newFSDirectory(indexDir);
     doTestReopenWithCommit(random, dir, false);
     dir.close();
   }
 
@@ -48,6 +48,7 @@ import org.apache.lucene.document.Field.Index;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.Field.TermVector;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.PhraseQuery;
@@ -57,7 +58,6 @@ import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.spans.SpanTermQuery;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.Lock;
 import org.apache.lucene.store.LockFactory;
@@ -319,34 +319,30 @@ public class TestIndexWriter extends LuceneTestCase {
     // reader holds it open (this fails pre lock-less
     // commits on windows):
     public void testCreateWithReader() throws IOException {
-      File indexDir = _TestUtil.getTempDir("lucenetestindexwriter");
+      Directory dir = newDirectory();
 
-      try {
-        Directory dir = FSDirectory.open(indexDir);
+      // add one document & close writer
+      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+      addDoc(writer);
+      writer.close();
 
-        // add one document & close writer
-        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
-        addDoc(writer);
-        writer.close();
+      // now open reader:
+      IndexReader reader = IndexReader.open(dir, true);
+      assertEquals("should be one document", reader.numDocs(), 1);
 
-        // now open reader:
-        IndexReader reader = IndexReader.open(dir, true);
-        assertEquals("should be one document", reader.numDocs(), 1);
+      // now open index for create:
+      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+      assertEquals("should be zero documents", writer.maxDoc(), 0);
+      addDoc(writer);
+      writer.close();
 
-        // now open index for create:
-        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
-        assertEquals("should be zero documents", writer.maxDoc(), 0);
-        addDoc(writer);
-        writer.close();
+      assertEquals("should be one document", reader.numDocs(), 1);
+      IndexReader reader2 = IndexReader.open(dir, true);
+      assertEquals("should be one document", reader2.numDocs(), 1);
+      reader.close();
+      reader2.close();
 
-        assertEquals("should be one document", reader.numDocs(), 1);
-        IndexReader reader2 = IndexReader.open(dir, true);
-        assertEquals("should be one document", reader2.numDocs(), 1);
-        reader.close();
-        reader2.close();
-      } finally {
-        rmDir(indexDir);
-      }
+      dir.close();
     }
 
     public void testChangesAfterClose() throws IOException {
@@ -943,16 +939,6 @@ public class TestIndexWriter extends LuceneTestCase {
       dir.close();
     }
 
-    private void rmDir(File dir) {
-      File[] files = dir.listFiles();
-      if (files != null) {
-        for (int i = 0; i < files.length; i++) {
-          files[i].delete();
-        }
-      }
-      dir.delete();
-    }
-
     /**
      * Test that no NullPointerException will be raised,
      * when adding one document with a single, empty field
@@ -1727,7 +1713,7 @@ public class TestIndexWriter extends LuceneTestCase {
                                                           "field",
                                                           new BytesRef("a"));
 
      assertTrue(tps.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
-     // was: assertTrue(tps.nextDoc() != DocsEnum.NO_MORE_DOCS);
      assertEquals(1, tps.freq());
      assertEquals(0, tps.nextPosition());
      w.close();
@@ -1967,11 +1953,9 @@ public class TestIndexWriter extends LuceneTestCase {
   // create=true does not remove non-index files
 
   public void testOtherFiles() throws Throwable {
-    File indexDir = new File(TEMP_DIR, "otherfiles");
-    Directory dir = FSDirectory.open(indexDir);
+    Directory dir = newDirectory();
     try {
       // Create my own random file:
-
       IndexOutput out = dir.createOutput("myrandomfile");
       out.writeByte((byte) 42);
       out.close();
@@ -1981,7 +1965,6 @@ public class TestIndexWriter extends LuceneTestCase {
       assertTrue(dir.fileExists("myrandomfile"));
     } finally {
       dir.close();
-      _TestUtil.rmDir(indexDir);
     }
   }
 
@@ -2191,12 +2174,12 @@ public class TestIndexWriter extends LuceneTestCase {
 
 
     // test that the terms were indexed.
-    assertTrue(MultiFields.getTermDocsEnum(ir, null, "binary", new BytesRef("doc1field1")).nextDoc() != DocsEnum.NO_MORE_DOCS);
-    assertTrue(MultiFields.getTermDocsEnum(ir, null, "binary", new BytesRef("doc2field1")).nextDoc() != DocsEnum.NO_MORE_DOCS);
-    assertTrue(MultiFields.getTermDocsEnum(ir, null, "binary", new BytesRef("doc3field1")).nextDoc() != DocsEnum.NO_MORE_DOCS);
-    assertTrue(MultiFields.getTermDocsEnum(ir, null, "string", new BytesRef("doc1field2")).nextDoc() != DocsEnum.NO_MORE_DOCS);
-    assertTrue(MultiFields.getTermDocsEnum(ir, null, "string", new BytesRef("doc2field2")).nextDoc() != DocsEnum.NO_MORE_DOCS);
-    assertTrue(MultiFields.getTermDocsEnum(ir, null, "string", new BytesRef("doc3field2")).nextDoc() != DocsEnum.NO_MORE_DOCS);
+    assertTrue(MultiFields.getTermDocsEnum(ir, null, "binary", new BytesRef("doc1field1")).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+    assertTrue(MultiFields.getTermDocsEnum(ir, null, "binary", new BytesRef("doc2field1")).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+    assertTrue(MultiFields.getTermDocsEnum(ir, null, "binary", new BytesRef("doc3field1")).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+    assertTrue(MultiFields.getTermDocsEnum(ir, null, "string", new BytesRef("doc1field2")).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+    assertTrue(MultiFields.getTermDocsEnum(ir, null, "string", new BytesRef("doc2field2")).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+    assertTrue(MultiFields.getTermDocsEnum(ir, null, "string", new BytesRef("doc3field2")).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
 
     ir.close();
     dir.close();
@@ -2467,7 +2450,7 @@ public class TestIndexWriter extends LuceneTestCase {
     while(t.next() != null) {
       final DocsEnum docs = t.docs(null, null);
       assertEquals(0, docs.nextDoc());
-      assertEquals(DocsEnum.NO_MORE_DOCS, docs.nextDoc());
+      assertEquals(DocIdSetIterator.NO_MORE_DOCS, docs.nextDoc());
       count++;
     }
     assertEquals(300, count);
@@ -2581,6 +2564,7 @@ public class TestIndexWriter extends LuceneTestCase {
     public FlushCountingIndexWriter(Directory dir, IndexWriterConfig iwc) throws IOException {
       super(dir, iwc);
     }
+    @Override
     public void doAfterFlush() {
      flushCount++;
    }
@@ -2642,8 +2626,9 @@ public class TestIndexWriter extends LuceneTestCase {
     // Tests that if FSDir is opened w/ a NoLockFactory (or SingleInstanceLF),
     // then IndexWriter ctor succeeds. Previously (LUCENE-2386) it failed
     // when listAll() was called in IndexFileDeleter.
-    FSDirectory dir = FSDirectory.open(new File(TEMP_DIR, "emptyFSDirNoLock"), NoLockFactory.getNoLockFactory());
+    Directory dir = newFSDirectory(new File(TEMP_DIR, "emptyFSDirNoLock"), NoLockFactory.getNoLockFactory());
     new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())).close();
+    dir.close();
   }
 
   public void testEmptyDirRollback() throws Exception {
@@ -2691,28 +2676,23 @@ public class TestIndexWriter extends LuceneTestCase {
   }
 
   public void testNoSegmentFile() throws IOException {
-    File tempDir = _TestUtil.getTempDir("noSegmentFile");
-    try {
-      Directory dir = FSDirectory.open(tempDir);
-      dir.setLockFactory(NoLockFactory.getNoLockFactory());
-      IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
-
-      Document doc = new Document();
-      doc.add(newField("c", "val", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
-      w.addDocument(doc);
-      w.addDocument(doc);
-      IndexWriter w2 = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2)
-          .setOpenMode(OpenMode.CREATE));
-
-      w2.close();
-      // If we don't do that, the test fails on Windows
-      w.rollback();
-      dir.close();
-    } finally {
-      _TestUtil.rmDir(tempDir);
-    }
+    Directory dir = newDirectory();
+    dir.setLockFactory(NoLockFactory.getNoLockFactory());
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+
+    Document doc = new Document();
+    doc.add(newField("c", "val", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
+    w.addDocument(doc);
+    w.addDocument(doc);
+    IndexWriter w2 = new IndexWriter(dir, newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2)
+        .setOpenMode(OpenMode.CREATE));
+
+    w2.close();
+    // If we don't do that, the test fails on Windows
+    w.rollback();
+    dir.close();
   }
 
   public void testFutureCommit() throws Exception {
 
@@ -24,7 +24,7 @@ import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
-import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.Directory;
 
 /**
  * This tests the patch for issue #LUCENE-715 (IndexWriter does not
@@ -73,20 +73,16 @@ public class TestIndexWriterLockRelease extends LuceneTestCase {
   }
 
   public void testIndexWriterLockRelease() throws IOException {
-    FSDirectory dir = FSDirectory.open(this.__test_dir);
+    Directory dir = newFSDirectory(this.__test_dir);
     try {
-      new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT,
-          new MockAnalyzer())
-      .setOpenMode(OpenMode.APPEND));
+      new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
     } catch (FileNotFoundException e) {
       try {
-        new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT,
-            new MockAnalyzer())
-        .setOpenMode(OpenMode.APPEND));
+        new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
       } catch (FileNotFoundException e1) {
       }
     } finally {
       dir.close();
     }
   }
 }
 
@@ -17,7 +17,6 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
-import java.io.File;
 import java.io.IOException;
 import java.io.Reader;
 import java.io.UnsupportedEncodingException;
@@ -36,15 +35,14 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.UnicodeUtil;
-import org.apache.lucene.util._TestUtil;
 
 
 public class TestPayloads extends LuceneTestCase {
 
   // Simple tests to test the Payload class
@@ -154,16 +152,9 @@ public class TestPayloads extends LuceneTestCase {
 
   // Tests if payloads are correctly stored and loaded using both RamDirectory and FSDirectory
   public void testPayloadsEncoding() throws Exception {
-    // first perform the test using a RAMDirectory
     Directory dir = newDirectory();
     performTest(dir);
     dir.close();
-    // now use a FSDirectory and repeat same test
-    File dirName = _TestUtil.getTempDir("test_payloads");
-    dir = FSDirectory.open(dirName);
-    performTest(dir);
-    _TestUtil.rmDir(dirName);
-    dir.close();
   }
 
   // builds an index with payloads in the given Directory and performs
@@ -236,7 +227,7 @@ public class TestPayloads extends LuceneTestCase {
                                                 new BytesRef(terms[i].text()));
     }
 
-    while (tps[0].nextDoc() != DocsEnum.NO_MORE_DOCS) {
+    while (tps[0].nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
       for (int i = 1; i < numTerms; i++) {
         tps[i].nextDoc();
       }
@@ -521,7 +512,7 @@ public class TestPayloads extends LuceneTestCase {
     while (terms.next() != null) {
       String termText = terms.term().utf8ToString();
       tp = terms.docsAndPositions(delDocs, tp);
-      while(tp.nextDoc() != DocsEnum.NO_MORE_DOCS) {
+      while(tp.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
        int freq = tp.freq();
        for (int i = 0; i < freq; i++) {
          tp.nextPosition();
 
@@ -83,18 +83,9 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase {
 
   @Test
   public void testSnapshotDeletionPolicy() throws Exception {
-    File dir = _TestUtil.getTempDir(INDEX_PATH);
-    try {
-      Directory fsDir = FSDirectory.open(dir);
-      runTest(random, fsDir);
-      fsDir.close();
-    } finally {
-      _TestUtil.rmDir(dir);
-    }
-
-    Directory dir2 = newDirectory();
-    runTest(random, dir2);
-    dir2.close();
+    Directory fsDir = newDirectory();
+    runTest(random, fsDir);
+    fsDir.close();
   }
 
   private void runTest(Random random, Directory dir) throws Exception {
 
@@ -163,17 +163,8 @@ public class TestStressIndexing extends LuceneTestCase {
     FSDirectory.
   */
   public void testStressIndexAndSearching() throws Exception {
-    // With ConcurrentMergeScheduler, in RAMDir
     Directory directory = newDirectory();
     runStressTest(directory, new ConcurrentMergeScheduler());
     directory.close();
-
-    // With ConcurrentMergeScheduler, in FSDir
-    File dirPath = _TestUtil.getTempDir("lucene.test.stress");
-    directory = FSDirectory.open(dirPath);
-    runStressTest(directory, new ConcurrentMergeScheduler());
-    directory.close();
-
-    _TestUtil.rmDir(dirPath);
   }
 }
 
@@ -74,7 +74,6 @@ public class TestStressIndexing2 extends LuceneTestCase {
 
   public void testRandom() throws Throwable {
     Directory dir1 = newDirectory();
-    // dir1 = FSDirectory.open("foofoofoo");
     Directory dir2 = newDirectory();
     // mergeFactor=2; maxBufferedDocs=2; Map docs = indexRandom(1, 3, 2, dir1);
     int maxThreadStates = 1+random.nextInt(10);
 
@@ -141,12 +141,5 @@ public class TestThreadedOptimize extends LuceneTestCase {
     runTest(random, directory, new SerialMergeScheduler());
     runTest(random, directory, new ConcurrentMergeScheduler());
     directory.close();
-
-    File dirName = new File(TEMP_DIR, "luceneTestThreadedOptimize");
-    directory = FSDirectory.open(dirName);
-    runTest(random, directory, new SerialMergeScheduler());
-    runTest(random, directory, new ConcurrentMergeScheduler());
-    directory.close();
-    _TestUtil.rmDir(dirName);
   }
 }
 
@@ -502,7 +502,7 @@ public class MockDirectoryWrapper extends Directory {
   }
 
   @Override
-  public synchronized void setLockFactory(LockFactory lockFactory) {
+  public synchronized void setLockFactory(LockFactory lockFactory) throws IOException {
     maybeYield();
     delegate.setLockFactory(lockFactory);
   }
 
@@ -27,20 +27,14 @@ import java.util.Arrays;
 public class TestDirectory extends LuceneTestCase {
 
   public void testDetectClose() throws Throwable {
-    Directory dir = new RAMDirectory();
-    dir.close();
-    try {
-      dir.createOutput("test");
-      fail("did not hit expected exception");
-    } catch (AlreadyClosedException ace) {
-    }
-
-    dir = FSDirectory.open(TEMP_DIR);
-    dir.close();
-    try {
-      dir.createOutput("test");
-      fail("did not hit expected exception");
-    } catch (AlreadyClosedException ace) {
+    Directory[] dirs = new Directory[] { new RAMDirectory(), new SimpleFSDirectory(TEMP_DIR), new NIOFSDirectory(TEMP_DIR) };
+    for (Directory dir : dirs) {
+      dir.close();
+      try {
+        dir.createOutput("test");
+        fail("did not hit expected exception");
+      } catch (AlreadyClosedException ace) {
+      }
     }
   }
 
@@ -140,7 +134,7 @@ public class TestDirectory extends LuceneTestCase {
 
   // LUCENE-1468
   public void testFSDirectoryFilter() throws IOException {
-    checkDirectoryFilter(FSDirectory.open(new File(TEMP_DIR,"test")));
+    checkDirectoryFilter(newFSDirectory(new File(TEMP_DIR,"test")));
   }
 
   // LUCENE-1468
 
@@ -145,15 +145,15 @@ public class TestLockFactory extends LuceneTestCase {
     }
 
     public void _testStressLocks(LockFactory lockFactory, File indexDir) throws Exception {
-        FSDirectory fs1 = FSDirectory.open(indexDir, lockFactory);
+        Directory dir = newFSDirectory(indexDir, lockFactory);
 
         // First create a 1 doc index:
-        IndexWriter w = new IndexWriter(fs1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
        addDoc(w);
        w.close();
 
-        WriterThread writer = new WriterThread(100, fs1);
-        SearcherThread searcher = new SearcherThread(100, fs1);
+        WriterThread writer = new WriterThread(100, dir);
+        SearcherThread searcher = new SearcherThread(100, dir);
        writer.start();
        searcher.start();
 
@@ -164,6 +164,7 @@ public class TestLockFactory extends LuceneTestCase {
        assertTrue("IndexWriter hit unexpected exceptions", !writer.hitException);
        assertTrue("IndexSearcher hit unexpected exceptions", !searcher.hitException);
 
+        dir.close();
        // Cleanup
        _TestUtil.rmDir(indexDir);
     }
@@ -234,9 +235,9 @@ public class TestLockFactory extends LuceneTestCase {
 
         File fdir1 = _TestUtil.getTempDir("TestLockFactory.8");
         File fdir2 = _TestUtil.getTempDir("TestLockFactory.8.Lockdir");
-        Directory dir1 = FSDirectory.open(fdir1, new NativeFSLockFactory(fdir1));
+        Directory dir1 = newFSDirectory(fdir1, new NativeFSLockFactory(fdir1));
         // same directory, but locks are stored somewhere else. The prefix of the lock factory should != null
-        Directory dir2 = FSDirectory.open(fdir1, new NativeFSLockFactory(fdir2));
+        Directory dir2 = newFSDirectory(fdir1, new NativeFSLockFactory(fdir2));
 
         String prefix1 = dir1.getLockFactory().getLockPrefix();
         assertNull("Lock prefix for lockDir same as directory should be null", prefix1);
@@ -244,6 +245,8 @@ public class TestLockFactory extends LuceneTestCase {
         String prefix2 = dir2.getLockFactory().getLockPrefix();
         assertNotNull("Lock prefix for lockDir outside of directory should be not null", prefix2);
 
+        dir1.close();
+        dir2.close();
         _TestUtil.rmDir(fdir1);
         _TestUtil.rmDir(fdir2);
     }
@@ -254,12 +257,13 @@ public class TestLockFactory extends LuceneTestCase {
 
         // Make sure we get null prefix:
         File dirName = _TestUtil.getTempDir("TestLockFactory.10");
-        Directory dir = FSDirectory.open(dirName);
+        Directory dir = newFSDirectory(dirName);
 
         String prefix = dir.getLockFactory().getLockPrefix();
 
         assertTrue("Default lock prefix should be null", null == prefix);
 
+        dir.close();
        _TestUtil.rmDir(dirName);
     }
 
@@ -51,7 +51,7 @@ public class TestRAMDirectory extends LuceneTestCase {
     super.setUp();
     indexDir = new File(TEMP_DIR, "RAMDirIndex");
 
-    Directory dir = FSDirectory.open(indexDir);
+    Directory dir = newFSDirectory(indexDir);
     IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
     // add some documents
@@ -68,7 +68,7 @@ public class TestRAMDirectory extends LuceneTestCase {
 
   public void testRAMDirectory () throws IOException {
 
-    Directory dir = FSDirectory.open(indexDir);
+    Directory dir = newFSDirectory(indexDir);
     MockDirectoryWrapper ramDir = new MockDirectoryWrapper(random, new RAMDirectory(dir));
 
     // close the underlaying directory
@@ -100,7 +100,7 @@ public class TestRAMDirectory extends LuceneTestCase {
 
   public void testRAMDirectorySize() throws IOException, InterruptedException {
 
-    Directory dir = FSDirectory.open(indexDir);
+    Directory dir = newFSDirectory(indexDir);
     final MockDirectoryWrapper ramDir = new MockDirectoryWrapper(random, new RAMDirectory(dir));
     dir.close();
 
@@ -51,6 +51,8 @@ import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.search.FieldCache.CacheEntry;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.LockFactory;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.FieldCacheSanityChecker.Insanity;
 import org.junit.*;
@@ -356,7 +358,17 @@ public abstract class LuceneTestCase extends Assert {
     for (MockDirectoryWrapper d : stores.keySet()) {
       if (d.isOpen()) {
         StackTraceElement elements[] = stores.get(d);
-        StackTraceElement element = (elements.length > 1) ? elements[1] : null;
+        // Look for the first class that is not LuceneTestCase that requested
+        // a Directory. The first two items are of Thread's, so skipping over
+        // them.
+        StackTraceElement element = null;
+        for (int i = 2; i < elements.length; i++) {
+          StackTraceElement ste = elements[i];
+          if (ste.getClassName().indexOf("LuceneTestCase") == -1) {
+            element = ste;
+            break;
+          }
+        }
         fail("directory of test was not closed, opened from: " + element);
       }
     }
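This deeper stack scan is what makes the leak report useful: each directory's creation stack is recorded via Thread.currentThread().getStackTrace() (see the stores.put changes below), and skipping the Thread and LuceneTestCase frames means the "directory of test was not closed" failure points at the test method that opened the directory rather than at the helper itself.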
@@ -705,7 +717,7 @@ public abstract class LuceneTestCase extends Assert {
   }
 
   /**
-   * Returns a new Dictionary instance. Use this when the test does not
+   * Returns a new Directory instance. Use this when the test does not
    * care about the specific Directory implementation (most tests).
    * <p>
    * The Directory is wrapped with {@link MockDirectoryWrapper}.
@@ -719,15 +731,14 @@ public abstract class LuceneTestCase extends Assert {
   }
 
   public static MockDirectoryWrapper newDirectory(Random r) throws IOException {
-    StackTraceElement[] stack = new Exception().getStackTrace();
     Directory impl = newDirectoryImpl(r, TEST_DIRECTORY);
     MockDirectoryWrapper dir = new MockDirectoryWrapper(r, impl);
-    stores.put(dir, stack);
+    stores.put(dir, Thread.currentThread().getStackTrace());
     return dir;
   }
 
   /**
-   * Returns a new Dictionary instance, with contents copied from the
+   * Returns a new Directory instance, with contents copied from the
    * provided directory. See {@link #newDirectory()} for more
    * information.
    */
@@ -735,14 +746,46 @@ public abstract class LuceneTestCase extends Assert {
     return newDirectory(random, d);
   }

+  /** Returns a new FSDirectory instance over the given file, which must be a folder. */
+  public static MockDirectoryWrapper newFSDirectory(File f) throws IOException {
+    return newFSDirectory(f, null);
+  }
+
+  /** Returns a new FSDirectory instance over the given file, which must be a folder. */
+  public static MockDirectoryWrapper newFSDirectory(File f, LockFactory lf) throws IOException {
+    String fsdirClass = TEST_DIRECTORY;
+    if (fsdirClass.equals("random")) {
+      fsdirClass = FS_DIRECTORIES[random.nextInt(FS_DIRECTORIES.length)];
+    }
+
+    if (fsdirClass.indexOf(".") == -1) { // if not fully qualified, assume .store
+      fsdirClass = "org.apache.lucene.store." + fsdirClass;
+    }
+
+    Class<? extends FSDirectory> clazz;
+    try {
+      try {
+        clazz = Class.forName(fsdirClass).asSubclass(FSDirectory.class);
+      } catch (ClassCastException e) {
+        // TEST_DIRECTORY is not a sub-class of FSDirectory, so draw one at random
+        fsdirClass = FS_DIRECTORIES[random.nextInt(FS_DIRECTORIES.length)];
+        clazz = Class.forName(fsdirClass).asSubclass(FSDirectory.class);
+      }
+      MockDirectoryWrapper dir = new MockDirectoryWrapper(random, newFSDirectoryImpl(clazz, f, lf));
+      stores.put(dir, Thread.currentThread().getStackTrace());
+      return dir;
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+
   public static MockDirectoryWrapper newDirectory(Random r, Directory d) throws IOException {
-    StackTraceElement[] stack = new Exception().getStackTrace();
     Directory impl = newDirectoryImpl(r, TEST_DIRECTORY);
     for (String file : d.listAll()) {
       d.copy(impl, file, file);
     }
     MockDirectoryWrapper dir = new MockDirectoryWrapper(r, impl);
-    stores.put(dir, stack);
+    stores.put(dir, Thread.currentThread().getStackTrace());
     return dir;
   }

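Call sites that previously used FSDirectory.open(path) now go through newFSDirectory, which both randomizes the FSDirectory implementation and registers the wrapper for the leak check. A sketch of a converted call site (folder name is an assumption):

    import java.io.File;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.util.LuceneTestCase;

    public class ConvertedTest extends LuceneTestCase {
      public void testOpenAndClose() throws Exception {
        File path = new File(TEMP_DIR, "converted"); // hypothetical index folder
        path.mkdirs();
        Directory dir = newFSDirectory(path); // was: FSDirectory.open(path)
        // ... index and search against dir ...
        dir.close(); // MockDirectoryWrapper verifies nothing was left open
      }
    }
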
@@ -818,12 +861,16 @@ public abstract class LuceneTestCase extends Assert {
     }
   }

-  private static String CORE_DIRECTORIES[] = {
-    "RAMDirectory",
+  private static final String FS_DIRECTORIES[] = {
     "SimpleFSDirectory",
     "NIOFSDirectory",
     "MMapDirectory"
   };

+  private static final String CORE_DIRECTORIES[] = {
+    "RAMDirectory",
+    FS_DIRECTORIES[0], FS_DIRECTORIES[1], FS_DIRECTORIES[2]
+  };
+
   public static String randomDirectory(Random random) {
     if (random.nextInt(10) == 0) {

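The split lets filesystem-only call sites draw from FS_DIRECTORIES while randomDirectory (whose body continues in the next hunk) draws from the full set. A sketch of the likely selection bias; the body of the 1-in-10 branch is an assumption, since only the "RAMDirectory" fallthrough is visible below:

    import java.util.Random;

    public class PickDir {
      static final String[] CORE_DIRECTORIES = {
        "RAMDirectory", "SimpleFSDirectory", "NIOFSDirectory", "MMapDirectory"
      };

      // Roughly one run in ten exercises a randomly chosen implementation;
      // the rest stay on the fast in-memory RAMDirectory.
      public static String randomDirectory(Random random) {
        if (random.nextInt(10) == 0) {
          return CORE_DIRECTORIES[random.nextInt(CORE_DIRECTORIES.length)]; // assumed branch body
        } else {
          return "RAMDirectory";
        }
      }
    }
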
@@ -832,6 +879,23 @@ public abstract class LuceneTestCase extends Assert {
       return "RAMDirectory";
     }
   }

+  private static Directory newFSDirectoryImpl(
+      Class<? extends FSDirectory> clazz, File file, LockFactory lockFactory)
+      throws IOException {
+    try {
+      // Assuming every FSDirectory has a ctor(File), but not all may take a
+      // LockFactory too, so setting it afterwards.
+      Constructor<? extends FSDirectory> ctor = clazz.getConstructor(File.class);
+      FSDirectory d = ctor.newInstance(file);
+      if (lockFactory != null) {
+        d.setLockFactory(lockFactory);
+      }
+      return d;
+    } catch (Exception e) {
+      return FSDirectory.open(file);
+    }
+  }
+
   static Directory newDirectoryImpl(Random random, String clazzName) {
     if (clazzName.equals("random"))

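newFSDirectoryImpl leans on the convention that every FSDirectory subclass exposes a constructor taking a File, with the LockFactory applied afterwards. The same reflective pattern in isolation (the class name and the path are assumptions):

    import java.io.File;
    import java.lang.reflect.Constructor;
    import org.apache.lucene.store.FSDirectory;
    import org.apache.lucene.store.NativeFSLockFactory;

    public class ReflectDir {
      public static void main(String[] args) throws Exception {
        Class<? extends FSDirectory> clazz = Class
            .forName("org.apache.lucene.store.NIOFSDirectory")
            .asSubclass(FSDirectory.class);
        // Every FSDirectory subclass is expected to offer ctor(File).
        Constructor<? extends FSDirectory> ctor = clazz.getConstructor(File.class);
        FSDirectory dir = ctor.newInstance(new File("/tmp/reflectdir")); // path assumed
        dir.setLockFactory(new NativeFSLockFactory()); // mirrors the lf != null branch
        System.out.println(dir.getClass().getSimpleName());
        dir.close();
      }
    }
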
@@ -840,27 +904,22 @@ public abstract class LuceneTestCase extends Assert {
       clazzName = "org.apache.lucene.store." + clazzName;
     try {
       final Class<? extends Directory> clazz = Class.forName(clazzName).asSubclass(Directory.class);
-      try {
-        // try empty ctor
-        return clazz.newInstance();
-      } catch (Exception e) {
       // If it is a FSDirectory type, try its ctor(File)
       if (FSDirectory.class.isAssignableFrom(clazz)) {
         final File tmpFile = File.createTempFile("test", "tmp", TEMP_DIR);
         tmpFile.delete();
         tmpFile.mkdir();
-        try {
-          Constructor<? extends Directory> ctor = clazz.getConstructor(File.class);
-          return ctor.newInstance(tmpFile);
-        } catch (Exception e2) {
-          // try .open(File)
-          Method method = clazz.getMethod("open", new Class[] { File.class });
-          return (Directory) method.invoke(null, tmpFile);
-        }
+        return newFSDirectoryImpl(clazz.asSubclass(FSDirectory.class), tmpFile, null);
       }
-      }
+
+      // try empty ctor
+      return clazz.newInstance();
     } catch (Exception e) {
       throw new RuntimeException(e);
     }
   }

   public String getName() {
     return this.name;
   }

@@ -869,6 +928,7 @@
    * if a real file is needed. To get a stream, code should prefer
    * {@link Class#getResourceAsStream} using {@code this.getClass()}.
    */
+
   protected File getDataFile(String name) throws IOException {
     try {
       return new File(this.getClass().getResource(name).toURI());

@@ -1014,17 +1074,20 @@
       Collections.shuffle(knownCodecs, random);
     }

+    @Override
     public synchronized void register(Codec codec) {
       if (!codec.name.equals("PreFlex"))
         knownCodecs.add(codec);
       super.register(codec);
     }

+    @Override
     public synchronized void unregister(Codec codec) {
       knownCodecs.remove(codec);
       super.unregister(codec);
     }

+    @Override
     public synchronized String getFieldCodec(String name) {
       Codec codec = previousMappings.get(name);
       if (codec == null) {

@@ -1034,6 +1097,7 @@
       return codec.name;
     }

+    @Override
     public String toString() {
       return "RandomCodecProvider: " + previousMappings.toString();
     }

@@ -42,14 +42,20 @@ public class AlternateDirectoryTest extends SolrTestCaseJ4 {
     assertQ(req("q","*:*","qt","standard"));
     assertTrue(TestFSDirectoryFactory.openCalled);
     assertTrue(TestIndexReaderFactory.newReaderCalled);
+    TestFSDirectoryFactory.dir.close();
   }

   static public class TestFSDirectoryFactory extends DirectoryFactory {
     public static volatile boolean openCalled = false;
+    public static volatile Directory dir;

-    public FSDirectory open(String path) throws IOException {
+    public Directory open(String path) throws IOException {
       openCalled = true;
-      return FSDirectory.open(new File(path));
+      // need to close the old directory, otherwise the test fails
+      if (dir != null) {
+        dir.close();
+      }
+      return dir = newFSDirectory(new File(path));
     }

   }

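The same close-before-reopen discipline applies to any Solr DirectoryFactory that hands out fresh Directory instances, not just the test one. A sketch of such a factory under that assumption (the class name and the solrconfig.xml wiring are illustrative):

    import java.io.File;
    import java.io.IOException;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;
    import org.apache.solr.core.DirectoryFactory;

    // Wired in solrconfig.xml with something like:
    //   <directoryFactory class="com.example.ClosingDirectoryFactory"/>
    public class ClosingDirectoryFactory extends DirectoryFactory {
      private volatile Directory last;

      public Directory open(String path) throws IOException {
        if (last != null) {
          last.close(); // release the previous instance before handing out a new one
        }
        return last = FSDirectory.open(new File(path));
      }
    }
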
@@ -97,7 +97,7 @@ public class TestArbitraryIndexDir extends AbstractSolrTestCase{
     }

     //add a doc in the new index dir
-    Directory dir = FSDirectory.open(newDir);
+    Directory dir = newFSDirectory(newDir);
     IndexWriter iw = new IndexWriter(
       dir,
       new IndexWriterConfig(Version.LUCENE_40, new StandardAnalyzer(Version.LUCENE_40)).

@@ -117,6 +117,7 @@ public class TestArbitraryIndexDir extends AbstractSolrTestCase{
       req("id:2"),
       "*[count(//doc)=1]"
     );
+    dir.close();
     newDir.delete();
   }
 }

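An IndexWriter never closes the Directory it was handed, which is why both closes are needed. The full lifecycle in one place, using the same Lucene 4.0-era API as the hunk above (the path and field values are assumptions):

    import java.io.File;
    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;
    import org.apache.lucene.util.Version;

    public class WriteOneDoc {
      public static void main(String[] args) throws Exception {
        Directory dir = FSDirectory.open(new File("/tmp/arbitrary-index")); // path assumed
        IndexWriter iw = new IndexWriter(
            dir,
            new IndexWriterConfig(Version.LUCENE_40, new StandardAnalyzer(Version.LUCENE_40)));
        Document doc = new Document();
        doc.add(new Field("id", "2", Field.Store.YES, Field.Index.NOT_ANALYZED));
        iw.addDocument(doc);
        iw.close();
        dir.close(); // the writer does not close the Directory it was given
      }
    }
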
@@ -30,6 +30,7 @@ import org.apache.lucene.search.spell.SpellChecker;
 import org.apache.lucene.search.spell.StringDistance;
 import org.apache.lucene.search.spell.SuggestWord;
 import org.apache.lucene.search.spell.SuggestWordFrequencyComparator;
+import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.util.NamedList;

@@ -284,8 +285,9 @@ public class IndexBasedSpellCheckerTest extends SolrTestCaseJ4 {
     File indexDir = new File(TEMP_DIR, "spellingIdx" + new Date().getTime());
     //create a standalone index
     File altIndexDir = new File(TEMP_DIR, "alternateIdx" + new Date().getTime());
+    Directory dir = newFSDirectory(altIndexDir);
     IndexWriter iw = new IndexWriter(
-        FSDirectory.open(altIndexDir),
+        dir,
         new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).
           setMaxFieldLength(IndexWriterConfig.UNLIMITED_FIELD_LENGTH)
     );

@@ -296,6 +298,7 @@ public class IndexBasedSpellCheckerTest extends SolrTestCaseJ4 {
     }
     iw.optimize();
     iw.close();
+    dir.close();
     indexDir.mkdirs();
     spellchecker.add(AbstractLuceneSpellChecker.INDEX_DIR, indexDir.getAbsolutePath());
     spellchecker.add(AbstractLuceneSpellChecker.LOCATION, altIndexDir.getAbsolutePath());