mirror of https://github.com/apache/lucene.git
LUCENE-8474: Remove deprecated RAMDirectory.
parent 43f2723213
commit d7dc53ff7c
@@ -4,7 +4,12 @@ For more information on past and future Lucene versions, please see:
 http://s.apache.org/luceneversions
 
 ======================= Lucene 9.0.0 =======================
 
 (No Changes)
 
+API Changes
+
+* LUCENE-8474: RAMDirectory and associated deprecated classes have been
+  removed. (Dawid Weiss)
+
 ======================= Lucene 8.0.0 =======================

@@ -1,5 +1,12 @@
 # Apache Lucene Migration Guide
 
+## RAMDirectory, RAMFile, RAMInputStream, RAMOutputStream removed ##
+
+RAM-based directory implementations have been removed. (LUCENE-8474)
+ByteBuffersDirectory can be used as a RAM-resident replacement, although it
+is discouraged in favor of the default memory-mapped directory.
+
 ## Similarity.SimScorer.computeXXXFactor methods removed (LUCENE-8014) ##
 
 SpanQuery and PhraseQuery now always calculate their slops as (1.0 / (1.0 +

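For upgraders this is typically a one-line change, since both classes implement the same Directory API. A minimal sketch (class and method names are from this commit; the wrapper class and printout are illustrative only):

import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;

public class MigrationSketch {
  public static void main(String[] args) throws Exception {
    // Before (Lucene 8.x): Directory dir = new RAMDirectory();
    // After (Lucene 9.0): same Directory API, so downstream code is unchanged.
    try (Directory dir = new ByteBuffersDirectory()) {
      System.out.println(dir.getClass().getSimpleName()
          + " starts empty: " + (dir.listAll().length == 0));
    }
  }
}
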
@@ -33,8 +33,8 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.TestUtil;
@@ -84,7 +84,7 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
    */
   public void testMutipleDocument() throws Exception {
-    RAMDirectory dir = new RAMDirectory();
+    Directory dir = new ByteBuffersDirectory();
     Analyzer analyzer = new KeywordAnalyzer();
     IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(analyzer));
     Document doc = new Document();

@@ -94,7 +94,7 @@ import org.apache.lucene.analysis.snowball.TestSnowball;
 import org.apache.lucene.analysis.standard.StandardTokenizer;
 import org.apache.lucene.analysis.synonym.SynonymMap;
 import org.apache.lucene.analysis.wikipedia.WikipediaTokenizer;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.util.AttributeFactory;
 import org.apache.lucene.util.AttributeSource;
 import org.apache.lucene.util.CharsRef;
@@ -387,7 +387,7 @@ public class TestRandomChains extends BaseTokenStreamTestCase {
     InputStream affixStream = TestHunspellStemFilter.class.getResourceAsStream("simple.aff");
     InputStream dictStream = TestHunspellStemFilter.class.getResourceAsStream("simple.dic");
     try {
-      return new Dictionary(new RAMDirectory(), "dictionary", affixStream, dictStream);
+      return new Dictionary(new ByteBuffersDirectory(), "dictionary", affixStream, dictStream);
     } catch (Exception ex) {
       Rethrow.rethrow(ex);
       return null; // unreachable code

@@ -24,7 +24,7 @@ import java.text.ParseException;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.util.CharsRef;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
@@ -62,7 +62,7 @@ public abstract class StemmerTestBase extends LuceneTestCase {
     }
 
     try {
-      Dictionary dictionary = new Dictionary(new RAMDirectory(), "dictionary", affixStream, Arrays.asList(dictStreams), ignoreCase);
+      Dictionary dictionary = new Dictionary(new ByteBuffersDirectory(), "dictionary", affixStream, Arrays.asList(dictStreams), ignoreCase);
       stemmer = new Stemmer(dictionary);
     } finally {
       IOUtils.closeWhileHandlingException(affixStream);

@@ -24,7 +24,8 @@ import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.ByteBuffersDirectory;
+import org.apache.lucene.store.Directory;
 
 import java.util.Arrays;
 import java.util.Collections;
@@ -32,7 +33,7 @@ import java.util.Collections;
 public class QueryAutoStopWordAnalyzerTest extends BaseTokenStreamTestCase {
   String variedFieldValues[] = {"the", "quick", "brown", "fox", "jumped", "over", "the", "lazy", "boring", "dog"};
   String repetitiveFieldValues[] = {"boring", "boring", "vaguelyboring"};
-  RAMDirectory dir;
+  Directory dir;
   Analyzer appAnalyzer;
   IndexReader reader;
   QueryAutoStopWordAnalyzer protectedAnalyzer;
@@ -40,7 +41,7 @@ public class QueryAutoStopWordAnalyzerTest extends BaseTokenStreamTestCase {
   @Override
   public void setUp() throws Exception {
     super.setUp();
-    dir = new RAMDirectory();
+    dir = new ByteBuffersDirectory();
     appAnalyzer = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
     IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(appAnalyzer));
     int numDocs = 200;

@@ -33,7 +33,8 @@ import org.apache.lucene.index.MultiTerms;
 import org.apache.lucene.index.PostingsEnum;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.ByteBuffersDirectory;
+import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 
 /** tests for classicanalyzer */
@@ -269,7 +270,7 @@ public class TestClassicAnalyzer extends BaseTokenStreamTestCase {
    * Make sure we skip wicked long terms.
    */
   public void testWickedLongTerm() throws IOException {
-    RAMDirectory dir = new RAMDirectory();
+    Directory dir = new ByteBuffersDirectory();
     Analyzer analyzer = new ClassicAnalyzer();
     IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(analyzer));

@@ -67,10 +67,10 @@ import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.BaseDirectoryWrapper;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.NIOFSDirectory;
-import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.store.SimpleFSDirectory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
@@ -1450,7 +1450,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
 
     // create a bunch of dummy segments
     int id = 40;
-    RAMDirectory ramDir = new RAMDirectory();
+    Directory ramDir = new ByteBuffersDirectory();
     for (int i = 0; i < 3; i++) {
       // only use Log- or TieredMergePolicy, to make document addition predictable and not suddenly merge:
       MergePolicy mp = random().nextBoolean() ? newLogMergePolicy() : newTieredMergePolicy();

@@ -25,7 +25,7 @@ compound=true
 
 analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
 directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
 
 doc.stored=true
 doc.tokenized=true

@@ -25,7 +25,7 @@ collector.class=coll:topScoreDocOrdered:topScoreDocUnordered:topScoreDocOrdered:
 
 analyzer=org.apache.lucene.analysis.core.WhitespaceAnalyzer
 directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
 
 doc.stored=true
 doc.tokenized=true

@@ -25,7 +25,7 @@ collector.class=coll:topScoreDocOrdered:topScoreDocUnordered:topScoreDocOrdered:
 
 analyzer=org.apache.lucene.analysis.core.WhitespaceAnalyzer
 directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
 
 doc.stored=true
 doc.tokenized=true

@@ -29,7 +29,7 @@ compound=compnd:true:false
 
 analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
 directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
 
 doc.stored=stored:true:true:false:false
 doc.tokenized=true

@@ -25,7 +25,7 @@ compound=cmpnd:true:true:true:true:false:false:false:false
 
 analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
 directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
 
 doc.stored=true
 doc.tokenized=true

@@ -25,7 +25,7 @@ compound=cmpnd:true:true:true:true:false:false:false:false
 
 analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
 directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
 
 doc.stored=true
 doc.tokenized=true

@@ -25,7 +25,7 @@ compound=cmpnd:true:true:true:true:false:false:false:false
 
 analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
 directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
 
 doc.stored=true
 doc.tokenized=true

@@ -25,7 +25,7 @@ compound=cmpnd:true:true:true:true:false:false:false:false
 
 analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
 directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
 
 doc.stored=true
 doc.tokenized=true

@@ -24,7 +24,7 @@ compound=true
 
 analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
 directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
 
 doc.stored=true
 doc.tokenized=true

@@ -23,7 +23,7 @@ compound=true
 
 analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
 directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
 
 doc.stored=true
 doc.tokenized=true

@@ -35,7 +35,7 @@ compound=true
 
 analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
 directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
 
 doc.stored=true
 doc.tokenized=true

@@ -23,7 +23,7 @@ compound=true
 
 analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
 directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
 
 doc.stored=false
 doc.tokenized=true

@@ -24,7 +24,7 @@ sort.rng=20000:10000:20000:10000
 
 analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
 directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
 
 doc.stored=true
 doc.tokenized=true

@@ -54,7 +54,7 @@ doc.tokenized=false
 
 ### Directory
 directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
 compound=false
 merge.factor=10
 ram.flush.mb=64

@@ -24,7 +24,7 @@ compound=cmpnd:true:true:true:true:false:false:false:false
 
 analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
 directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
 
 doc.stored=true
 doc.tokenized=true

@@ -24,7 +24,7 @@ compound=cmpnd:true:true:true:true:false:false:false:false
 
 analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
 directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
 
 doc.stored=true
 doc.tokenized=true

@@ -44,9 +44,9 @@ import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
-import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.IOUtils;
 
 /**
@@ -70,12 +70,13 @@ import org.apache.lucene.util.IOUtils;
  * <li><b>facet.source</b>=<class name for facet-source| Default: RandomFacetSource>
  * <li><b>query.maker</b>=<class name for query-maker| Default: SimpleQueryMaker>
  * <li><b>log.queries</b>=<whether queries should be printed| Default: false>
- * <li><b>directory</b>=<type of directory to use for the index| Default: RAMDirectory>
- * <li><b>taxonomy.directory</b>=<type of directory for taxonomy index| Default: RAMDirectory>
+ * <li><b>directory</b>=<type of directory to use for the index| Default: ByteBuffersDirectory>
+ * <li><b>taxonomy.directory</b>=<type of directory for taxonomy index| Default: ByteBuffersDirectory>
  * </ul>
  */
 public class PerfRunData implements Closeable {
 
+  private static final String DEFAULT_DIRECTORY = "ByteBuffersDirectory";
   private Points points;
 
   // objects used during performance test run
@@ -192,17 +193,26 @@ public class PerfRunData implements Closeable {
 
   private Directory createDirectory(boolean eraseIndex, String dirName,
       String dirParam) throws IOException {
-    if ("FSDirectory".equals(config.get(dirParam,"RAMDirectory"))) {
-      Path workDir = Paths.get(config.get("work.dir","work"));
+    String dirImpl = config.get(dirParam, DEFAULT_DIRECTORY);
+    if ("FSDirectory".equals(dirImpl)) {
+      Path workDir = Paths.get(config.get("work.dir", "work"));
       Path indexDir = workDir.resolve(dirName);
       if (eraseIndex && Files.exists(indexDir)) {
         IOUtils.rm(indexDir);
       }
       Files.createDirectories(indexDir);
       return FSDirectory.open(indexDir);
     }
 
-    return new RAMDirectory();
+    if ("RAMDirectory".equals(dirImpl)) {
+      throw new IOException("RAMDirectory has been removed, use ByteBuffersDirectory.");
+    }
+
+    if ("ByteBuffersDirectory".equals(dirImpl)) {
+      return new ByteBuffersDirectory();
+    }
+
+    throw new IOException("Directory type not supported: " + dirImpl);
   }
 
   /** Returns an object that was previously set by {@link #setPerfObject(String, Object)}. */

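For benchmark code the directory is now selected by name through this lookup. A minimal sketch in the style of the test fixtures below (assuming PerfRunData's other defaults suffice for construction):

import java.util.Properties;
import org.apache.lucene.benchmark.byTask.PerfRunData;
import org.apache.lucene.benchmark.byTask.utils.Config;

public class DirectoryConfigSketch {
  public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    props.setProperty("print.props", "false"); // don't print anything
    // The default is now ByteBuffersDirectory; asking for "RAMDirectory"
    // makes createDirectory throw an IOException instead.
    props.setProperty("directory", "ByteBuffersDirectory");
    try (PerfRunData runData = new PerfRunData(new Config(props))) {
      System.out.println(runData.getDirectory().getClass().getSimpleName());
    }
  }
}
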
@@ -79,7 +79,7 @@ public abstract class BenchmarkTestCase extends LuceneTestCase {
   // properties in effect in all tests here
   final String propLines [] = {
     "work.dir=" + getWorkDirPath(),
-    "directory=RAMDirectory",
+    "directory=ByteBuffersDirectory",
     "print.props=false",
   };

@@ -170,7 +170,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
         "content.source.log.step=1",
         "doc.term.vector=false",
         "content.source.forever=false",
-        "directory=RAMDirectory",
+        "directory=ByteBuffersDirectory",
         "doc.stored=false",
         "doc.tokenized=false",
         "# ----- alg ",
@@ -211,7 +211,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
         "doc.term.vector=false",
         "log.step.AddDoc=10000",
         "content.source.forever=true",
-        "directory=RAMDirectory",
+        "directory=ByteBuffersDirectory",
         "doc.reuse.fields=false",
         "doc.stored=true",
         "doc.tokenized=false",
@@ -412,7 +412,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
         "content.source.log.step=3",
         "doc.term.vector=false",
         "content.source.forever=false",
-        "directory=RAMDirectory",
+        "directory=ByteBuffersDirectory",
         "doc.stored=false",
         "doc.tokenized=false",
         "task.max.depth.log=1",
@@ -447,7 +447,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
         "content.source.log.step=3",
         "doc.term.vector=false",
         "content.source.forever=false",
-        "directory=RAMDirectory",
+        "directory=ByteBuffersDirectory",
         "doc.stored=false",
         "doc.tokenized=false",
         "task.max.depth.log=1",
@@ -484,7 +484,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
         "content.source.log.step=3",
         "doc.term.vector=false",
         "content.source.forever=false",
-        "directory=RAMDirectory",
+        "directory=ByteBuffersDirectory",
         "doc.stored=false",
         "doc.tokenized=false",
         "debug.level=1",
@@ -527,7 +527,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
         "content.source.log.step=3",
         "doc.term.vector=false",
         "content.source.forever=false",
-        "directory=RAMDirectory",
+        "directory=ByteBuffersDirectory",
         "merge.scheduler=" + MyMergeScheduler.class.getName(),
         "doc.stored=false",
         "doc.tokenized=false",
@@ -575,7 +575,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
         "max.buffered=2",
         "doc.term.vector=false",
         "content.source.forever=false",
-        "directory=RAMDirectory",
+        "directory=ByteBuffersDirectory",
         "merge.policy=" + MyMergePolicy.class.getName(),
         "doc.stored=false",
         "doc.tokenized=false",
@@ -615,7 +615,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
         "compound=cmpnd:true:false",
         "doc.term.vector=vector:false:true",
         "content.source.forever=false",
-        "directory=RAMDirectory",
+        "directory=ByteBuffersDirectory",
         "doc.stored=false",
         "merge.factor=3",
         "doc.tokenized=false",
@@ -656,7 +656,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
         "docs.file=" + getReuters20LinesFile(),
         "content.source.log.step=100",
         "content.source.forever=false",
-        "directory=RAMDirectory",
+        "directory=ByteBuffersDirectory",
         "doc.stored=false",
         "merge.factor=3",
         "doc.tokenized=false",
@@ -695,7 +695,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
         "max.buffered=3",
         "doc.term.vector=false",
         "content.source.forever=false",
-        "directory=RAMDirectory",
+        "directory=ByteBuffersDirectory",
         "merge.policy=org.apache.lucene.index.LogDocMergePolicy",
         "doc.stored=false",
         "doc.tokenized=false",
@@ -767,7 +767,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
         "content.source.log.step=30",
         "doc.term.vector=false",
         "content.source.forever=false",
-        "directory=RAMDirectory",
+        "directory=ByteBuffersDirectory",
         "doc.stored=false",
         "doc.tokenized=false",
         "task.max.depth.log=1",
@@ -815,7 +815,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
         "docs.file=" + getReuters20LinesFile(),
         "content.source.log.step=3",
         "content.source.forever=false",
-        "directory=RAMDirectory",
+        "directory=ByteBuffersDirectory",
         "# ----- alg ",
         "{ \"Rounds\"",
         "  ResetSystemErase",
@@ -880,7 +880,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
         "docs.file=" + getReuters20LinesFile(),
         "content.source.log.step=3",
         "content.source.forever=false",
-        "directory=RAMDirectory",
+        "directory=ByteBuffersDirectory",
         "# ----- alg ",
         "{ \"Rounds\"",
         "  ResetSystemErase",
@@ -946,7 +946,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
         "docs.file=" + getReuters20LinesFile(),
         "work.dir=" + getWorkDir().toAbsolutePath().toString().replaceAll("\\\\", "/"), // Fix Windows path
         "content.source.forever=false",
-        "directory=RAMDirectory",
+        "directory=ByteBuffersDirectory",
         "AnalyzerFactory(name:'" + singleQuoteEscapedName + "', " + params + ")",
         "NewAnalyzer('" + singleQuoteEscapedName + "')",
         "CreateIndex",

@@ -34,11 +34,10 @@ import org.apache.lucene.benchmark.byTask.tasks.TaskSequence;
 import org.apache.lucene.benchmark.byTask.utils.Algorithm;
 import org.apache.lucene.benchmark.byTask.utils.Config;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.SuppressSysoutChecks;
 
-/** Test very simply that perf tasks are parses as expected. */
+/** Test very simply that perf tasks are parsed as expected. */
 @SuppressSysoutChecks(bugUrl = "very noisy")
 public class TestPerfTasksParse extends LuceneTestCase {
@@ -47,7 +46,7 @@ public class TestPerfTasksParse extends LuceneTestCase {
 
   // properties in effect in all tests here
   static final String propPart =
-    INDENT + "directory=RAMDirectory" + NEW_LINE +
+    INDENT + "directory=ByteBuffersDirectory" + NEW_LINE +
     INDENT + "print.props=false" + NEW_LINE;
 
   /** Test the repetition parsing for parallel tasks */
@@ -122,7 +121,7 @@ public class TestPerfTasksParse extends LuceneTestCase {
     config.set("content.source", MockContentSource.class.getName());
     String dir = config.get("content.source", null);
     if (dir != null) { Class.forName(dir); }
-    config.set("directory", RAMDirectory.class.getName());
+    config.set("directory", "ByteBuffersDirectory");
     if (config.get("line.file.out", null) != null) {
       config.set("line.file.out", createTempFile("linefile", ".txt").toAbsolutePath().toString());
     }

@@ -76,7 +76,7 @@ public class DocMakerTest extends BenchmarkTestCase {
     // Indexing configuration.
     props.setProperty("analyzer", WhitespaceAnalyzer.class.getName());
     props.setProperty("content.source", OneDocSource.class.getName());
-    props.setProperty("directory", "RAMDirectory");
+    props.setProperty("directory", "ByteBuffersDirectory");
     if (setIndexProps) {
       props.setProperty("doc.index.props", Boolean.toString(indexPropsVal));
     }
@@ -105,7 +105,7 @@ public class DocMakerTest extends BenchmarkTestCase {
 
     // Indexing configuration.
     props.setProperty("analyzer", WhitespaceAnalyzer.class.getName());
-    props.setProperty("directory", "RAMDirectory");
+    props.setProperty("directory", "ByteBuffersDirectory");
     if (setNormsProp) {
      props.setProperty("doc.tokenized.norms", Boolean.toString(normsPropVal));
     }

@@ -131,7 +131,7 @@ public class LineDocSourceTest extends BenchmarkTestCase {
     // Indexing configuration.
     props.setProperty("analyzer", WhitespaceAnalyzer.class.getName());
     props.setProperty("content.source", LineDocSource.class.getName());
-    props.setProperty("directory", "RAMDirectory");
+    props.setProperty("directory", "ByteBuffersDirectory");
     props.setProperty("doc.stored", "true");
     props.setProperty("doc.index.props", "true");

@@ -28,8 +28,8 @@ import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.Version;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -66,7 +66,7 @@ public class AddIndexesTaskTest extends BenchmarkTestCase {
     Properties props = new Properties();
     props.setProperty("writer.version", Version.LATEST.toString());
     props.setProperty("print.props", "false"); // don't print anything
-    props.setProperty("directory", "RAMDirectory");
+    props.setProperty("directory", "ByteBuffersDirectory");
     props.setProperty(AddIndexesTask.ADDINDEXES_INPUT_DIR, inputDir.toAbsolutePath().toString());
     Config config = new Config(props);
     return new PerfRunData(config);
@@ -74,7 +74,7 @@ public class AddIndexesTaskTest extends BenchmarkTestCase {
 
   private void assertIndex(PerfRunData runData) throws Exception {
     Directory taskDir = runData.getDirectory();
-    assertSame(RAMDirectory.class, taskDir.getClass());
+    assertSame(ByteBuffersDirectory.class, taskDir.getClass());
     IndexReader r = DirectoryReader.open(taskDir);
     try {
       assertEquals(10, r.numDocs());

@@ -32,7 +32,7 @@ public class CommitIndexTaskTest extends BenchmarkTestCase {
     Properties props = new Properties();
     props.setProperty("writer.version", Version.LATEST.toString());
     props.setProperty("print.props", "false"); // don't print anything
-    props.setProperty("directory", "RAMDirectory");
+    props.setProperty("directory", "ByteBuffersDirectory");
     Config config = new Config(props);
     return new PerfRunData(config);
   }

@@ -40,7 +40,7 @@ public class CreateIndexTaskTest extends BenchmarkTestCase {
     Properties props = new Properties();
     props.setProperty("writer.version", Version.LATEST.toString());
     props.setProperty("print.props", "false"); // don't print anything
-    props.setProperty("directory", "RAMDirectory");
+    props.setProperty("directory", "ByteBuffersDirectory");
     if (infoStreamValue != null) {
       props.setProperty("writer.info.stream", infoStreamValue);
     }

@@ -50,7 +50,7 @@ public class PerfTaskTest extends BenchmarkTestCase {
     if (setTaskLogStep) {
       props.setProperty("log.step.MyPerf", Integer.toString(taskLogStepVal));
     }
-    props.setProperty("directory", "RAMDirectory"); // no accidental FS dir.
+    props.setProperty("directory", "ByteBuffersDirectory"); // no accidental FS dir.
     Config config = new Config(props);
     return new PerfRunData(config);
   }

@@ -58,7 +58,7 @@ public class WriteEnwikiLineDocTaskTest extends BenchmarkTestCase {
     Properties props = new Properties();
     props.setProperty("doc.maker", docMakerName);
     props.setProperty("line.file.out", file.toAbsolutePath().toString());
-    props.setProperty("directory", "RAMDirectory"); // no accidental FS dir.
+    props.setProperty("directory", "ByteBuffersDirectory"); // no accidental FS dir.
     Config config = new Config(props);
     return new PerfRunData(config);
   }

@@ -142,7 +142,7 @@ public class WriteLineDocTaskTest extends BenchmarkTestCase {
     Properties props = new Properties();
     props.setProperty("doc.maker", docMakerName);
     props.setProperty("line.file.out", file.toAbsolutePath().toString());
-    props.setProperty("directory", "RAMDirectory"); // no accidental FS dir.
+    props.setProperty("directory", "ByteBuffersDirectory"); // no accidental FS dir.
     if (allowEmptyDocs) {
       props.setProperty("sufficient.fields", ",");
     }

@@ -157,6 +157,12 @@ public final class ByteBuffersDirectory extends BaseDirectory {
     return file.length();
   }
 
+  public boolean fileExists(String name) {
+    ensureOpen();
+    FileEntry file = files.get(name);
+    return file != null;
+  }
+
   @Override
   public IndexOutput createOutput(String name, IOContext context) throws IOException {
     ensureOpen();

@@ -44,7 +44,7 @@ import org.apache.lucene.util.IOUtils;
  * </ul>
  *
  * @see FSDirectory
- * @see RAMDirectory
+ * @see ByteBuffersDirectory
  * @see FilterDirectory
  */
 public abstract class Directory implements Closeable {

@@ -19,15 +19,16 @@ package org.apache.lucene.store;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.io.UncheckedIOException;
 import java.nio.file.NoSuchFileException;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashSet;
 import java.util.Set;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.lucene.util.Accountable;
-import org.apache.lucene.util.Accountables;
 import org.apache.lucene.util.IOUtils;
 
 // TODO

@@ -35,8 +36,7 @@ import org.apache.lucene.util.IOUtils;
 // - rename to MergeCacheingDir?  NRTCachingDir
 
 /**
- * Wraps a {@link RAMDirectory}
- * around any provided delegate directory, to
+ * Wraps a RAM-resident directory around any provided delegate directory, to
  * be used during NRT search.
  *
  * <p>This class is likely only useful in a near-real-time

@@ -67,9 +67,24 @@ import org.apache.lucene.util.IOUtils;
  */
 
 public class NRTCachingDirectory extends FilterDirectory implements Accountable {
+  private final AtomicBoolean closed = new AtomicBoolean(false);
 
-  private final RAMDirectory cache = new RAMDirectory();
+  /**
+   * Current total size of files in the cache is maintained separately for faster access.
+   */
+  private final AtomicLong cacheSize = new AtomicLong();
+
+  /**
+   * RAM-resident directory that updates {@link #cacheSize} when files are successfully closed.
+   */
+  private final ByteBuffersDirectory cacheDirectory = new ByteBuffersDirectory(
+      new SingleInstanceLockFactory(),
+      ByteBuffersDataOutput::new,
+      (fileName, content) -> {
+        cacheSize.addAndGet(content.size());
+        return ByteBuffersDirectory.OUTPUT_AS_MANY_BUFFERS_LUCENE.apply(fileName, content);
+      }
+  );
 
   private final long maxMergeSizeBytes;
   private final long maxCachedBytes;

@@ -83,8 +98,8 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
    *  {@code <= maxCachedMB} */
   public NRTCachingDirectory(Directory delegate, double maxMergeSizeMB, double maxCachedMB) {
     super(delegate);
-    maxMergeSizeBytes = (long) (maxMergeSizeMB*1024*1024);
-    maxCachedBytes = (long) (maxCachedMB*1024*1024);
+    maxMergeSizeBytes = (long) (maxMergeSizeMB * 1024 * 1024);
+    maxCachedBytes = (long) (maxCachedMB * 1024 * 1024);
   }

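Usage of NRTCachingDirectory is unchanged by the cache swap; a minimal sketch (the 5.0/60.0 MB thresholds are illustrative values and the index path is hypothetical):

import java.nio.file.Paths;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.NRTCachingDirectory;

public class NrtCacheSketch {
  public static void main(String[] args) throws Exception {
    Directory fsDir = FSDirectory.open(Paths.get("/tmp/index"));
    // Keep segments <= 5 MB from small flushes/merges in RAM, up to 60 MB
    // total, before writing through to fsDir.
    try (Directory dir = new NRTCachingDirectory(fsDir, 5.0, 60.0)) {
      System.out.println(dir);
    }
  }
}
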
@@ -96,10 +111,10 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
   @Override
   public synchronized String[] listAll() throws IOException {
     final Set<String> files = new HashSet<>();
-    for(String f : cache.listAll()) {
+    for (String f : cacheDirectory.listAll()) {
       files.add(f);
     }
-    for(String f : in.listAll()) {
+    for (String f : in.listAll()) {
       files.add(f);
     }
     String[] result = files.toArray(new String[files.size()]);

@@ -112,8 +127,8 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
     if (VERBOSE) {
       System.out.println("nrtdir.deleteFile name=" + name);
     }
-    if (cache.fileNameExists(name)) {
-      cache.deleteFile(name);
+    if (cacheDirectory.fileExists(name)) {
+      cacheDirectory.deleteFile(name);
     } else {
       in.deleteFile(name);
     }

@@ -121,15 +136,19 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
 
   @Override
   public synchronized long fileLength(String name) throws IOException {
-    if (cache.fileNameExists(name)) {
-      return cache.fileLength(name);
+    if (cacheDirectory.fileExists(name)) {
+      return cacheDirectory.fileLength(name);
     } else {
       return in.fileLength(name);
     }
   }
 
   public String[] listCachedFiles() {
-    return cache.listAll();
+    try {
+      return cacheDirectory.listAll();
+    } catch (IOException e) {
+      throw new UncheckedIOException(e);
+    }
   }
 
   @Override

@@ -141,7 +160,7 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
       if (VERBOSE) {
         System.out.println("  to cache");
       }
-      return cache.createOutput(name, context);
+      return cacheDirectory.createOutput(name, context);
     } else {
       return in.createOutput(name, context);
     }

@@ -161,7 +180,7 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
   @Override
   public void rename(String source, String dest) throws IOException {
     unCache(source);
-    if (cache.fileNameExists(dest)) {
+    if (cacheDirectory.fileExists(dest)) {
       throw new IllegalArgumentException("target file " + dest + " already exists");
     }
     in.rename(source, dest);

@@ -172,11 +191,11 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
     if (VERBOSE) {
       System.out.println("nrtdir.openInput name=" + name);
     }
-    if (cache.fileNameExists(name)) {
+    if (cacheDirectory.fileExists(name)) {
       if (VERBOSE) {
         System.out.println("  from cache");
       }
-      return cache.openInput(name, context);
+      return cacheDirectory.openInput(name, context);
     } else {
       return in.openInput(name, context);
     }

@@ -191,25 +210,20 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
     // it for defensive reasons... or in case the app is
     // doing something custom (creating outputs directly w/o
     // using IndexWriter):
-    boolean success = false;
-    try {
-      if (cache.isOpen) {
-        for(String fileName : cache.listAll()) {
-          unCache(fileName);
-        }
-      }
-      success = true;
-    } finally {
-      if (success) {
-        IOUtils.close(cache, in);
-      } else {
-        IOUtils.closeWhileHandlingException(cache, in);
-      }
-    }
+    IOUtils.close(
+        () -> {
+          if (!closed.getAndSet(true)) {
+            for(String fileName : cacheDirectory.listAll()) {
+              unCache(fileName);
+            }
+          }
+        },
+        cacheDirectory,
+        in);
   }
 
   /** Subclass can override this to customize logic; return
-   * true if this file should be written to the RAMDirectory. */
+   * true if this file should be written to the RAM-based cache first. */
   protected boolean doCacheWrite(String name, IOContext context) {
     //System.out.println(Thread.currentThread().getName() + ": CACHE check merge=" + merge + " size=" + (merge==null ? 0 : merge.estimatedMergeBytes));

@@ -220,7 +234,7 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
       bytes = context.flushInfo.estimatedSegmentSize;
     }
 
-    return (bytes <= maxMergeSizeBytes) && (bytes + cache.ramBytesUsed()) <= maxCachedBytes;
+    return (bytes <= maxMergeSizeBytes) && (bytes + cacheSize.get()) <= maxCachedBytes;
   }
 
   @Override

@@ -236,11 +250,11 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
     Directory first;
     Directory second;
     if (doCacheWrite(prefix, context)) {
-      first = cache;
+      first = cacheDirectory;
       second = in;
     } else {
       first = in;
-      second = cache;
+      second = cacheDirectory;
     }
 
     IndexOutput out = null;

@@ -282,47 +296,27 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
       }
     }
 
-  private final Object uncacheLock = new Object();
-
   private void unCache(String fileName) throws IOException {
-    // Only let one thread uncache at a time; this only
-    // happens during commit() or close():
-    synchronized(uncacheLock) {
+    // Must sync here because other sync methods have
+    // if (cache.fileNameExists(name)) { ... } else { ... }:
+    synchronized (this) {
       if (VERBOSE) {
         System.out.println("nrtdir.unCache name=" + fileName);
       }
-      if (!cache.fileNameExists(fileName)) {
+      if (!cacheDirectory.fileExists(fileName)) {
        // Another thread beat us...
        return;
      }
      assert slowFileExists(in, fileName) == false: "fileName=" + fileName + " exists both in cache and in delegate";
 
-      final IOContext context = IOContext.DEFAULT;
-      final IndexOutput out = in.createOutput(fileName, context);
-      IndexInput in = null;
-      try {
-        in = cache.openInput(fileName, context);
-        out.copyBytes(in, in.length());
-      } finally {
-        IOUtils.close(in, out);
-      }
-
-      // Lock order: uncacheLock -> this
-      synchronized(this) {
-        // Must sync here because other sync methods have
-        // if (cache.fileNameExists(name)) { ... } else { ... }:
-        cache.deleteFile(fileName);
-      }
+      in.copyFrom(cacheDirectory, fileName, fileName, IOContext.DEFAULT);
+      cacheSize.addAndGet(-cacheDirectory.fileLength(fileName));
+      cacheDirectory.deleteFile(fileName);
     }
   }
 
   @Override
   public long ramBytesUsed() {
-    return cache.ramBytesUsed();
-  }
-
-  @Override
-  public Collection<Accountable> getChildResources() {
-    return Collections.singleton(Accountables.namedAccountable("cache", cache));
+    return cacheSize.get();
   }
 }

@@ -1,259 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.store;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;

/**
 * A memory-resident {@link Directory} implementation. Locking
 * implementation is by default the {@link SingleInstanceLockFactory}.
 *
 * <p><b>Warning:</b> This class is not intended to work with huge
 * indexes. Everything beyond several hundred megabytes will waste
 * resources (GC cycles), because it uses an internal buffer size
 * of 1024 bytes, producing millions of {@code byte[1024]} arrays.
 * This class is optimized for small memory-resident indexes.
 * It also has bad concurrency on multithreaded environments.
 *
 * <p>It is recommended to materialize large indexes on disk and use
 * {@link MMapDirectory}, which is a high-performance directory
 * implementation working directly on the file system cache of the
 * operating system, so copying data to Java heap space is not useful.
 *
 * @deprecated This class uses inefficient synchronization and is discouraged
 * in favor of {@link MMapDirectory}. It will be removed in future versions
 * of Lucene.
 */
@Deprecated
public class RAMDirectory extends BaseDirectory implements Accountable {
  protected final Map<String,RAMFile> fileMap = new ConcurrentHashMap<>();
  protected final AtomicLong sizeInBytes = new AtomicLong();

  /** Used to generate temp file names in {@link #createTempOutput}. */
  private final AtomicLong nextTempFileCounter = new AtomicLong();

  /** Constructs an empty {@link Directory}. */
  public RAMDirectory() {
    this(new SingleInstanceLockFactory());
  }

  /** Constructs an empty {@link Directory} with the given {@link LockFactory}. */
  public RAMDirectory(LockFactory lockFactory) {
    super(lockFactory);
  }

  /**
   * Creates a new <code>RAMDirectory</code> instance from a different
   * <code>Directory</code> implementation. This can be used to load
   * a disk-based index into memory.
   *
   * <p><b>Warning:</b> This class is not intended to work with huge
   * indexes. Everything beyond several hundred megabytes will waste
   * resources (GC cycles), because it uses an internal buffer size
   * of 1024 bytes, producing millions of {@code byte[1024]} arrays.
   * This class is optimized for small memory-resident indexes.
   * It also has bad concurrency on multithreaded environments.
   *
   * <p>For disk-based indexes it is recommended to use
   * {@link MMapDirectory}, which is a high-performance directory
   * implementation working directly on the file system cache of the
   * operating system, so copying data to Java heap space is not useful.
   *
   * <p>Note that the resulting <code>RAMDirectory</code> instance is fully
   * independent from the original <code>Directory</code> (it is a
   * complete copy). Any subsequent changes to the
   * original <code>Directory</code> will not be visible in the
   * <code>RAMDirectory</code> instance.
   *
   * @param dir a <code>Directory</code> value
   * @exception IOException if an error occurs
   */
  public RAMDirectory(FSDirectory dir, IOContext context) throws IOException {
    this(dir, false, context);
  }

  private RAMDirectory(FSDirectory dir, boolean closeDir, IOContext context) throws IOException {
    this();
    for (String file : dir.listAll()) {
      if (!Files.isDirectory(dir.getDirectory().resolve(file))) {
        copyFrom(dir, file, file, context);
      }
    }
    if (closeDir) {
      dir.close();
    }
  }

  @Override
  public final String[] listAll() {
    ensureOpen();
    // NOTE: this returns a "weakly consistent view". Unless we change Dir API, keep this,
    // and do not synchronize or anything stronger. it's great for testing!
    // NOTE: fileMap.keySet().toArray(new String[0]) is broken in non Sun JDKs,
    // and the code below is resilient to map changes during the array population.
    // NOTE: don't replace this with return names.toArray(new String[names.size()]);
    // or some files could be null at the end of the array if files are being deleted
    // concurrently
    Set<String> fileNames = fileMap.keySet();
    List<String> names = new ArrayList<>(fileNames.size());
    for (String name : fileNames) {
      names.add(name);
    }
    String[] namesArray = names.toArray(new String[names.size()]);
    Arrays.sort(namesArray);
    return namesArray;
  }

  public final boolean fileNameExists(String name) {
    ensureOpen();
    return fileMap.containsKey(name);
  }

  /** Returns the length in bytes of a file in the directory.
   * @throws IOException if the file does not exist
   */
  @Override
  public final long fileLength(String name) throws IOException {
    ensureOpen();
    RAMFile file = fileMap.get(name);
    if (file == null) {
      throw new FileNotFoundException(name);
    }
    return file.getLength();
  }

  /**
   * Return total size in bytes of all files in this directory. This is
   * currently quantized to RAMOutputStream.BUFFER_SIZE.
   */
  @Override
  public final long ramBytesUsed() {
    ensureOpen();
    return sizeInBytes.get();
  }

  @Override
  public Collection<Accountable> getChildResources() {
    return Accountables.namedAccountables("file", fileMap);
  }

  @Override
  public void deleteFile(String name) throws IOException {
    ensureOpen();
    RAMFile file = fileMap.remove(name);
    if (file != null) {
      file.directory = null;
      sizeInBytes.addAndGet(-file.sizeInBytes);
    } else {
      throw new FileNotFoundException(name);
    }
  }

  @Override
  public IndexOutput createOutput(String name, IOContext context) throws IOException {
    ensureOpen();
    RAMFile file = newRAMFile();
    if (fileMap.putIfAbsent(name, file) != null) {
      throw new FileAlreadyExistsException(name);
    }
    return new RAMOutputStream(name, file, true);
  }

  @Override
  public IndexOutput createTempOutput(String prefix, String suffix, IOContext context) throws IOException {
    ensureOpen();

    // Make the file first...
    RAMFile file = newRAMFile();

    // ... then try to find a unique name for it:
    while (true) {
      String name = IndexFileNames.segmentFileName(prefix, suffix + "_" + Long.toString(nextTempFileCounter.getAndIncrement(), Character.MAX_RADIX), "tmp");
      if (fileMap.putIfAbsent(name, file) == null) {
        return new RAMOutputStream(name, file, true);
      }
    }
  }

  /**
   * Returns a new {@link RAMFile} for storing data. This method can be
   * overridden to return different {@link RAMFile} impls, that e.g. override
   * {@link RAMFile#newBuffer(int)}.
   */
  protected RAMFile newRAMFile() {
    return new RAMFile(this);
  }

  @Override
  public void sync(Collection<String> names) throws IOException {
  }

  @Override
  public void rename(String source, String dest) throws IOException {
    ensureOpen();
    RAMFile file = fileMap.get(source);
    if (file == null) {
      throw new FileNotFoundException(source);
    }
    if (fileMap.putIfAbsent(dest, file) != null) {
      throw new FileAlreadyExistsException(dest);
    }
    if (!fileMap.remove(source, file)) {
      throw new IllegalStateException("file was unexpectedly replaced: " + source);
    }
    fileMap.remove(source);
  }

  @Override
  public void syncMetaData() throws IOException {
    // we are by definition not durable!
  }

  /** Returns a stream reading an existing file. */
  @Override
  public IndexInput openInput(String name, IOContext context) throws IOException {
    ensureOpen();
    RAMFile file = fileMap.get(name);
    if (file == null) {
      throw new FileNotFoundException(name);
    }
    return new RAMInputStream(name, file);
  }

  /** Closes the store to future operations, releasing associated memory. */
  @Override
  public void close() {
    isOpen = false;
    fileMap.clear();
  }
}

@@ -1,123 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.store;

import java.util.ArrayList;
import java.util.Arrays;

import org.apache.lucene.util.Accountable;

/**
 * Represents a file in RAM as a list of byte[] buffers.
 *
 * @lucene.internal
 * @deprecated This class uses inefficient synchronization and is discouraged
 * in favor of {@link MMapDirectory}. It will be removed in future versions
 * of Lucene.
 */
@Deprecated
public class RAMFile implements Accountable {
  protected final ArrayList<byte[]> buffers = new ArrayList<>();
  long length;
  RAMDirectory directory;
  protected long sizeInBytes;

  // File used as buffer, in no RAMDirectory
  public RAMFile() {}

  RAMFile(RAMDirectory directory) {
    this.directory = directory;
  }

  // For non-stream access from thread that might be concurrent with writing
  public synchronized long getLength() {
    return length;
  }

  protected synchronized void setLength(long length) {
    this.length = length;
  }

  protected final byte[] addBuffer(int size) {
    byte[] buffer = newBuffer(size);
    synchronized(this) {
      buffers.add(buffer);
      sizeInBytes += size;
    }

    if (directory != null) {
      directory.sizeInBytes.getAndAdd(size);
    }
    return buffer;
  }

  protected final synchronized byte[] getBuffer(int index) {
    return buffers.get(index);
  }

  protected final synchronized int numBuffers() {
    return buffers.size();
  }

  /**
   * Expert: allocate a new buffer.
   * Subclasses can allocate differently.
   * @param size size of allocated buffer.
   * @return allocated buffer.
   */
  protected byte[] newBuffer(int size) {
    return new byte[size];
  }

  @Override
  public synchronized long ramBytesUsed() {
    return sizeInBytes;
  }

  @Override
  public String toString() {
    return getClass().getSimpleName() + "(length=" + length + ")";
  }

  @Override
  public int hashCode() {
    int h = (int) (length ^ (length >>> 32));
    for (byte[] block : buffers) {
      h = 31 * h + Arrays.hashCode(block);
    }
    return h;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) return true;
    if (obj == null) return false;
    if (getClass() != obj.getClass()) return false;
    RAMFile other = (RAMFile) obj;
    if (length != other.length) return false;
    if (buffers.size() != other.buffers.size()) {
      return false;
    }
    for (int i = 0; i < buffers.size(); i++) {
      if (!Arrays.equals(buffers.get(i), other.buffers.get(i))) {
        return false;
      }
    }
    return true;
  }
}

@ -1,182 +0,0 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.store;


import java.io.EOFException;
import java.io.IOException;

import static org.apache.lucene.store.RAMOutputStream.BUFFER_SIZE;

/**
 * A memory-resident {@link IndexInput} implementation.
 *
 * @lucene.internal
 * @deprecated This class uses inefficient synchronization and is discouraged
 * in favor of {@link MMapDirectory}. It will be removed in future versions
 * of Lucene.
 */
@Deprecated
public class RAMInputStream extends IndexInput implements Cloneable {

  private final RAMFile file;
  private final long length;

  private byte[] currentBuffer;
  private int currentBufferIndex;

  private int bufferPosition;
  private int bufferLength;

  public RAMInputStream(String name, RAMFile f) throws IOException {
    this(name, f, f.length);
  }

  RAMInputStream(String name, RAMFile f, long length) throws IOException {
    super("RAMInputStream(name=" + name + ")");
    this.file = f;
    this.length = length;
    if (length/BUFFER_SIZE >= Integer.MAX_VALUE) {
      throw new IOException("RAMInputStream too large length=" + length + ": " + name);
    }

    setCurrentBuffer();
  }

  @Override
  public void close() {
    // nothing to do here
  }

  @Override
  public long length() {
    return length;
  }

  @Override
  public byte readByte() throws IOException {
    if (bufferPosition == bufferLength) {
      nextBuffer();
    }
    if (currentBuffer == null) {
      throw new EOFException();
    } else {
      return currentBuffer[bufferPosition++];
    }
  }

  @Override
  public void readBytes(byte[] b, int offset, int len) throws IOException {
    while (len > 0) {
      if (bufferPosition == bufferLength) {
        nextBuffer();
      }

      if (currentBuffer == null) {
        throw new EOFException();
      }

      int remainInBuffer = bufferLength - bufferPosition;
      int bytesToCopy = len < remainInBuffer ? len : remainInBuffer;
      System.arraycopy(currentBuffer, bufferPosition, b, offset, bytesToCopy);
      offset += bytesToCopy;
      len -= bytesToCopy;
      bufferPosition += bytesToCopy;
    }
  }

  @Override
  public long getFilePointer() {
    return (long) currentBufferIndex * BUFFER_SIZE + bufferPosition;
  }

  @Override
  public void seek(long pos) throws IOException {
    int newBufferIndex = (int) (pos / BUFFER_SIZE);

    if (newBufferIndex != currentBufferIndex) {
      // we seek'd to a different buffer:
      currentBufferIndex = newBufferIndex;
      setCurrentBuffer();
    }

    bufferPosition = (int) (pos % BUFFER_SIZE);

    // This is not >= because seeking to exact end of file is OK: this is where
    // you'd also be if you did a readBytes of all bytes in the file
    if (getFilePointer() > length()) {
      throw new EOFException("seek beyond EOF: pos=" + getFilePointer() + " vs length=" + length() + ": " + this);
    }
  }

  private void nextBuffer() throws IOException {
    // This is >= because we are called when there is at least 1 more byte to read:
    if (getFilePointer() >= length()) {
      throw new EOFException("cannot read another byte at EOF: pos=" + getFilePointer() + " vs length=" + length() + ": " + this);
    }
    currentBufferIndex++;
    setCurrentBuffer();
    assert currentBuffer != null;
    bufferPosition = 0;
  }

  private final void setCurrentBuffer() throws IOException {
    if (currentBufferIndex < file.numBuffers()) {
      currentBuffer = file.getBuffer(currentBufferIndex);
      assert currentBuffer != null;
      long bufferStart = (long) BUFFER_SIZE * (long) currentBufferIndex;
      bufferLength = (int) Math.min(BUFFER_SIZE, length - bufferStart);
    } else {
      currentBuffer = null;
    }
  }

  @Override
  public IndexInput slice(String sliceDescription, final long offset, final long sliceLength) throws IOException {
    if (offset < 0 || sliceLength < 0 || offset + sliceLength > this.length) {
      throw new IllegalArgumentException("slice() " + sliceDescription + " out of bounds: " + this);
    }
    return new RAMInputStream(getFullSliceDescription(sliceDescription), file, offset + sliceLength) {
      {
        seek(0L);
      }

      @Override
      public void seek(long pos) throws IOException {
        if (pos < 0L) {
          throw new IllegalArgumentException("Seeking to negative position: " + this);
        }
        super.seek(pos + offset);
      }

      @Override
      public long getFilePointer() {
        return super.getFilePointer() - offset;
      }

      @Override
      public long length() {
        return sliceLength;
      }

      @Override
      public IndexInput slice(String sliceDescription, long ofs, long len) throws IOException {
        return super.slice(sliceDescription, offset + ofs, len);
      }
    };
  }
}
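Migration note: the heap-resident drop-in for a plain new RAMDirectory() is ByteBuffersDirectory, as the test changes further below show; the deprecation text above points at MMapDirectory as the preferred default for real indexes. A minimal sketch of the swap (the file name and value are illustrative, not taken from this commit):

import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;

public class ByteBuffersDirectoryExample {
  public static void main(String[] args) throws Exception {
    // Heap-resident directory; stands in for `new RAMDirectory()`.
    try (Directory dir = new ByteBuffersDirectory()) {
      try (IndexOutput out = dir.createOutput("scratch.bin", IOContext.DEFAULT)) {
        out.writeVInt(42); // any DataOutput-style writes work here
      }
      try (IndexInput in = dir.openInput("scratch.bin", IOContext.DEFAULT)) {
        System.out.println(in.readVInt()); // prints 42
      }
    }
  }
}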
@@ -1,213 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.store;


import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.zip.CRC32;
import java.util.zip.Checksum;

import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;

/**
 * A memory-resident {@link IndexOutput} implementation.
 *
 * @lucene.internal
 * @deprecated This class uses inefficient synchronization and is discouraged
 * in favor of {@link MMapDirectory}. It will be removed in future versions
 * of Lucene.
 */
@Deprecated
public class RAMOutputStream extends IndexOutput implements Accountable {
  static final int BUFFER_SIZE = 1024;

  private final RAMFile file;

  private byte[] currentBuffer;
  private int currentBufferIndex;

  private int bufferPosition;
  private long bufferStart;
  private int bufferLength;

  private final Checksum crc;

  /** Construct an empty output buffer. */
  public RAMOutputStream() {
    this("noname", new RAMFile(), false);
  }

  /** Creates this, with no name. */
  public RAMOutputStream(RAMFile f, boolean checksum) {
    this("noname", f, checksum);
  }

  /** Creates this, with specified name. */
  public RAMOutputStream(String name, RAMFile f, boolean checksum) {
    super("RAMOutputStream(name=\"" + name + "\")", name);
    file = f;

    // make sure that we switch to the
    // first needed buffer lazily
    currentBufferIndex = -1;
    currentBuffer = null;
    if (checksum) {
      crc = new BufferedChecksum(new CRC32());
    } else {
      crc = null;
    }
  }

  /** Copy the current contents of this buffer to the provided {@link DataOutput}. */
  public void writeTo(DataOutput out) throws IOException {
    flush();
    final long end = file.length;
    long pos = 0;
    int buffer = 0;
    while (pos < end) {
      int length = BUFFER_SIZE;
      long nextPos = pos + length;
      if (nextPos > end) { // at the last buffer
        length = (int)(end - pos);
      }
      out.writeBytes(file.getBuffer(buffer++), length);
      pos = nextPos;
    }
  }

  /** Copy the current contents of this buffer to output
   *  byte array */
  public void writeTo(byte[] bytes, int offset) throws IOException {
    flush();
    final long end = file.length;
    long pos = 0;
    int buffer = 0;
    int bytesUpto = offset;
    while (pos < end) {
      int length = BUFFER_SIZE;
      long nextPos = pos + length;
      if (nextPos > end) { // at the last buffer
        length = (int)(end - pos);
      }
      System.arraycopy(file.getBuffer(buffer++), 0, bytes, bytesUpto, length);
      bytesUpto += length;
      pos = nextPos;
    }
  }

  /** Resets this to an empty file. */
  public void reset() {
    currentBuffer = null;
    currentBufferIndex = -1;
    bufferPosition = 0;
    bufferStart = 0;
    bufferLength = 0;
    file.setLength(0);
    if (crc != null) {
      crc.reset();
    }
  }

  @Override
  public void close() throws IOException {
    flush();
  }

  @Override
  public void writeByte(byte b) throws IOException {
    if (bufferPosition == bufferLength) {
      currentBufferIndex++;
      switchCurrentBuffer();
    }
    if (crc != null) {
      crc.update(b);
    }
    currentBuffer[bufferPosition++] = b;
  }

  @Override
  public void writeBytes(byte[] b, int offset, int len) throws IOException {
    assert b != null;
    if (crc != null) {
      crc.update(b, offset, len);
    }
    while (len > 0) {
      if (bufferPosition == bufferLength) {
        currentBufferIndex++;
        switchCurrentBuffer();
      }

      int remainInBuffer = currentBuffer.length - bufferPosition;
      int bytesToCopy = len < remainInBuffer ? len : remainInBuffer;
      System.arraycopy(b, offset, currentBuffer, bufferPosition, bytesToCopy);
      offset += bytesToCopy;
      len -= bytesToCopy;
      bufferPosition += bytesToCopy;
    }
  }

  private final void switchCurrentBuffer() {
    if (currentBufferIndex == file.numBuffers()) {
      currentBuffer = file.addBuffer(BUFFER_SIZE);
    } else {
      currentBuffer = file.getBuffer(currentBufferIndex);
    }
    bufferPosition = 0;
    bufferStart = (long) BUFFER_SIZE * (long) currentBufferIndex;
    bufferLength = currentBuffer.length;
  }

  private void setFileLength() {
    long pointer = bufferStart + bufferPosition;
    if (pointer > file.length) {
      file.setLength(pointer);
    }
  }

  /** Forces any buffered output to be written. */
  protected void flush() throws IOException {
    setFileLength();
  }

  @Override
  public long getFilePointer() {
    return currentBufferIndex < 0 ? 0 : bufferStart + bufferPosition;
  }

  /** Returns byte usage of all buffers. */
  @Override
  public long ramBytesUsed() {
    return (long) file.numBuffers() * (long) BUFFER_SIZE;
  }

  @Override
  public Collection<Accountable> getChildResources() {
    return Collections.singleton(Accountables.namedAccountable("file", file));
  }

  @Override
  public long getChecksum() throws IOException {
    if (crc == null) {
      throw new IllegalStateException("internal RAMOutputStream created with checksum disabled");
    } else {
      return crc.getValue();
    }
  }
}
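Migration note: the RAMFile plus RAMOutputStream scratch-buffer pattern above maps onto ByteBuffersDataOutput, and toDataInput() replaces wrapping the buffer in a RAMInputStream; the TestLucene80DocValuesFormat changes further below use exactly this pair. A minimal round-trip sketch (values illustrative):

import org.apache.lucene.store.ByteBuffersDataInput;
import org.apache.lucene.store.ByteBuffersDataOutput;

public class ByteBuffersScratchExample {
  public static void main(String[] args) throws Exception {
    // Growable in-memory buffer, standing in for RAMFile + RAMOutputStream(buffer, false).
    ByteBuffersDataOutput out = new ByteBuffersDataOutput();
    out.writeVLong(7L);
    out.writeVLong(11L);

    // Read-only view over the written bytes, standing in for new RAMInputStream("", buffer).
    ByteBuffersDataInput in = out.toDataInput();
    System.out.println(in.readVLong()); // 7
    System.out.println(in.readVLong()); // 11
  }
}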
@@ -25,8 +25,7 @@ import java.util.HashSet;
 * meaning all locking will take place through this one instance.
 * Only use this {@link LockFactory} when you are certain all
 * IndexWriters for a given index are running
 * against a single shared in-process Directory instance. This is
 * currently the default locking for RAMDirectory.
 * against a single shared in-process Directory instance.
 *
 * @see LockFactory
 */
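Note on the javadoc change above: ByteBuffersDirectory can be handed the same in-process lock factory explicitly; if memory serves, its no-arg constructor already defaults to SingleInstanceLockFactory, but that default and the LockFactory-accepting constructor are assumptions here, so this sketch wires the factory by hand:

import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.SingleInstanceLockFactory;

public class SingleInstanceLockExample {
  public static void main(String[] args) throws Exception {
    // All IndexWriters locking through this factory must share this one Directory instance.
    try (Directory dir = new ByteBuffersDirectory(new SingleInstanceLockFactory())) {
      System.out.println(dir.getClass().getSimpleName());
    }
  }
}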
@@ -46,7 +46,6 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.FileSwitchDirectory;
import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.store.RAMDirectory;

/** This class emulates the new Java 7 "Try-With-Resources" statement.
 * Remove once Lucene is on Java 7.

@@ -487,7 +486,7 @@ public final class IOUtils {
      FileSwitchDirectory fsd = (FileSwitchDirectory) dir;
      // Spinning is contagious:
      return spins(fsd.getPrimaryDir()) || spins(fsd.getSecondaryDir());
    } else if (dir instanceof RAMDirectory || dir instanceof ByteBuffersDirectory) {
    } else if (dir instanceof ByteBuffersDirectory) {
      return false;
    } else if (dir instanceof FSDirectory) {
      return spins(((FSDirectory) dir).getDirectory());
@@ -34,9 +34,9 @@ import org.apache.lucene.index.MergePolicy.OneMerge;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.MergeScheduler;
import org.apache.lucene.index.MergeTrigger;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.LuceneTestCase;

@@ -175,7 +175,7 @@ public class TestMergeSchedulerExternal extends LuceneTestCase {
    // we don't really need to execute anything, just to make sure the custom MS
    // compiles. But ensure that it can be used as well, e.g., no other hidden
    // dependencies or something. Therefore, don't use any random API !
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();
    IndexWriterConfig conf = new IndexWriterConfig(null);
    conf.setMergeScheduler(new ReportingMergeScheduler());
    IndexWriter writer = new IndexWriter(dir, conf);

@@ -187,5 +187,4 @@ public class TestMergeSchedulerExternal extends LuceneTestCase {
    writer.close();
    dir.close();
  }

}
@@ -23,11 +23,11 @@ import static org.apache.lucene.codecs.lucene50.ForUtil.MAX_ENCODED_SIZE;

import java.io.IOException;

import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.packed.PackedInts;

@@ -55,7 +55,7 @@ public class TestForUtil extends LuceneTestCase {
      }
    }

    final Directory d = new RAMDirectory();
    final Directory d = new ByteBuffersDirectory();
    final long endPointer;

    {
@@ -61,10 +61,9 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.index.TermsEnum.SeekStatus;
import org.apache.lucene.store.ByteBuffersDataInput;
import org.apache.lucene.store.ByteBuffersDataOutput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMFile;
import org.apache.lucene.store.RAMInputStream;
import org.apache.lucene.store.RAMOutputStream;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.TestUtil;

@@ -445,8 +444,7 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
    for (int maxDoc = frontier - 1; maxDoc <= frontier + 1; ++maxDoc) {
      final Directory dir = newDirectory();
      IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy()));
      RAMFile buffer = new RAMFile();
      RAMOutputStream out = new RAMOutputStream(buffer, false);
      ByteBuffersDataOutput out = new ByteBuffersDataOutput();
      Document doc = new Document();
      SortedSetDocValuesField field1 = new SortedSetDocValuesField("sset", new BytesRef());
      doc.add(field1);

@@ -465,7 +463,7 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
          out.writeBytes(ref.bytes, ref.offset, ref.length);
        }
      }
      out.close();

      w.forceMerge(1);
      DirectoryReader r = DirectoryReader.open(w);
      w.close();

@@ -473,21 +471,20 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
      assertEquals(maxDoc, sr.maxDoc());
      SortedSetDocValues values = sr.getSortedSetDocValues("sset");
      assertNotNull(values);
      try (RAMInputStream in = new RAMInputStream("", buffer)) {
        BytesRefBuilder b = new BytesRefBuilder();
        for (int i = 0; i < maxDoc; ++i) {
          assertEquals(i, values.nextDoc());
          final int numValues = in.readVInt();
      ByteBuffersDataInput in = out.toDataInput();
      BytesRefBuilder b = new BytesRefBuilder();
      for (int i = 0; i < maxDoc; ++i) {
        assertEquals(i, values.nextDoc());
        final int numValues = in.readVInt();

          for (int j = 0; j < numValues; ++j) {
            b.setLength(in.readVInt());
            b.grow(b.length());
            in.readBytes(b.bytes(), 0, b.length());
            assertEquals(b.get(), values.lookupOrd(values.nextOrd()));
          }

          assertEquals(SortedSetDocValues.NO_MORE_ORDS, values.nextOrd());
        for (int j = 0; j < numValues; ++j) {
          b.setLength(in.readVInt());
          b.grow(b.length());
          in.readBytes(b.bytes(), 0, b.length());
          assertEquals(b.get(), values.lookupOrd(values.nextOrd()));
        }

        assertEquals(SortedSetDocValues.NO_MORE_ORDS, values.nextOrd());
      }
      r.close();
      dir.close();

@@ -500,8 +497,8 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
    for (int maxDoc = frontier - 1; maxDoc <= frontier + 1; ++maxDoc) {
      final Directory dir = newDirectory();
      IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy()));
      RAMFile buffer = new RAMFile();
      RAMOutputStream out = new RAMOutputStream(buffer, false);
      ByteBuffersDataOutput buffer = new ByteBuffersDataOutput();

      Document doc = new Document();
      SortedNumericDocValuesField field1 = new SortedNumericDocValuesField("snum", 0L);
      doc.add(field1);

@@ -513,10 +510,10 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
        field1.setLongValue(s1);
        field2.setLongValue(s2);
        w.addDocument(doc);
        out.writeVLong(Math.min(s1, s2));
        out.writeVLong(Math.max(s1, s2));
        buffer.writeVLong(Math.min(s1, s2));
        buffer.writeVLong(Math.max(s1, s2));
      }
      out.close();

      w.forceMerge(1);
      DirectoryReader r = DirectoryReader.open(w);
      w.close();

@@ -524,13 +521,12 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
      assertEquals(maxDoc, sr.maxDoc());
      SortedNumericDocValues values = sr.getSortedNumericDocValues("snum");
      assertNotNull(values);
      try (RAMInputStream in = new RAMInputStream("", buffer)) {
        for (int i = 0; i < maxDoc; ++i) {
          assertEquals(i, values.nextDoc());
          assertEquals(2, values.docValueCount());
          assertEquals(in.readVLong(), values.nextValue());
          assertEquals(in.readVLong(), values.nextValue());
        }
      ByteBuffersDataInput dataInput = buffer.toDataInput();
      for (int i = 0; i < maxDoc; ++i) {
        assertEquals(i, values.nextDoc());
        assertEquals(2, values.docValueCount());
        assertEquals(dataInput.readVLong(), values.nextValue());
        assertEquals(dataInput.readVLong(), values.nextValue());
      }
      r.close();
      dir.close();
@@ -43,10 +43,11 @@ import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;

import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;

@@ -669,7 +670,7 @@ public class TestAddIndexes extends LuceneTestCase {

  public RunAddIndexesThreads(int numCopy) throws Throwable {
    NUM_COPY = numCopy;
    dir = new MockDirectoryWrapper(random(), new RAMDirectory());
    dir = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random()))
        .setMaxBufferedDocs(2));
    for (int i = 0; i < NUM_INIT_DOCS; i++)

@@ -1103,7 +1104,7 @@ public class TestAddIndexes extends LuceneTestCase {
  public void testNonCFSLeftovers() throws Exception {
    Directory[] dirs = new Directory[2];
    for (int i = 0; i < dirs.length; i++) {
      dirs[i] = new RAMDirectory();
      dirs[i] = new ByteBuffersDirectory();
      IndexWriter w = new IndexWriter(dirs[i], new IndexWriterConfig(new MockAnalyzer(random())));
      Document d = new Document();
      FieldType customType = new FieldType(TextField.TYPE_STORED);

@@ -1115,7 +1116,7 @@ public class TestAddIndexes extends LuceneTestCase {

    DirectoryReader[] readers = new DirectoryReader[] { DirectoryReader.open(dirs[0]), DirectoryReader.open(dirs[1]) };

    MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory());
    MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
    IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy(true));
    MergePolicy lmp = conf.getMergePolicy();
    // Force creation of CFS:
@@ -24,12 +24,12 @@ import java.util.Collections;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LineFileDocs;
import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;
import org.apache.lucene.util.LuceneTestCase;

@@ -87,7 +87,7 @@ public class TestAllFilesCheckIndexHeader extends LuceneTestCase {

  private void checkOneFile(Directory dir, String victim) throws IOException {
    // use ramdir explicit, as we do evil things like try to generate broken files, deletes must work.
    try (BaseDirectoryWrapper dirCopy = new MockDirectoryWrapper(random(), new RAMDirectory())) {
    try (BaseDirectoryWrapper dirCopy = new MockDirectoryWrapper(random(), new ByteBuffersDirectory())) {
      dirCopy.setCheckIndexOnClose(false);

      long victimLength = dir.fileLength(victim);
@@ -163,19 +163,16 @@ public class TestAtomicUpdate extends LuceneTestCase {
    //System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
  }

  /*
    Run above stress test against RAMDirectory and then
    FSDirectory.
  */
  /* */
  public void testAtomicUpdates() throws Exception {
    Directory directory;

    // First in a RAM directory:
    directory = new MockDirectoryWrapper(random(), new RAMDirectory());
    // run against a random directory.
    directory = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
    runTest(directory);
    directory.close();

    // Second in an FSDirectory:
    // then against an FSDirectory.
    Path dirPath = createTempDir("lucene.test.atomic");
    directory = newFSDirectory(dirPath);
    runTest(directory);
@@ -39,10 +39,10 @@ import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;

@@ -688,7 +688,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
  }

  public void testNPEAfterInvalidReindex1() throws Exception {
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();

    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
    Document doc = new Document();

@@ -735,7 +735,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
  }

  public void testNPEAfterInvalidReindex2() throws Exception {
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();

    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
    Document doc = new Document();

@@ -974,7 +974,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
  // LUCENE-5931: we make a "best effort" to catch this abuse and throw a clear(er)
  // exception than what would otherwise look like hard to explain index corruption during searching
  public void testDeleteIndexFilesWhileReaderStillOpen() throws Exception {
    RAMDirectory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();
    IndexWriter w = new IndexWriter(dir,
        new IndexWriterConfig(new MockAnalyzer(random())));
    Document doc = new Document();
@@ -80,6 +80,7 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.FilterDirectory;

@@ -91,7 +92,6 @@ import org.apache.lucene.store.MMapDirectory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.NIOFSDirectory;
import org.apache.lucene.store.NoLockFactory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.store.SimpleFSLockFactory;
import org.apache.lucene.util.Bits;

@@ -869,7 +869,7 @@ public class TestIndexWriter extends LuceneTestCase {
      this.random = new Random(random().nextLong());
      // make a little directory for addIndexes
      // LUCENE-2239: won't work with NIOFS/MMAP
      adder = new MockDirectoryWrapper(random, new RAMDirectory());
      adder = new MockDirectoryWrapper(random, new ByteBuffersDirectory());
      IndexWriterConfig conf = newIndexWriterConfig(random, new MockAnalyzer(random));
      if (conf.getMergeScheduler() instanceof ConcurrentMergeScheduler) {
        conf.setMergeScheduler(new SuppressingConcurrentMergeScheduler() {

@@ -910,7 +910,7 @@ public class TestIndexWriter extends LuceneTestCase {
    @Override
    public void run() {
      // LUCENE-2239: won't work with NIOFS/MMAP
      MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory());
      MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new ByteBuffersDirectory());

      // open/close slowly sometimes
      dir.setUseSlowOpenClosers(true);

@@ -1596,7 +1596,7 @@ public class TestIndexWriter extends LuceneTestCase {

  public void testDeleteAllNRTLeftoverFiles() throws Exception {

    MockDirectoryWrapper d = new MockDirectoryWrapper(random(), new RAMDirectory());
    MockDirectoryWrapper d = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
    IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random())));
    Document doc = new Document();
    for(int i = 0; i < 20; i++) {

@@ -1618,7 +1618,7 @@ public class TestIndexWriter extends LuceneTestCase {
  }

  public void testNRTReaderVersion() throws Exception {
    Directory d = new MockDirectoryWrapper(random(), new RAMDirectory());
    Directory d = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
    IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random())));
    Document doc = new Document();
    doc.add(newStringField("id", "0", Field.Store.YES));
@@ -52,13 +52,14 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOSupplier;

@@ -1713,9 +1714,15 @@ public class TestIndexWriterExceptions extends LuceneTestCase {

  // TODO: we could also check isValid, to catch "broken" bytesref values, might be too much?

  static class UOEDirectory extends RAMDirectory {
  static class UOEDirectory extends FilterDirectory {
    boolean doFail = false;

    /**
     */
    protected UOEDirectory() {
      super(new ByteBuffersDirectory());
    }

    @Override
    public IndexInput openInput(String name, IOContext context) throws IOException {
      if (doFail && name.startsWith("segments_")) {
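The UOEDirectory change above is the general recipe for test doubles now that RAMDirectory cannot be subclassed: extend FilterDirectory, delegate to a ByteBuffersDirectory, and override only the calls to intercept. A standalone sketch of the same pattern (class name and failure message are illustrative, not from this commit):

import java.io.IOException;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;

// Fails reads of segments files on demand; everything else passes through to the delegate.
class FailingSegmentsDirectory extends FilterDirectory {
  volatile boolean doFail = false;

  FailingSegmentsDirectory() {
    super(new ByteBuffersDirectory());
  }

  @Override
  public IndexInput openInput(String name, IOContext context) throws IOException {
    if (doFail && name.startsWith("segments_")) {
      throw new UnsupportedOperationException("simulated failure opening " + name);
    }
    return super.openInput(name, context);
  }
}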
@@ -32,9 +32,9 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.IOSupplier;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;

@@ -62,7 +62,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
      if (VERBOSE) {
        System.out.println("TEST: cycle: diskFree=" + diskFree);
      }
      MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory());
      MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
      dir.setMaxSizeInBytes(diskFree);
      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
      MergeScheduler ms = writer.getConfig().getMergeScheduler();
@@ -33,10 +33,10 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.InfoStream;

@@ -1111,7 +1111,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
  /** Make sure if all we do is open NRT reader against
   *  writer, we don't see merge starvation. */
  public void testTooManySegments() throws Exception {
    Directory dir = getAssertNoDeletesDirectory(new RAMDirectory());
    Directory dir = getAssertNoDeletesDirectory(new ByteBuffersDirectory());
    // Don't use newIndexWriterConfig, because we need a
    // "sane" mergePolicy:
    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
@@ -25,11 +25,11 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.TestUtil;

@@ -73,7 +73,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {
        return new TokenStreamComponents(new MockTokenizer(MockTokenizer.WHITESPACE, true));
      }
    };
    Directory directory = new SeekCountingDirectory(new RAMDirectory());
    Directory directory = new SeekCountingDirectory(new ByteBuffersDirectory());
    // note: test explicitly disables payloads
    IndexWriter writer = new IndexWriter(
        directory,
@@ -24,11 +24,11 @@ import org.apache.lucene.analysis.*;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;

@@ -44,8 +44,8 @@ import org.junit.Before;
 */
public class TestMultiLevelSkipList extends LuceneTestCase {

  class CountingRAMDirectory extends MockDirectoryWrapper {
    public CountingRAMDirectory(Directory delegate) {
  class CountingDirectory extends MockDirectoryWrapper {
    public CountingDirectory(Directory delegate) {
      super(random(), delegate);
    }

@@ -66,7 +66,7 @@ public class TestMultiLevelSkipList extends LuceneTestCase {
  }

  public void testSimpleSkip() throws IOException {
    Directory dir = new CountingRAMDirectory(new RAMDirectory());
    Directory dir = new CountingDirectory(new ByteBuffersDirectory());
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new PayloadAnalyzer())
        .setCodec(TestUtil.alwaysPostingsFormat(TestUtil.getDefaultPostingsFormat()))
        .setMergePolicy(newLogMergePolicy()));
@@ -27,8 +27,8 @@ import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;

@@ -38,7 +38,7 @@ public class TestMultiTermsEnum extends LuceneTestCase {

  // LUCENE-6826
  public void testNoTermsInField() throws Exception {
    Directory directory = new RAMDirectory();
    Directory directory = new ByteBuffersDirectory();
    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(new MockAnalyzer(random())));
    Document document = new Document();
    document.add(new StringField("deleted", "0", Field.Store.YES));

@@ -47,7 +47,7 @@ public class TestMultiTermsEnum extends LuceneTestCase {
    DirectoryReader reader = DirectoryReader.open(writer);
    writer.close();

    Directory directory2 = new RAMDirectory();
    Directory directory2 = new ByteBuffersDirectory();
    writer = new IndexWriter(directory2, new IndexWriterConfig(new MockAnalyzer(random())));

    List<LeafReaderContext> leaves = reader.leaves();
@@ -120,7 +120,7 @@ public class TestPayloads extends LuceneTestCase {
    ram.close();
  }

  // Tests if payloads are correctly stored and loaded using both RamDirectory and FSDirectory
  // Tests if payloads are correctly stored and loaded.
  public void testPayloadsEncoding() throws Exception {
    Directory dir = newDirectory();
    performTest(dir);
@@ -22,8 +22,9 @@ import java.util.Collections;
import java.util.HashMap;

import org.apache.lucene.codecs.Codec;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.StringHelper;

@@ -37,7 +38,7 @@ public class TestPendingDeletes extends LuceneTestCase {
  }

  public void testDeleteDoc() throws IOException {
    RAMDirectory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();
    SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "test", 10, false, Codec.getDefault(),
        Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
    SegmentCommitInfo commitInfo = new SegmentCommitInfo(si, 0, 0, -1, -1, -1);

@@ -71,7 +72,7 @@ public class TestPendingDeletes extends LuceneTestCase {
  }

  public void testWriteLiveDocs() throws IOException {
    RAMDirectory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();
    SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "test", 6, false, Codec.getDefault(),
        Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
    SegmentCommitInfo commitInfo = new SegmentCommitInfo(si, 0, 0, -1, -1, -1);

@@ -128,7 +129,7 @@ public class TestPendingDeletes extends LuceneTestCase {
  }

  public void testIsFullyDeleted() throws IOException {
    RAMDirectory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();
    SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "test", 3, false, Codec.getDefault(),
        Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
    SegmentCommitInfo commitInfo = new SegmentCommitInfo(si, 0, 0, -1, -1, -1);
@@ -29,8 +29,8 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;

@@ -147,7 +147,7 @@ public class TestPendingSoftDeletes extends TestPendingDeletes {
  }

  public void testApplyUpdates() throws IOException {
    RAMDirectory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();
    SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "test", 10, false, Codec.getDefault(),
        Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
    SegmentCommitInfo commitInfo = new SegmentCommitInfo(si, 0, 0, -1, -1, -1);
@@ -23,9 +23,9 @@ import java.util.Random;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;

@@ -35,7 +35,7 @@ import org.apache.lucene.util.TestUtil;
public class TestPerSegmentDeletes extends LuceneTestCase {
  public void testDeletes1() throws Exception {
    //IndexWriter.debug2 = System.out;
    Directory dir = new MockDirectoryWrapper(new Random(random().nextLong()), new RAMDirectory());
    Directory dir = new MockDirectoryWrapper(new Random(random().nextLong()), new ByteBuffersDirectory());
    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
    iwc.setMergeScheduler(new SerialMergeScheduler());
    iwc.setMaxBufferedDocs(5000);
@@ -33,9 +33,9 @@ import org.apache.lucene.document.StringField;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.PointValues.IntersectVisitor;
import org.apache.lucene.index.PointValues.Relation;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;

@@ -624,7 +624,7 @@ public class TestPointValues extends LuceneTestCase {
  }

  public void testCheckIndexIncludesPoints() throws Exception {
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
    Document doc = new Document();
    doc.add(new IntPoint("int1", 17));

@@ -659,7 +659,7 @@ public class TestPointValues extends LuceneTestCase {
  }

  public void testMergedStatsOneSegmentWithoutPoints() throws IOException {
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null).setMergePolicy(NoMergePolicy.INSTANCE));
    w.addDocument(new Document());
    DirectoryReader.open(w).close();

@@ -680,7 +680,7 @@ public class TestPointValues extends LuceneTestCase {
  }

  public void testMergedStatsAllPointsDeleted() throws IOException {
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
    w.addDocument(new Document());
    Document doc = new Document();

@@ -718,7 +718,7 @@ public class TestPointValues extends LuceneTestCase {
  private void doTestMergedStats() throws IOException {
    final int numDims = TestUtil.nextInt(random(), 1, 8);
    final int numBytesPerDim = TestUtil.nextInt(random(), 1, 16);
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
    final int numDocs = TestUtil.nextInt(random(), 10, 20);
    for (int i = 0; i < numDocs; ++i) {
@@ -22,8 +22,8 @@ import java.io.IOException;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;

public class TestSizeBoundedForceMerge extends LuceneTestCase {

@@ -54,7 +54,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {

  public void testByteSizeLimit() throws Exception {
    // tests that the max merge size constraint is applied during forceMerge.
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();

    // Prepare an index w/ several small segments and a large one.
    IndexWriterConfig conf = newWriterConfig();

@@ -85,7 +85,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {

  public void testNumDocsLimit() throws Exception {
    // tests that the max merge docs constraint is applied during forceMerge.
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();

    // Prepare an index w/ several small segments and a large one.
    IndexWriterConfig conf = newWriterConfig();

@@ -116,7 +116,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
  }

  public void testLastSegmentTooLarge() throws Exception {
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();

    IndexWriterConfig conf = newWriterConfig();
    IndexWriter writer = new IndexWriter(dir, conf);

@@ -142,7 +142,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
  }

  public void testFirstSegmentTooLarge() throws Exception {
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();

    IndexWriterConfig conf = newWriterConfig();
    IndexWriter writer = new IndexWriter(dir, conf);

@@ -168,7 +168,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
  }

  public void testAllSegmentsSmall() throws Exception {
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();

    IndexWriterConfig conf = newWriterConfig();
    IndexWriter writer = new IndexWriter(dir, conf);

@@ -194,7 +194,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
  }

  public void testAllSegmentsLarge() throws Exception {
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();

    IndexWriterConfig conf = newWriterConfig();
    IndexWriter writer = new IndexWriter(dir, conf);

@@ -219,7 +219,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
  }

  public void testOneLargeOneSmall() throws Exception {
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();

    IndexWriterConfig conf = newWriterConfig();
    IndexWriter writer = new IndexWriter(dir, conf);

@@ -245,7 +245,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
  }

  public void testMergeFactor() throws Exception {
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();

    IndexWriterConfig conf = newWriterConfig();
    IndexWriter writer = new IndexWriter(dir, conf);

@@ -277,7 +277,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
  }

  public void testSingleMergeableSegment() throws Exception {
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();

    IndexWriterConfig conf = newWriterConfig();
    IndexWriter writer = new IndexWriter(dir, conf);

@@ -306,7 +306,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
  }

  public void testSingleNonMergeableSegment() throws Exception {
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();

    IndexWriterConfig conf = newWriterConfig();
    IndexWriter writer = new IndexWriter(dir, conf);

@@ -330,7 +330,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
  }

  public void testSingleMergeableTooLargeSegment() throws Exception {
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();

    IndexWriterConfig conf = newWriterConfig();
    IndexWriter writer = new IndexWriter(dir, conf);
@@ -159,10 +159,7 @@ public class TestStressIndexing extends LuceneTestCase {
    //System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
  }

  /*
    Run above stress test against RAMDirectory and then
    FSDirectory.
  */
  /* */
  public void testStressIndexAndSearching() throws Exception {
    Directory directory = newMaybeVirusCheckingDirectory();
    if (directory instanceof MockDirectoryWrapper) {
@@ -137,10 +137,7 @@ public class TestThreadedForceMerge extends LuceneTestCase {
    writer.close();
  }

  /*
    Run above stress test against RAMDirectory and then
    FSDirectory.
  */
  /* */
  public void testThreadedForceMerge() throws Exception {
    Directory directory = newDirectory();
    runTest(random(), directory);
@@ -24,9 +24,9 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StringField;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.English;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;

@@ -231,8 +231,8 @@ public class TestTransactions extends LuceneTestCase {

  public void testTransactions() throws Throwable {
    // we cant use non-ramdir on windows, because this test needs to double-write.
    MockDirectoryWrapper dir1 = new MockDirectoryWrapper(random(), new RAMDirectory());
    MockDirectoryWrapper dir2 = new MockDirectoryWrapper(random(), new RAMDirectory());
    MockDirectoryWrapper dir1 = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
    MockDirectoryWrapper dir2 = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
    dir1.failOn(new RandomFailure());
    dir2.failOn(new RandomFailure());
    dir1.setFailOnOpenInput(false);
@@ -30,8 +30,8 @@ import org.apache.lucene.search.SearcherFactory;
import org.apache.lucene.search.SearcherManager;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;



@@ -53,7 +53,7 @@ public class TestTryDelete extends LuceneTestCase
  private static Directory createIndex ()
    throws IOException
  {
    Directory directory = new RAMDirectory();
    Directory directory = new ByteBuffersDirectory();

    IndexWriter writer = getWriter(directory);

@@ -33,8 +33,8 @@ import org.apache.lucene.index.MultiTerms;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.Ignore;

@@ -337,7 +337,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
  }

  public void testZeroPosIncr() throws IOException {
    Directory dir = new RAMDirectory();
    Directory dir = new ByteBuffersDirectory();
    final Token[] tokens = new Token[3];
    tokens[0] = new Token();
    tokens[0].append("a");
@ -23,7 +23,6 @@ import org.apache.lucene.analysis.MockAnalyzer;
|
|||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.index.DirectoryReader;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
|
@ -35,30 +34,11 @@ import org.apache.lucene.util.LuceneTestCase;
|
|||
|
||||
|
||||
public class TestScorerPerf extends LuceneTestCase {
|
||||
boolean validate = true; // set to false when doing performance testing
|
||||
|
||||
FixedBitSet[] sets;
|
||||
Term[] terms;
|
||||
IndexSearcher s;
|
||||
IndexReader r;
|
||||
Directory d;
|
||||
|
||||
// TODO: this should be setUp()....
|
||||
public void createDummySearcher() throws Exception {
|
||||
// Create a dummy index with nothing in it.
|
||||
// This could possibly fail if Lucene starts checking for docid ranges...
|
||||
d = newDirectory();
|
||||
IndexWriter iw = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random())));
|
||||
iw.addDocument(new Document());
|
||||
iw.close();
|
||||
r = DirectoryReader.open(d);
|
||||
s = newSearcher(r);
|
||||
s.setQueryCache(null);
|
||||
}
|
||||
private final boolean validate = true; // set to false when doing performance testing
|
||||
|
||||
public void createRandomTerms(int nDocs, int nTerms, double power, Directory dir) throws Exception {
|
||||
int[] freq = new int[nTerms];
|
||||
terms = new Term[nTerms];
|
||||
Term[] terms = new Term[nTerms];
|
||||
for (int i=0; i<nTerms; i++) {
|
||||
int f = (nTerms+1)-i; // make first terms less frequent
|
||||
freq[i] = (int)Math.ceil(Math.pow(f,power));
|
||||
|
@ -180,7 +160,7 @@ public class TestScorerPerf extends LuceneTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
FixedBitSet addClause(BooleanQuery.Builder bq, FixedBitSet result) {
FixedBitSet addClause(FixedBitSet[] sets, BooleanQuery.Builder bq, FixedBitSet result) {
final FixedBitSet rnd = sets[random().nextInt(sets.length)];
Query q = new BitSetQuery(rnd);
bq.add(q, BooleanClause.Occur.MUST);

@ -192,7 +172,7 @@ public class TestScorerPerf extends LuceneTestCase {
}

public int doConjunctions(int iter, int maxClauses) throws IOException {
public int doConjunctions(IndexSearcher s, FixedBitSet[] sets, int iter, int maxClauses) throws IOException {
int ret=0;

for (int i=0; i<iter; i++) {

@ -200,7 +180,7 @@ public class TestScorerPerf extends LuceneTestCase {
BooleanQuery.Builder bq = new BooleanQuery.Builder();
FixedBitSet result=null;
for (int j=0; j<nClauses; j++) {
result = addClause(bq,result);
result = addClause(sets, bq, result);
}

CountingHitCollector hc = validate ? new MatchingHitCollector(result)

@ -215,7 +195,11 @@ public class TestScorerPerf extends LuceneTestCase {
return ret;
}

public int doNestedConjunctions(int iter, int maxOuterClauses, int maxClauses) throws IOException {
public int doNestedConjunctions(IndexSearcher s,
FixedBitSet[] sets,
int iter,
int maxOuterClauses,
int maxClauses) throws IOException {
int ret=0;
long nMatches=0;

@ -229,7 +213,7 @@ public class TestScorerPerf extends LuceneTestCase {
int nClauses = random().nextInt(maxClauses-1)+2; // min 2 clauses
BooleanQuery.Builder bq = new BooleanQuery.Builder();
for (int j=0; j<nClauses; j++) {
result = addClause(bq,result);
result = addClause(sets, bq,result);
}

oq.add(bq.build(), BooleanClause.Occur.MUST);

@ -247,8 +231,8 @@ public class TestScorerPerf extends LuceneTestCase {
return ret;
}

public int doTermConjunctions(IndexSearcher s,
public int doTermConjunctions(Term[] terms,
IndexSearcher s,
int termsInIndex,
int maxClauses,
int iter

@ -283,10 +267,11 @@ public class TestScorerPerf extends LuceneTestCase {

public int doNestedTermConjunctions(IndexSearcher s,
int termsInIndex,
int maxOuterClauses,
int maxClauses,
int iter
Term[] terms,
int termsInIndex,
int maxOuterClauses,
int maxClauses,
int iter
) throws IOException {
int ret=0;
long nMatches=0;

@ -349,102 +334,22 @@ public class TestScorerPerf extends LuceneTestCase {
return ret;
}

public void testConjunctions() throws Exception {
// test many small sets... the bugs will be found on boundary conditions
createDummySearcher();
validate=true;
sets=randBitSets(atLeast(1000), atLeast(10));
doConjunctions(atLeast(10000), atLeast(5));
doNestedConjunctions(atLeast(10000), atLeast(3), atLeast(3));
r.close();
d.close();
}
try (Directory d = newDirectory()) {
IndexWriter iw = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random())));
iw.addDocument(new Document());
iw.close();

/***
int bigIter=10;
try (DirectoryReader r = DirectoryReader.open(d)) {
IndexSearcher s = newSearcher(r);
s.setQueryCache(null);

public void testConjunctionPerf() throws Exception {
r = newRandom();
createDummySearcher();
validate=false;
sets=randBitSets(32,1000000);
for (int i=0; i<bigIter; i++) {
long start = System.currentTimeMillis();
doConjunctions(500,6);
long end = System.currentTimeMillis();
if (VERBOSE) System.out.println("milliseconds="+(end-start));
FixedBitSet[] sets = randBitSets(atLeast(1000), atLeast(10));

doConjunctions(s, sets, atLeast(10000), atLeast(5));
doNestedConjunctions(s, sets, atLeast(10000), atLeast(3), atLeast(3));
}
}
s.close();
}

public void testNestedConjunctionPerf() throws Exception {
r = newRandom();
createDummySearcher();
validate=false;
sets=randBitSets(32,1000000);
for (int i=0; i<bigIter; i++) {
long start = System.currentTimeMillis();
doNestedConjunctions(500,3,3);
long end = System.currentTimeMillis();
if (VERBOSE) System.out.println("milliseconds="+(end-start));
}
s.close();
}

public void testConjunctionTerms() throws Exception {
r = newRandom();
validate=false;
RAMDirectory dir = new RAMDirectory();
if (VERBOSE) System.out.println("Creating index");
createRandomTerms(100000,25,.5, dir);
s = newSearcher(dir, true);
if (VERBOSE) System.out.println("Starting performance test");
for (int i=0; i<bigIter; i++) {
long start = System.currentTimeMillis();
doTermConjunctions(s,25,5,1000);
long end = System.currentTimeMillis();
if (VERBOSE) System.out.println("milliseconds="+(end-start));
}
s.close();
}

public void testNestedConjunctionTerms() throws Exception {
r = newRandom();
validate=false;
RAMDirectory dir = new RAMDirectory();
if (VERBOSE) System.out.println("Creating index");
createRandomTerms(100000,25,.2, dir);
s = newSearcher(dir, true);
if (VERBOSE) System.out.println("Starting performance test");
for (int i=0; i<bigIter; i++) {
long start = System.currentTimeMillis();
doNestedTermConjunctions(s,25,3,3,200);
long end = System.currentTimeMillis();
if (VERBOSE) System.out.println("milliseconds="+(end-start));
}
s.close();
}

public void testSloppyPhrasePerf() throws Exception {
r = newRandom();
validate=false;
RAMDirectory dir = new RAMDirectory();
if (VERBOSE) System.out.println("Creating index");
createRandomTerms(100000,25,2,dir);
s = newSearcher(dir, true);
if (VERBOSE) System.out.println("Starting performance test");
for (int i=0; i<bigIter; i++) {
long start = System.currentTimeMillis();
doSloppyPhrase(s,25,2,1000);
long end = System.currentTimeMillis();
if (VERBOSE) System.out.println("milliseconds="+(end-start));
}
s.close();
}
***/

}
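
For code outside Lucene's test framework, the same migration is mechanical: construct a ByteBuffersDirectory wherever a RAMDirectory was constructed, and type the variable as Directory. A minimal, self-contained sketch (class and field names here are illustrative, not part of this commit):

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field.Store;
    import org.apache.lucene.document.TextField;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.TermQuery;
    import org.apache.lucene.store.ByteBuffersDirectory;
    import org.apache.lucene.store.Directory;

    public class InMemoryIndexSketch {
      public static void main(String[] args) throws Exception {
        // Heap-resident replacement for the removed RAMDirectory.
        try (Directory dir = new ByteBuffersDirectory()) {
          try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            Document doc = new Document();
            doc.add(new TextField("body", "hello scorer performance tests", Store.YES));
            writer.addDocument(doc);
          }
          try (DirectoryReader reader = DirectoryReader.open(dir)) {
            IndexSearcher searcher = new IndexSearcher(reader);
            int hits = searcher.count(new TermQuery(new Term("body", "hello")));
            System.out.println("hits=" + hits);
          }
        }
      }
    }
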
@ -28,9 +28,9 @@ import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;

public class TestSloppyPhraseQuery extends LuceneTestCase {

@ -144,7 +144,7 @@ public class TestSloppyPhraseQuery extends LuceneTestCase {
builder.setSlop(slop);
query = builder.build();

MockDirectoryWrapper ramDir = new MockDirectoryWrapper(random(), new RAMDirectory());
MockDirectoryWrapper ramDir = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
RandomIndexWriter writer = new RandomIndexWriter(random(), ramDir, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false));
writer.addDocument(doc);

@ -34,8 +34,8 @@ import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;

@ -48,7 +48,7 @@ public class TestSubScorerFreqs extends LuceneTestCase {

@BeforeClass
public static void makeIndex() throws Exception {
dir = new RAMDirectory();
dir = new ByteBuffersDirectory();
RandomIndexWriter w = new RandomIndexWriter(
random(), dir, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
// make sure we have more than one segment occasionally
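
Both tests above keep their MockDirectoryWrapper and swap only the delegate. The wrapper (from Lucene's test framework) layers checks such as failure injection and unclosed-file detection over any Directory; a sketch of the pattern (seed and usage are illustrative):

    import java.util.Random;
    import org.apache.lucene.store.ByteBuffersDirectory;
    import org.apache.lucene.store.MockDirectoryWrapper;

    public class WrappedScratchDir {
      public static void main(String[] args) throws Exception {
        MockDirectoryWrapper dir = new MockDirectoryWrapper(new Random(42L), new ByteBuffersDirectory());
        dir.setCheckIndexOnClose(false); // skip CheckIndex when only a partial index is written
        // ... index through dir as usual ...
        dir.close();
      }
    }
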
@ -114,17 +114,6 @@ public class TestDirectory extends LuceneTestCase {
}
}

// LUCENE-1468
@SuppressWarnings("resource")
public void testCopySubdir() throws Throwable {
Path path = createTempDir("testsubdir");
Files.createDirectory(path.resolve("subdir"));
FSDirectory fsDir = new SimpleFSDirectory(path);
RAMDirectory ramDir = new RAMDirectory(fsDir, newIOContext(random()));
List<String> files = Arrays.asList(ramDir.listAll());
assertFalse(files.contains("subdir"));
}

// LUCENE-1468
public void testNotDirectory() throws Throwable {
Path path = createTempDir("testnotdir");
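
testCopySubdir is deleted outright because it exercised the removed RAMDirectory(Directory, IOContext) copying constructor. Where that constructor was used to pull an on-disk index onto the heap, copying file by file with Directory.copyFrom is one possible replacement; a hedged sketch (helper name is illustrative):

    import java.io.IOException;
    import org.apache.lucene.store.ByteBuffersDirectory;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.IOContext;

    public final class DirectoryCopies {
      /** Copies every file of {@code source} into a fresh heap-resident directory. */
      public static Directory copyToHeap(Directory source) throws IOException {
        Directory dest = new ByteBuffersDirectory();
        for (String file : source.listAll()) {
          dest.copyFrom(source, file, file, IOContext.DEFAULT);
        }
        return dest;
      }
    }
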
@ -44,9 +44,9 @@ public class TestFileSwitchDirectory extends BaseDirectoryTestCase {
fileExtensions.add(CompressingStoredFieldsWriter.FIELDS_EXTENSION);
fileExtensions.add(CompressingStoredFieldsWriter.FIELDS_INDEX_EXTENSION);

MockDirectoryWrapper primaryDir = new MockDirectoryWrapper(random(), new RAMDirectory());
MockDirectoryWrapper primaryDir = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
primaryDir.setCheckIndexOnClose(false); // only part of an index
MockDirectoryWrapper secondaryDir = new MockDirectoryWrapper(random(), new RAMDirectory());
MockDirectoryWrapper secondaryDir = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
secondaryDir.setCheckIndexOnClose(false); // only part of an index

FileSwitchDirectory fsd = new FileSwitchDirectory(fileExtensions, primaryDir, secondaryDir, true);

@ -29,7 +29,7 @@ public class TestFilterDirectory extends BaseDirectoryTestCase {

@Override
protected Directory getDirectory(Path path) {
return new FilterDirectory(new RAMDirectory()) {};
return new FilterDirectory(new ByteBuffersDirectory()) {};
}

@Test
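
FileSwitchDirectory routes files by extension between two delegates, so the swap applies to both sides independently. A sketch of the constructor exercised above (extension choices are illustrative):

    import java.util.HashSet;
    import java.util.Set;
    import org.apache.lucene.store.ByteBuffersDirectory;
    import org.apache.lucene.store.FileSwitchDirectory;

    public class SwitchSketch {
      public static void main(String[] args) throws Exception {
        Set<String> primaryExtensions = new HashSet<>();
        primaryExtensions.add("fdt"); // e.g. stored-fields data
        primaryExtensions.add("fdx"); // e.g. stored-fields index
        FileSwitchDirectory fsd = new FileSwitchDirectory(
            primaryExtensions,
            new ByteBuffersDirectory(), // receives files with the extensions above
            new ByteBuffersDirectory(), // receives everything else
            true);                      // close both delegates when fsd closes
        fsd.close();
      }
    }
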
@ -37,7 +37,7 @@ public class TestLockFactory extends LuceneTestCase {

public void testCustomLockFactory() throws IOException {
MockLockFactory lf = new MockLockFactory();
Directory dir = new MockDirectoryWrapper(random(), new RAMDirectory(lf));
Directory dir = new MockDirectoryWrapper(random(), new ByteBuffersDirectory(lf));

IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));

@ -52,14 +52,14 @@ public class TestLockFactory extends LuceneTestCase {
writer.close();
}

// Verify: we can use the NoLockFactory with RAMDirectory w/ no
// exceptions raised:
// Verify: we can use the NoLockFactory w/ no exceptions raised.
// Verify: NoLockFactory allows two IndexWriters
public void testRAMDirectoryNoLocking() throws IOException {
MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory(NoLockFactory.INSTANCE));
public void testDirectoryNoLocking() throws IOException {
MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new ByteBuffersDirectory(NoLockFactory.INSTANCE));

IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
writer.commit(); // required so the second open succeeds

// Create a 2nd IndexWriter. This is normally not allowed but it should run through since we're not
// using any locks:
IndexWriter writer2 = null;
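
As the renamed test shows, lock-factory behavior carries over unchanged: ByteBuffersDirectory accepts a LockFactory the same way RAMDirectory did. A sketch of the no-locking pattern outside the test framework (analyzer choice is illustrative):

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.IndexWriterConfig.OpenMode;
    import org.apache.lucene.store.ByteBuffersDirectory;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.NoLockFactory;

    public class NoLockingSketch {
      public static void main(String[] args) throws Exception {
        Directory dir = new ByteBuffersDirectory(NoLockFactory.INSTANCE);
        IndexWriter first = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
        first.commit(); // as in the test: required so the second open succeeds
        // No write lock is enforced, so a second writer can be opened.
        // This is normally unsafe; do it only when something external guarantees a single writer.
        IndexWriter second = new IndexWriter(dir,
            new IndexWriterConfig(new StandardAnalyzer()).setOpenMode(OpenMode.APPEND));
        second.close();
        first.close();
        dir.close();
      }
    }
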
@ -44,7 +44,7 @@ public class TestNRTCachingDirectory extends BaseDirectoryTestCase {
// would be good to investigate further...
@Override
protected Directory getDirectory(Path path) throws IOException {
return new NRTCachingDirectory(new RAMDirectory(),
return new NRTCachingDirectory(new ByteBuffersDirectory(),
.1 + 2.0*random().nextDouble(),
.1 + 5.0*random().nextDouble());
}
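
NRTCachingDirectory keeps small, newly flushed files in heap and delegates the rest, which is why it composes naturally with ByteBuffersDirectory here. A sketch with a file-system delegate, closer to production use (path and sizes are illustrative):

    import java.nio.file.Paths;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;
    import org.apache.lucene.store.NRTCachingDirectory;

    public class NrtCacheSketch {
      public static void main(String[] args) throws Exception {
        Directory base = FSDirectory.open(Paths.get("/tmp/index"));
        // Cache merges up to 5 MB, with a 60 MB total cache budget.
        NRTCachingDirectory cached = new NRTCachingDirectory(base, 5.0, 60.0);
        // ... open an IndexWriter and near-real-time readers on 'cached' ...
        cached.close();
      }
    }
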
@ -33,17 +33,15 @@ public class TestSingleInstanceLockFactory extends BaseLockFactoryTestCase {
return newDirectory(random(), new SingleInstanceLockFactory());
}

// Verify: SingleInstanceLockFactory is the default lock for RAMDirectory
// Verify: RAMDirectory does basic locking correctly (can't create two IndexWriters)
public void testDefaultRAMDirectory() throws IOException {
RAMDirectory dir = new RAMDirectory();

assertTrue("RAMDirectory did not use correct LockFactory: got " + dir.lockFactory,
dir.lockFactory instanceof SingleInstanceLockFactory);

// Verify: basic locking on single instance lock factory (can't create two IndexWriters)
public void testDefaultLockFactory() throws IOException {
ByteBuffersDirectory dir = new ByteBuffersDirectory();

assertTrue(dir.lockFactory instanceof SingleInstanceLockFactory);

IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));

// Create a 2nd IndexWriter. This should fail:
// Create a 2nd IndexWriter. This should fail.
expectThrows(IOException.class, () -> {
new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
});
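
The rewritten test relies on ByteBuffersDirectory defaulting to SingleInstanceLockFactory, just as RAMDirectory did, so a second IndexWriter on the same instance is rejected. A sketch of that behavior outside the test framework:

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.store.ByteBuffersDirectory;
    import org.apache.lucene.store.LockObtainFailedException;

    public class DefaultLockSketch {
      public static void main(String[] args) throws Exception {
        ByteBuffersDirectory dir = new ByteBuffersDirectory(); // default: SingleInstanceLockFactory
        try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
          try {
            new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
          } catch (LockObtainFailedException expected) {
            System.out.println("second writer rejected: " + expected.getMessage());
          }
        }
        dir.close();
      }
    }
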
@ -25,22 +25,22 @@ public class TestTrackingDirectoryWrapper extends BaseDirectoryTestCase {

@Override
protected Directory getDirectory(Path path) throws IOException {
return new TrackingDirectoryWrapper(new RAMDirectory());
return new TrackingDirectoryWrapper(new ByteBuffersDirectory());
}

public void testTrackEmpty() throws IOException {
TrackingDirectoryWrapper dir = new TrackingDirectoryWrapper(new RAMDirectory());
TrackingDirectoryWrapper dir = new TrackingDirectoryWrapper(new ByteBuffersDirectory());
assertEquals(Collections.emptySet(), dir.getCreatedFiles());
}

public void testTrackCreate() throws IOException {
TrackingDirectoryWrapper dir = new TrackingDirectoryWrapper(new RAMDirectory());
TrackingDirectoryWrapper dir = new TrackingDirectoryWrapper(new ByteBuffersDirectory());
dir.createOutput("foo", newIOContext(random())).close();
assertEquals(asSet("foo"), dir.getCreatedFiles());
}

public void testTrackDelete() throws IOException {
TrackingDirectoryWrapper dir = new TrackingDirectoryWrapper(new RAMDirectory());
TrackingDirectoryWrapper dir = new TrackingDirectoryWrapper(new ByteBuffersDirectory());
dir.createOutput("foo", newIOContext(random())).close();
assertEquals(asSet("foo"), dir.getCreatedFiles());
dir.deleteFile("foo");

@ -48,7 +48,7 @@ public class TestTrackingDirectoryWrapper extends BaseDirectoryTestCase {
}

public void testTrackRename() throws IOException {
TrackingDirectoryWrapper dir = new TrackingDirectoryWrapper(new RAMDirectory());
TrackingDirectoryWrapper dir = new TrackingDirectoryWrapper(new ByteBuffersDirectory());
dir.createOutput("foo", newIOContext(random())).close();
assertEquals(asSet("foo"), dir.getCreatedFiles());
dir.rename("foo", "bar");

@ -56,8 +56,8 @@ public class TestTrackingDirectoryWrapper extends BaseDirectoryTestCase {
}

public void testTrackCopyFrom() throws IOException {
TrackingDirectoryWrapper source = new TrackingDirectoryWrapper(new RAMDirectory());
TrackingDirectoryWrapper dest = new TrackingDirectoryWrapper(new RAMDirectory());
TrackingDirectoryWrapper source = new TrackingDirectoryWrapper(new ByteBuffersDirectory());
TrackingDirectoryWrapper dest = new TrackingDirectoryWrapper(new ByteBuffersDirectory());
source.createOutput("foo", newIOContext(random())).close();
assertEquals(asSet("foo"), source.getCreatedFiles());
dest.copyFrom(source, "foo", "bar", newIOContext(random()));
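
TrackingDirectoryWrapper only records which files its delegate creates, so it is indifferent to the delegate type. Usage in miniature (file name is illustrative):

    import org.apache.lucene.store.ByteBuffersDirectory;
    import org.apache.lucene.store.IOContext;
    import org.apache.lucene.store.TrackingDirectoryWrapper;

    public class TrackingSketch {
      public static void main(String[] args) throws Exception {
        TrackingDirectoryWrapper dir = new TrackingDirectoryWrapper(new ByteBuffersDirectory());
        dir.createOutput("foo", IOContext.DEFAULT).close();
        System.out.println(dir.getCreatedFiles()); // prints [foo]
        dir.close();
      }
    }
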
@ -28,12 +28,12 @@ import java.util.Random;

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.LongValues;
import org.apache.lucene.util.LongsRef;

@ -815,7 +815,7 @@ public class TestPackedInts extends LuceneTestCase {
final int valueCount = TestUtil.nextInt(random(), 1, 2048);
for (int bpv = 1; bpv <= 64; ++bpv) {
final int maxValue = (int) Math.min(PackedInts.maxValue(31), PackedInts.maxValue(bpv));
final RAMDirectory directory = new RAMDirectory();
final Directory directory = new ByteBuffersDirectory();
List<PackedInts.Mutable> packedInts = createPackedInts(valueCount, bpv);
for (PackedInts.Mutable mutable : packedInts) {
for (int i = 0; i < mutable.size(); ++i) {
@ -41,14 +41,14 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

/** Shows example usage of category associations. */
public class AssociationsFacetsExample {

private final Directory indexDir = new RAMDirectory();
private final Directory taxoDir = new RAMDirectory();
private final Directory indexDir = new ByteBuffersDirectory();
private final Directory taxoDir = new ByteBuffersDirectory();
private final FacetsConfig config;

/** Empty constructor */

@ -48,8 +48,8 @@ import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.SloppyMath;

/** Shows simple usage of dynamic range faceting, using the

@ -61,7 +61,7 @@ public class DistanceFacetsExample implements Closeable {
final DoubleRange FIVE_KM = new DoubleRange("< 5 km", 0.0, true, 5.0, false);
final DoubleRange TEN_KM = new DoubleRange("< 10 km", 0.0, true, 10.0, false);

private final Directory indexDir = new RAMDirectory();
private final Directory indexDir = new ByteBuffersDirectory();
private IndexSearcher searcher;
private final FacetsConfig config = new FacetsConfig();

@ -43,15 +43,15 @@ import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.SortField;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

/** Shows facets aggregation by an expression. */
public class ExpressionAggregationFacetsExample {

private final Directory indexDir = new RAMDirectory();
private final Directory taxoDir = new RAMDirectory();
private final Directory indexDir = new ByteBuffersDirectory();
private final Directory taxoDir = new ByteBuffersDirectory();
private final FacetsConfig config = new FacetsConfig();

/** Empty constructor */

@ -38,14 +38,14 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

/** Demonstrates indexing categories into different indexed fields. */
public class MultiCategoryListsFacetsExample {

private final Directory indexDir = new RAMDirectory();
private final Directory taxoDir = new RAMDirectory();
private final Directory indexDir = new ByteBuffersDirectory();
private final Directory taxoDir = new ByteBuffersDirectory();
private final FacetsConfig config = new FacetsConfig();

/** Creates a new instance and populates the category list params mapping. */

@ -87,7 +87,7 @@ public class MultiCategoryListsFacetsExample {
doc.add(new FacetField("Author", "Frank"));
doc.add(new FacetField("Publish Date", "1999", "5", "5"));
indexWriter.addDocument(config.build(taxoWriter, doc));

indexWriter.close();
taxoWriter.close();
}

@ -114,10 +114,10 @@ public class MultiCategoryListsFacetsExample {

Facets pubDate = new FastTaxonomyFacetCounts("pubdate", taxoReader, config, fc);
results.add(pubDate.getTopChildren(10, "Publish Date"));

indexReader.close();
taxoReader.close();

return results;
}

@ -126,7 +126,7 @@ public class MultiCategoryListsFacetsExample {
index();
return search();
}

/** Runs the search example and prints the results. */
public static void main(String[] args) throws Exception {
System.out.println("Facet counting over multiple category lists example:");

@ -38,13 +38,13 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

/** Shows simple usage of dynamic range faceting. */
public class RangeFacetsExample implements Closeable {

private final Directory indexDir = new RAMDirectory();
private final Directory indexDir = new ByteBuffersDirectory();
private IndexSearcher searcher;
private final long nowSec = System.currentTimeMillis();

@ -41,14 +41,14 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

/** Shows simple usage of faceted indexing and search. */
public class SimpleFacetsExample {

private final Directory indexDir = new RAMDirectory();
private final Directory taxoDir = new RAMDirectory();
private final Directory indexDir = new ByteBuffersDirectory();
private final Directory taxoDir = new ByteBuffersDirectory();
private final FacetsConfig config = new FacetsConfig();

/** Empty constructor */

@ -38,8 +38,8 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

/** Shows simple usage of faceted indexing and search,
* using {@link SortedSetDocValuesFacetField} and {@link

@ -47,7 +47,7 @@ import org.apache.lucene.store.RAMDirectory;

public class SimpleSortedSetFacetsExample {

private final Directory indexDir = new RAMDirectory();
private final Directory indexDir = new ByteBuffersDirectory();
private final FacetsConfig config = new FacetsConfig();

/** Empty constructor */
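
Every facet example above changes in the same way: the Directory fields are constructed as ByteBuffersDirectory and nothing else moves. The skeleton these demos share, reduced to its directory handling (a sketch, not one of the example classes):

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.store.ByteBuffersDirectory;
    import org.apache.lucene.store.Directory;

    public class FacetDirsSketch {
      private final Directory indexDir = new ByteBuffersDirectory();
      private final Directory taxoDir = new ByteBuffersDirectory();

      void index() throws Exception {
        IndexWriter indexWriter = new IndexWriter(indexDir, new IndexWriterConfig(new StandardAnalyzer()));
        DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
        // ... addDocument(config.build(taxoWriter, doc)) as in the examples above ...
        indexWriter.close();
        taxoWriter.close();
      }
    }
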
@ -19,17 +19,18 @@ package org.apache.lucene.facet;
import java.io.IOException;
import java.util.Random;

import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.ThreadInterruptedException;

/**
 * Test utility - slow directory
 */
// TODO: move to test-framework and sometimes use in tests?
public class SlowRAMDirectory extends RAMDirectory {
public class SlowDirectory extends FilterDirectory {

private static final int IO_SLEEP_THRESHOLD = 50;

@ -40,7 +41,8 @@ public class SlowRAMDirectory extends RAMDirectory {
this.sleepMillis = sleepMillis;
}

public SlowRAMDirectory(int sleepMillis, Random random) {
public SlowDirectory(int sleepMillis, Random random) {
super(new ByteBuffersDirectory());
this.sleepMillis = sleepMillis;
this.random = random;
}
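
The SlowRAMDirectory rewrite above is the template for any custom RAM-based Directory subclass that the removal orphans: extend FilterDirectory and delegate to a ByteBuffersDirectory. The shape in isolation (class name is illustrative):

    import org.apache.lucene.store.ByteBuffersDirectory;
    import org.apache.lucene.store.FilterDirectory;

    public class InstrumentedDirectory extends FilterDirectory {
      public InstrumentedDirectory() {
        super(new ByteBuffersDirectory()); // all I/O delegates to the heap-resident directory
      }
      // Override openInput/createOutput/etc. to inject behavior,
      // as SlowDirectory does to add artificial latency.
    }
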
@ -24,7 +24,7 @@ import java.util.Arrays;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.SlowRAMDirectory;
import org.apache.lucene.facet.SlowDirectory;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.store.Directory;

@ -732,7 +732,7 @@ public class TestTaxonomyCombined extends FacetTestCase {
private void assertConsistentYoungestChild(final FacetLabel abPath,
final int abOrd, final int abYoungChildBase1, final int abYoungChildBase2, final int retry, int numCategories)
throws Exception {
SlowRAMDirectory indexDir = new SlowRAMDirectory(-1, null); // no slowness for initialization
SlowDirectory indexDir = new SlowDirectory(-1, null); // no slowness for initialization
TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
tw.addCategory(new FacetLabel("a", "0"));
tw.addCategory(abPath);
@ -34,8 +34,8 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LogByteSizeMergePolicy;
import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.IOUtils;
import org.junit.Test;

@ -177,7 +177,7 @@ public class TestDirectoryTaxonomyReader extends FacetTestCase {

@Test
public void testOpenIfChangedAndRefCount() throws Exception {
Directory dir = new RAMDirectory(); // no need for random directories here
Directory dir = new ByteBuffersDirectory(); // no need for random directories here

DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(dir);
taxoWriter.addCategory(new FacetLabel("a"));
@ -39,29 +39,6 @@ public class FieldTermStack {
private final String fieldName;
LinkedList<TermInfo> termList = new LinkedList<>();

//public static void main( String[] args ) throws Exception {
// Analyzer analyzer = new WhitespaceAnalyzer(Version.LATEST);
// QueryParser parser = new QueryParser(Version.LATEST, "f", analyzer );
// Query query = parser.parse( "a x:b" );
// FieldQuery fieldQuery = new FieldQuery( query, true, false );

// Directory dir = new RAMDirectory();
// IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LATEST, analyzer));
// Document doc = new Document();
// FieldType ft = new FieldType(TextField.TYPE_STORED);
// ft.setStoreTermVectors(true);
// ft.setStoreTermVectorOffsets(true);
// ft.setStoreTermVectorPositions(true);
// doc.add( new Field( "f", ft, "a a a b b c a b b c d e f" ) );
// doc.add( new Field( "f", ft, "b a b a f" ) );
// writer.addDocument( doc );
// writer.close();

// IndexReader reader = IndexReader.open(dir1);
// new FieldTermStack( reader, 0, "f", fieldQuery );
// reader.close();
//}

/**
 * a constructor.
 *
@ -106,7 +106,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
static final String FIELD_NAME = "contents";
private static final String NUMERIC_FIELD_NAME = "nfield";
private Query query;
Directory ramDir;
Directory dir1;
public IndexSearcher searcher = null;
int numHighlights = 0;
MockAnalyzer analyzer;

@ -1926,7 +1926,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
helper.start();
}

private Directory dir;
private Directory dir2;
private Analyzer a;

public void testWeightedTermsWithDeletes() throws IOException, InvalidTokenOffsetsException {

@ -1936,7 +1936,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
}

private void makeIndex() throws IOException {
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
writer.addDocument( doc( "t_text1", "more random words for second field del" ) );
writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );

@ -1946,7 +1946,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
}

private void deleteDocument() throws IOException {
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setOpenMode(OpenMode.APPEND));
IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setOpenMode(OpenMode.APPEND));
writer.deleteDocuments( new Term( "t_text1", "del" ) );
// To see negative idf, keep comment the following line
//writer.forceMerge(1);

@ -1955,7 +1955,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte

private void searchIndex() throws IOException, InvalidTokenOffsetsException {
Query query = new TermQuery(new Term("t_text1", "random"));
IndexReader reader = DirectoryReader.open(dir);
IndexReader reader = DirectoryReader.open(dir1);
IndexSearcher searcher = newSearcher(reader);
// This scorer can return negative idf -> null fragment
Scorer scorer = new QueryTermScorer( query, searcher.getIndexReader(), "t_text1" );

@ -1978,7 +1978,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
final String text = "random words and words";//"words" at positions 1 & 4

Analyzer analyzer = new MockPayloadAnalyzer();//sets payload to "pos: X" (where X is position #)
try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(analyzer))) {
try (IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(analyzer))) {
writer.deleteAll();
Document doc = new Document();

@ -1986,7 +1986,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
writer.addDocument(doc);
writer.commit();
}
try (IndexReader reader = DirectoryReader.open(dir)) {
try (IndexReader reader = DirectoryReader.open(dir1)) {
Query query = new SpanPayloadCheckQuery(new SpanTermQuery(new Term(FIELD_NAME, "words")),
Collections.singletonList(new BytesRef("pos: 1")));//just match the first "word" occurrence
IndexSearcher searcher = newSearcher(reader);

@ -2004,32 +2004,6 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
assertEquals("random <B>words</B> and words", result);//only highlight first "word"
}
}

/*
 *
 * public void testBigramAnalyzer() throws IOException, ParseException {
 * //test to ensure analyzers with none-consecutive start/end offsets //dont
 * double-highlight text //setup index 1 RAMDirectory ramDir = new
 * RAMDirectory(); Analyzer bigramAnalyzer=new CJKAnalyzer(); IndexWriter
 * writer = new IndexWriter(ramDir,bigramAnalyzer , true); Document d = new
 * Document(); Field f = new Field(FIELD_NAME, "java abc def", true, true,
 * true); d.add(f); writer.addDocument(d); writer.close(); IndexReader reader =
 * DirectoryReader.open(ramDir);
 *
 * IndexSearcher searcher=new IndexSearcher(reader); query =
 * QueryParser.parse("abc", FIELD_NAME, bigramAnalyzer);
 * System.out.println("Searching for: " + query.toString(FIELD_NAME)); hits =
 * searcher.search(query);
 *
 * Highlighter highlighter = new Highlighter(this,new
 * QueryFragmentScorer(query));
 *
 * for (int i = 0; i < hits.totalHits.value; i++) { String text =
 * searcher.doc2(hits.scoreDocs[i].doc).get(FIELD_NAME); TokenStream
 * tokenStream=bigramAnalyzer.tokenStream(FIELD_NAME,text);
 * String highlightedText = highlighter.getBestFragment(tokenStream,text);
 * System.out.println(highlightedText); } }
 */

@Override
public String highlightTerm(String originalText, TokenGroup group) {

@ -2074,13 +2048,13 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte

//Not many use this setup:
a = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
dir = newDirectory();
dir1 = newDirectory();

//Most tests use this setup:
analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
ramDir = newDirectory();
dir2 = newDirectory();
fieldType = random().nextBoolean() ? FIELD_TYPE_TV : TextField.TYPE_STORED;
IndexWriter writer = new IndexWriter(ramDir, newIndexWriterConfig(analyzer).setMergePolicy(newLogMergePolicy()));
IndexWriter writer = new IndexWriter(dir2, newIndexWriterConfig(analyzer).setMergePolicy(newLogMergePolicy()));

for (String text : texts) {
writer.addDocument(doc(FIELD_NAME, text));

@ -2113,7 +2087,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte

writer.forceMerge(1);
writer.close();
reader = DirectoryReader.open(ramDir);
reader = DirectoryReader.open(dir2);

//Misc:
numHighlights = 0;

@ -2122,8 +2096,8 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
@Override
public void tearDown() throws Exception {
reader.close();
dir.close();
ramDir.close();
dir1.close();
dir2.close();
super.tearDown();
}
@ -42,7 +42,7 @@ import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ByteBlockPool;

@ -65,8 +65,8 @@ import org.apache.lucene.util.Version;
 * <p>
 * <b>Overview</b>
 * <p>
 * This class is a replacement/substitute for a large subset of
 * {@link RAMDirectory} functionality. It is designed to
 * This class is a replacement/substitute for RAM-resident {@link Directory} implementations.
 * It is designed to
 * enable maximum efficiency for on-the-fly matchmaking combining structured and
 * fuzzy fulltext search in realtime streaming applications such as Nux XQuery based XML
 * message queues, publish-subscribe systems for Blogs/newsfeeds, text chat, data acquisition and

@ -156,11 +156,12 @@ import org.apache.lucene.util.Version;
 * <p>
 * This class performs very well for very small texts (e.g. 10 chars)
 * as well as for large texts (e.g. 10 MB) and everything in between.
 * Typically, it is about 10-100 times faster than <code>RAMDirectory</code>.
 * Note that <code>RAMDirectory</code> has particularly
 * Typically, it is about 10-100 times faster than a RAM-resident directory.
 *
 * Note that other <code>Directory</code> implementations have particularly
 * large efficiency overheads for small to medium sized texts, both in time and space.
 * Indexing a field with N tokens takes O(N) in the best case, and O(N logN) in the worst
 * case. Memory consumption is probably larger than for <code>RAMDirectory</code>.
 * case.
 * <p>
 * Example throughput of many simple term queries over a single MemoryIndex:
 * ~500000 queries/sec on a MacBook Pro, jdk 1.5.0_06, server VM.

@ -707,7 +708,7 @@ public class MemoryIndex {
});
float score = scores[0];
return score;
} catch (IOException e) { // can never happen (RAMDirectory)
} catch (IOException e) {
throw new RuntimeException(e);
}
}
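
For the single-document matchmaking case this javadoc describes, usage looks roughly like the following (the field text is the example sentence from the class documentation; everything else is illustrative):

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.index.memory.MemoryIndex;
    import org.apache.lucene.search.TermQuery;

    public class MemoryIndexSketch {
      public static void main(String[] args) {
        Analyzer analyzer = new StandardAnalyzer();
        MemoryIndex index = new MemoryIndex();
        index.addField("content", "Readings about Salmons and other select Alaska fishing Manuals", analyzer);
        float score = index.search(new TermQuery(new Term("content", "salmons")));
        System.out.println(score > 0.0f ? "matched, score=" + score : "no match");
      }
    }
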
@ -64,8 +64,8 @@ import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
import org.apache.lucene.search.spans.SpanOrQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.ByteBlockPool;
import org.apache.lucene.util.ByteBlockPool.Allocator;
import org.apache.lucene.util.BytesRef;

@ -77,10 +77,10 @@ import org.apache.lucene.util.TestUtil;
import static org.hamcrest.CoreMatchers.equalTo;

/**
 * Verifies that Lucene MemoryIndex and RAMDirectory have the same behaviour,
 * Verifies that Lucene MemoryIndex and RAM-resident Directory have the same behaviour,
 * returning the same results for queries on some randomish indexes.
 */
public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
public class TestMemoryIndexAgainstDirectory extends BaseTokenStreamTestCase {
private Set<String> queries = new HashSet<>();

public static final int ITERATIONS = 100 * RANDOM_MULTIPLIER;

@ -116,15 +116,15 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
public void testRandomQueries() throws Exception {
MemoryIndex index = randomMemoryIndex();
for (int i = 0; i < ITERATIONS; i++) {
assertAgainstRAMDirectory(index);
assertAgainstDirectory(index);
}
}

/**
 * Build a randomish document for both RAMDirectory and MemoryIndex,
 * Build a randomish document for both Directory and MemoryIndex,
 * and run all the queries against it.
 */
public void assertAgainstRAMDirectory(MemoryIndex memory) throws Exception {
public void assertAgainstDirectory(MemoryIndex memory) throws Exception {
memory.reset();
StringBuilder fooField = new StringBuilder();
StringBuilder termField = new StringBuilder();

@ -143,9 +143,9 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
termField.append(randomTerm());
}

Directory ramdir = new RAMDirectory();
Directory dir = new ByteBuffersDirectory();
Analyzer analyzer = randomAnalyzer();
IndexWriter writer = new IndexWriter(ramdir,
IndexWriter writer = new IndexWriter(dir,
new IndexWriterConfig(analyzer).setCodec(
TestUtil.alwaysPostingsFormat(TestUtil.getDefaultPostingsFormat())));
Document doc = new Document();

@ -161,11 +161,11 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {

LeafReader reader = (LeafReader) memory.createSearcher().getIndexReader();
TestUtil.checkReader(reader);
DirectoryReader competitor = DirectoryReader.open(ramdir);
DirectoryReader competitor = DirectoryReader.open(dir);
duellReaders(competitor, reader);
IOUtils.close(reader, competitor);
assertAllQueries(memory, ramdir, analyzer);
ramdir.close();
assertAllQueries(memory, dir, analyzer);
dir.close();
}

private void duellReaders(CompositeReader other, LeafReader memIndexReader)

@ -236,10 +236,10 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
}

/**
 * Run all queries against both the RAMDirectory and MemoryIndex, ensuring they are the same.
 * Run all queries against both the Directory and MemoryIndex, ensuring they are the same.
 */
public void assertAllQueries(MemoryIndex memory, Directory ramdir, Analyzer analyzer) throws Exception {
IndexReader reader = DirectoryReader.open(ramdir);
public void assertAllQueries(MemoryIndex memory, Directory directory, Analyzer analyzer) throws Exception {
IndexReader reader = DirectoryReader.open(directory);
IndexSearcher ram = newSearcher(reader);
IndexSearcher mem = memory.createSearcher();
QueryParser qp = new QueryParser("foo", analyzer);
@ -32,8 +32,8 @@ import org.apache.lucene.search.similarities.ClassicSimilarity;
import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.search.similarities.TFIDFSimilarity;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;

/**

@ -43,7 +43,7 @@ public class SweetSpotSimilarityTest extends LuceneTestCase {

private static float computeNorm(Similarity sim, String field, int length) throws IOException {
String value = IntStream.range(0, length).mapToObj(i -> "a").collect(Collectors.joining(" "));
Directory dir = new RAMDirectory();
Directory dir = new ByteBuffersDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setSimilarity(sim));
w.addDocument(Collections.singleton(newTextField(field, value, Store.NO)));
DirectoryReader reader = DirectoryReader.open(w);
@ -38,7 +38,7 @@ public class TestHardLinkCopyDirectoryWrapper extends BaseDirectoryTestCase {
protected Directory getDirectory(Path file) throws IOException {
Directory open;
if (random().nextBoolean()) {
open = new RAMDirectory();
open = new ByteBuffersDirectory();
} else {
open = FSDirectory.open(file);
}
@ -24,6 +24,7 @@ import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.English;
import org.apache.lucene.util.LuceneTestCase;

@ -32,7 +33,6 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;

import java.io.IOException;
import java.util.Random;

@ -104,7 +104,7 @@ public class PayloadHelper {
}

/**
 * Sets up a RAMDirectory, and adds documents (using English.intToEnglish()) with two fields: field and multiField
 * Sets up a RAM-resident Directory, and adds documents (using English.intToEnglish()) with two fields: field and multiField
 * and analyzes them using the PayloadAnalyzer
 * @param similarity The Similarity class to use in the Searcher
 * @param numDocs The num docs to add

@ -112,7 +112,7 @@ public class PayloadHelper {
 */
// TODO: randomize
public IndexSearcher setUp(Random random, Similarity similarity, int numDocs) throws IOException {
Directory directory = new MockDirectoryWrapper(random, new RAMDirectory());
Directory directory = new MockDirectoryWrapper(random, new ByteBuffersDirectory());
PayloadAnalyzer analyzer = new PayloadAnalyzer();

// TODO randomize this
@ -19,9 +19,9 @@ package org.apache.lucene.queryparser.surround.query;
import java.util.Random;

import org.apache.lucene.document.Field;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.TextField;

@ -35,7 +35,7 @@ public class SingleFieldTestDb {

public SingleFieldTestDb(Random random, String[] documents, String fName) {
try {
db = new MockDirectoryWrapper(random, new RAMDirectory());
db = new MockDirectoryWrapper(random, new ByteBuffersDirectory());
docs = documents;
fieldName = fName;
IndexWriter writer = new IndexWriter(db, new IndexWriterConfig(new MockAnalyzer(random)));
@ -40,8 +40,9 @@ import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
import org.apache.lucene.spatial.query.SpatialArgs;
import org.apache.lucene.spatial.query.SpatialArgsParser;
import org.apache.lucene.spatial.query.SpatialOperation;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

import org.apache.lucene.util.LuceneTestCase;
import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.distance.DistanceUtils;

@ -98,7 +99,7 @@ public class SpatialExample extends LuceneTestCase {

this.strategy = new RecursivePrefixTreeStrategy(grid, "myGeoField");

this.directory = new RAMDirectory();
this.directory = new ByteBuffersDirectory();
}

private void indexPoints() throws Exception {
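
Across all of these files the recipe is uniform: construct a ByteBuffersDirectory wherever a RAMDirectory was constructed, type the variable as Directory, and leave the surrounding logic alone. Heap-resident directories remain a fit for tests and demos; for persistent indexes an on-disk directory is usually the appropriate choice. A sketch of both options (method names are illustrative):

    import java.io.IOException;
    import java.nio.file.Paths;
    import org.apache.lucene.store.ByteBuffersDirectory;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;

    public class DirectoryChoice {
      /** Heap-resident scratch index, e.g. for tests and short-lived demos. */
      static Directory scratch() {
        return new ByteBuffersDirectory();
      }

      /** Persistent index; FSDirectory.open picks the best implementation (typically mmap) for the platform. */
      static Directory persistent(String path) throws IOException {
        return FSDirectory.open(Paths.get(path));
      }
    }
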