diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index e2d955a5e7e..4838cc4fa86 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -4,7 +4,12 @@ For more information on past and future Lucene versions, please see:
http://s.apache.org/luceneversions
======================= Lucene 9.0.0 =======================
-(No Changes)
+
+API Changes
+
+* LUCENE-8474: RAMDirectory and associated deprecated classes have been
+ removed. (Dawid Weiss)
+
======================= Lucene 8.0.0 =======================
diff --git a/lucene/MIGRATE.txt b/lucene/MIGRATE.txt
index 045f00db627..c005bc0afca 100644
--- a/lucene/MIGRATE.txt
+++ b/lucene/MIGRATE.txt
@@ -1,5 +1,12 @@
# Apache Lucene Migration Guide
+## RAMDirectory, RAMFile, RAMInputStream, RAMOutputStream removed ##
+
RAM-based directory implementations have been removed. (LUCENE-8474).
+ByteBuffersDirectory can be used as a RAM-resident replacement, although it
+is discouraged in favor of the default memory-mapped directory.
+
+
## Similarity.SimScorer.computeXXXFactor methods removed (LUCENE-8014) ##
SpanQuery and PhraseQuery now always calculate their slops as (1.0 / (1.0 +
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestKeywordAnalyzer.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestKeywordAnalyzer.java
index eeff4d6c84e..d0da8861112 100644
--- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestKeywordAnalyzer.java
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestKeywordAnalyzer.java
@@ -33,8 +33,8 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.TestUtil;
@@ -84,7 +84,7 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
*/
public void testMutipleDocument() throws Exception {
- RAMDirectory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
Analyzer analyzer = new KeywordAnalyzer();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(analyzer));
Document doc = new Document();
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestRandomChains.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestRandomChains.java
index beb9bb21077..ddaa984f336 100644
--- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestRandomChains.java
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestRandomChains.java
@@ -94,7 +94,7 @@ import org.apache.lucene.analysis.snowball.TestSnowball;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.apache.lucene.analysis.synonym.SynonymMap;
import org.apache.lucene.analysis.wikipedia.WikipediaTokenizer;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.util.AttributeFactory;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.CharsRef;
@@ -387,7 +387,7 @@ public class TestRandomChains extends BaseTokenStreamTestCase {
InputStream affixStream = TestHunspellStemFilter.class.getResourceAsStream("simple.aff");
InputStream dictStream = TestHunspellStemFilter.class.getResourceAsStream("simple.dic");
try {
- return new Dictionary(new RAMDirectory(), "dictionary", affixStream, dictStream);
+ return new Dictionary(new ByteBuffersDirectory(), "dictionary", affixStream, dictStream);
} catch (Exception ex) {
Rethrow.rethrow(ex);
return null; // unreachable code
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/hunspell/StemmerTestBase.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/hunspell/StemmerTestBase.java
index 771cf950fe9..4aa324a760f 100644
--- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/hunspell/StemmerTestBase.java
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/hunspell/StemmerTestBase.java
@@ -24,7 +24,7 @@ import java.text.ParseException;
import java.util.Arrays;
import java.util.List;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
@@ -62,7 +62,7 @@ public abstract class StemmerTestBase extends LuceneTestCase {
}
try {
- Dictionary dictionary = new Dictionary(new RAMDirectory(), "dictionary", affixStream, Arrays.asList(dictStreams), ignoreCase);
+ Dictionary dictionary = new Dictionary(new ByteBuffersDirectory(), "dictionary", affixStream, Arrays.asList(dictStreams), ignoreCase);
stemmer = new Stemmer(dictionary);
} finally {
IOUtils.closeWhileHandlingException(affixStream);
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/query/QueryAutoStopWordAnalyzerTest.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/query/QueryAutoStopWordAnalyzerTest.java
index f790597647f..d7a70520c93 100644
--- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/query/QueryAutoStopWordAnalyzerTest.java
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/query/QueryAutoStopWordAnalyzerTest.java
@@ -24,7 +24,8 @@ import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.ByteBuffersDirectory;
+import org.apache.lucene.store.Directory;
import java.util.Arrays;
import java.util.Collections;
@@ -32,7 +33,7 @@ import java.util.Collections;
public class QueryAutoStopWordAnalyzerTest extends BaseTokenStreamTestCase {
String variedFieldValues[] = {"the", "quick", "brown", "fox", "jumped", "over", "the", "lazy", "boring", "dog"};
String repetitiveFieldValues[] = {"boring", "boring", "vaguelyboring"};
- RAMDirectory dir;
+ Directory dir;
Analyzer appAnalyzer;
IndexReader reader;
QueryAutoStopWordAnalyzer protectedAnalyzer;
@@ -40,7 +41,7 @@ public class QueryAutoStopWordAnalyzerTest extends BaseTokenStreamTestCase {
@Override
public void setUp() throws Exception {
super.setUp();
- dir = new RAMDirectory();
+ dir = new ByteBuffersDirectory();
appAnalyzer = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(appAnalyzer));
int numDocs = 200;
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestClassicAnalyzer.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestClassicAnalyzer.java
index ffb1d2b0b90..bb16e9bae42 100644
--- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestClassicAnalyzer.java
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestClassicAnalyzer.java
@@ -33,7 +33,8 @@ import org.apache.lucene.index.MultiTerms;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.ByteBuffersDirectory;
+import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
/** tests for classicanalyzer */
@@ -269,7 +270,7 @@ public class TestClassicAnalyzer extends BaseTokenStreamTestCase {
* Make sure we skip wicked long terms.
*/
public void testWickedLongTerm() throws IOException {
- RAMDirectory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
Analyzer analyzer = new ClassicAnalyzer();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(analyzer));
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
index d66f17a44a8..2737881bda1 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
@@ -67,10 +67,10 @@ import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.BaseDirectoryWrapper;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.NIOFSDirectory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
@@ -1450,7 +1450,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
// create a bunch of dummy segments
int id = 40;
- RAMDirectory ramDir = new RAMDirectory();
+ Directory ramDir = new ByteBuffersDirectory();
for (int i = 0; i < 3; i++) {
// only use Log- or TieredMergePolicy, to make document addition predictable and not suddenly merge:
MergePolicy mp = random().nextBoolean() ? newLogMergePolicy() : newTieredMergePolicy();
diff --git a/lucene/benchmark/conf/analyzer.alg b/lucene/benchmark/conf/analyzer.alg
index 698a5843f28..497ec3d216d 100644
--- a/lucene/benchmark/conf/analyzer.alg
+++ b/lucene/benchmark/conf/analyzer.alg
@@ -25,7 +25,7 @@ compound=true
analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
doc.stored=true
doc.tokenized=true
diff --git a/lucene/benchmark/conf/collector-small.alg b/lucene/benchmark/conf/collector-small.alg
index e4801ff9d65..763cb0454ad 100644
--- a/lucene/benchmark/conf/collector-small.alg
+++ b/lucene/benchmark/conf/collector-small.alg
@@ -25,7 +25,7 @@ collector.class=coll:topScoreDocOrdered:topScoreDocUnordered:topScoreDocOrdered:
analyzer=org.apache.lucene.analysis.core.WhitespaceAnalyzer
directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
doc.stored=true
doc.tokenized=true
diff --git a/lucene/benchmark/conf/collector.alg b/lucene/benchmark/conf/collector.alg
index 237f33b0f06..d85582a7ba2 100644
--- a/lucene/benchmark/conf/collector.alg
+++ b/lucene/benchmark/conf/collector.alg
@@ -25,7 +25,7 @@ collector.class=coll:topScoreDocOrdered:topScoreDocUnordered:topScoreDocOrdered:
analyzer=org.apache.lucene.analysis.core.WhitespaceAnalyzer
directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
doc.stored=true
doc.tokenized=true
diff --git a/lucene/benchmark/conf/compound-penalty.alg b/lucene/benchmark/conf/compound-penalty.alg
index 129119872f5..06b2821f04b 100644
--- a/lucene/benchmark/conf/compound-penalty.alg
+++ b/lucene/benchmark/conf/compound-penalty.alg
@@ -29,7 +29,7 @@ compound=compnd:true:false
analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
doc.stored=stored:true:true:false:false
doc.tokenized=true
diff --git a/lucene/benchmark/conf/indexing-flush-by-RAM-multithreaded.alg b/lucene/benchmark/conf/indexing-flush-by-RAM-multithreaded.alg
index 4f732597908..c25c9c3fac9 100644
--- a/lucene/benchmark/conf/indexing-flush-by-RAM-multithreaded.alg
+++ b/lucene/benchmark/conf/indexing-flush-by-RAM-multithreaded.alg
@@ -25,7 +25,7 @@ compound=cmpnd:true:true:true:true:false:false:false:false
analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
doc.stored=true
doc.tokenized=true
diff --git a/lucene/benchmark/conf/indexing-flush-by-RAM.alg b/lucene/benchmark/conf/indexing-flush-by-RAM.alg
index 771a685cb24..0b6c79762ef 100644
--- a/lucene/benchmark/conf/indexing-flush-by-RAM.alg
+++ b/lucene/benchmark/conf/indexing-flush-by-RAM.alg
@@ -25,7 +25,7 @@ compound=cmpnd:true:true:true:true:false:false:false:false
analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
doc.stored=true
doc.tokenized=true
diff --git a/lucene/benchmark/conf/indexing-multithreaded.alg b/lucene/benchmark/conf/indexing-multithreaded.alg
index a1685462d6b..1d2e18e260d 100644
--- a/lucene/benchmark/conf/indexing-multithreaded.alg
+++ b/lucene/benchmark/conf/indexing-multithreaded.alg
@@ -25,7 +25,7 @@ compound=cmpnd:true:true:true:true:false:false:false:false
analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
doc.stored=true
doc.tokenized=true
diff --git a/lucene/benchmark/conf/indexing.alg b/lucene/benchmark/conf/indexing.alg
index 42c01f3e07e..e31f87185b1 100644
--- a/lucene/benchmark/conf/indexing.alg
+++ b/lucene/benchmark/conf/indexing.alg
@@ -25,7 +25,7 @@ compound=cmpnd:true:true:true:true:false:false:false:false
analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
doc.stored=true
doc.tokenized=true
diff --git a/lucene/benchmark/conf/micro-standard-flush-by-ram.alg b/lucene/benchmark/conf/micro-standard-flush-by-ram.alg
index f503081668a..993e58a883d 100644
--- a/lucene/benchmark/conf/micro-standard-flush-by-ram.alg
+++ b/lucene/benchmark/conf/micro-standard-flush-by-ram.alg
@@ -24,7 +24,7 @@ compound=true
analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
doc.stored=true
doc.tokenized=true
diff --git a/lucene/benchmark/conf/micro-standard.alg b/lucene/benchmark/conf/micro-standard.alg
index bcd6ebd6084..9c161664822 100644
--- a/lucene/benchmark/conf/micro-standard.alg
+++ b/lucene/benchmark/conf/micro-standard.alg
@@ -23,7 +23,7 @@ compound=true
analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
doc.stored=true
doc.tokenized=true
diff --git a/lucene/benchmark/conf/sample.alg b/lucene/benchmark/conf/sample.alg
index 74b5d441126..4f93230bfc7 100644
--- a/lucene/benchmark/conf/sample.alg
+++ b/lucene/benchmark/conf/sample.alg
@@ -35,7 +35,7 @@ compound=true
analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
doc.stored=true
doc.tokenized=true
diff --git a/lucene/benchmark/conf/sloppy-phrase.alg b/lucene/benchmark/conf/sloppy-phrase.alg
index bbdbed86512..4d06d6fdbe0 100644
--- a/lucene/benchmark/conf/sloppy-phrase.alg
+++ b/lucene/benchmark/conf/sloppy-phrase.alg
@@ -23,7 +23,7 @@ compound=true
analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
doc.stored=false
doc.tokenized=true
diff --git a/lucene/benchmark/conf/sort-standard.alg b/lucene/benchmark/conf/sort-standard.alg
index 7a25574d320..48cae964dbe 100644
--- a/lucene/benchmark/conf/sort-standard.alg
+++ b/lucene/benchmark/conf/sort-standard.alg
@@ -24,7 +24,7 @@ sort.rng=20000:10000:20000:10000
analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
doc.stored=true
doc.tokenized=true
diff --git a/lucene/benchmark/conf/spatial.alg b/lucene/benchmark/conf/spatial.alg
index 0ee637f510f..334497b631e 100644
--- a/lucene/benchmark/conf/spatial.alg
+++ b/lucene/benchmark/conf/spatial.alg
@@ -54,7 +54,7 @@ doc.tokenized=false
### Directory
directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
compound=false
merge.factor=10
ram.flush.mb=64
diff --git a/lucene/benchmark/conf/standard-flush-by-RAM.alg b/lucene/benchmark/conf/standard-flush-by-RAM.alg
index cd1ebe82be4..3ceed106fae 100644
--- a/lucene/benchmark/conf/standard-flush-by-RAM.alg
+++ b/lucene/benchmark/conf/standard-flush-by-RAM.alg
@@ -24,7 +24,7 @@ compound=cmpnd:true:true:true:true:false:false:false:false
analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
doc.stored=true
doc.tokenized=true
diff --git a/lucene/benchmark/conf/standard.alg b/lucene/benchmark/conf/standard.alg
index 69cb56e3f3f..4d0b0480ffe 100644
--- a/lucene/benchmark/conf/standard.alg
+++ b/lucene/benchmark/conf/standard.alg
@@ -24,7 +24,7 @@ compound=cmpnd:true:true:true:true:false:false:false:false
analyzer=org.apache.lucene.analysis.standard.StandardAnalyzer
directory=FSDirectory
-#directory=RamDirectory
+#directory=ByteBuffersDirectory
doc.stored=true
doc.tokenized=true
diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/PerfRunData.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/PerfRunData.java
index a08b79e38cf..1304d4be9b2 100644
--- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/PerfRunData.java
+++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/PerfRunData.java
@@ -44,9 +44,9 @@ import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.IOUtils;
/**
@@ -70,12 +70,13 @@ import org.apache.lucene.util.IOUtils;
*
taxonomy.directory=<type of directory for taxonomy index| Default: ByteBuffersDirectory>
*
*/
public class PerfRunData implements Closeable {
+ private static final String DEFAULT_DIRECTORY = "ByteBuffersDirectory";
private Points points;
// objects used during performance test run
@@ -192,17 +193,26 @@ public class PerfRunData implements Closeable {
private Directory createDirectory(boolean eraseIndex, String dirName,
String dirParam) throws IOException {
- if ("FSDirectory".equals(config.get(dirParam,"RAMDirectory"))) {
- Path workDir = Paths.get(config.get("work.dir","work"));
+ String dirImpl = config.get(dirParam, DEFAULT_DIRECTORY);
+ if ("FSDirectory".equals(dirImpl)) {
+ Path workDir = Paths.get(config.get("work.dir", "work"));
Path indexDir = workDir.resolve(dirName);
if (eraseIndex && Files.exists(indexDir)) {
IOUtils.rm(indexDir);
}
Files.createDirectories(indexDir);
return FSDirectory.open(indexDir);
- }
+ }
- return new RAMDirectory();
+ if ("RAMDirectory".equals(dirImpl)) {
+ throw new IOException("RAMDirectory has been removed, use ByteBuffersDirectory.");
+ }
+
+ if ("ByteBuffersDirectory".equals(dirImpl)) {
+ return new ByteBuffersDirectory();
+ }
+
+ throw new IOException("Directory type not supported: " + dirImpl);
}
/** Returns an object that was previously set by {@link #setPerfObject(String, Object)}. */
diff --git a/lucene/benchmark/src/test/org/apache/lucene/benchmark/BenchmarkTestCase.java b/lucene/benchmark/src/test/org/apache/lucene/benchmark/BenchmarkTestCase.java
index 2092afedc06..66dc1c699c6 100644
--- a/lucene/benchmark/src/test/org/apache/lucene/benchmark/BenchmarkTestCase.java
+++ b/lucene/benchmark/src/test/org/apache/lucene/benchmark/BenchmarkTestCase.java
@@ -79,7 +79,7 @@ public abstract class BenchmarkTestCase extends LuceneTestCase {
// properties in effect in all tests here
final String propLines [] = {
"work.dir=" + getWorkDirPath(),
- "directory=RAMDirectory",
+ "directory=ByteBuffersDirectory",
"print.props=false",
};
diff --git a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksLogic.java b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksLogic.java
index f1936ad5850..1cf4c755417 100644
--- a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksLogic.java
+++ b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksLogic.java
@@ -170,7 +170,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
"content.source.log.step=1",
"doc.term.vector=false",
"content.source.forever=false",
- "directory=RAMDirectory",
+ "directory=ByteBuffersDirectory",
"doc.stored=false",
"doc.tokenized=false",
"# ----- alg ",
@@ -211,7 +211,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
"doc.term.vector=false",
"log.step.AddDoc=10000",
"content.source.forever=true",
- "directory=RAMDirectory",
+ "directory=ByteBuffersDirectory",
"doc.reuse.fields=false",
"doc.stored=true",
"doc.tokenized=false",
@@ -412,7 +412,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
"content.source.log.step=3",
"doc.term.vector=false",
"content.source.forever=false",
- "directory=RAMDirectory",
+ "directory=ByteBuffersDirectory",
"doc.stored=false",
"doc.tokenized=false",
"task.max.depth.log=1",
@@ -447,7 +447,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
"content.source.log.step=3",
"doc.term.vector=false",
"content.source.forever=false",
- "directory=RAMDirectory",
+ "directory=ByteBuffersDirectory",
"doc.stored=false",
"doc.tokenized=false",
"task.max.depth.log=1",
@@ -484,7 +484,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
"content.source.log.step=3",
"doc.term.vector=false",
"content.source.forever=false",
- "directory=RAMDirectory",
+ "directory=ByteBuffersDirectory",
"doc.stored=false",
"doc.tokenized=false",
"debug.level=1",
@@ -527,7 +527,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
"content.source.log.step=3",
"doc.term.vector=false",
"content.source.forever=false",
- "directory=RAMDirectory",
+ "directory=ByteBuffersDirectory",
"merge.scheduler=" + MyMergeScheduler.class.getName(),
"doc.stored=false",
"doc.tokenized=false",
@@ -575,7 +575,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
"max.buffered=2",
"doc.term.vector=false",
"content.source.forever=false",
- "directory=RAMDirectory",
+ "directory=ByteBuffersDirectory",
"merge.policy=" + MyMergePolicy.class.getName(),
"doc.stored=false",
"doc.tokenized=false",
@@ -615,7 +615,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
"compound=cmpnd:true:false",
"doc.term.vector=vector:false:true",
"content.source.forever=false",
- "directory=RAMDirectory",
+ "directory=ByteBuffersDirectory",
"doc.stored=false",
"merge.factor=3",
"doc.tokenized=false",
@@ -656,7 +656,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
"docs.file=" + getReuters20LinesFile(),
"content.source.log.step=100",
"content.source.forever=false",
- "directory=RAMDirectory",
+ "directory=ByteBuffersDirectory",
"doc.stored=false",
"merge.factor=3",
"doc.tokenized=false",
@@ -695,7 +695,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
"max.buffered=3",
"doc.term.vector=false",
"content.source.forever=false",
- "directory=RAMDirectory",
+ "directory=ByteBuffersDirectory",
"merge.policy=org.apache.lucene.index.LogDocMergePolicy",
"doc.stored=false",
"doc.tokenized=false",
@@ -767,7 +767,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
"content.source.log.step=30",
"doc.term.vector=false",
"content.source.forever=false",
- "directory=RAMDirectory",
+ "directory=ByteBuffersDirectory",
"doc.stored=false",
"doc.tokenized=false",
"task.max.depth.log=1",
@@ -815,7 +815,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
"docs.file=" + getReuters20LinesFile(),
"content.source.log.step=3",
"content.source.forever=false",
- "directory=RAMDirectory",
+ "directory=ByteBuffersDirectory",
"# ----- alg ",
"{ \"Rounds\"",
" ResetSystemErase",
@@ -880,7 +880,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
"docs.file=" + getReuters20LinesFile(),
"content.source.log.step=3",
"content.source.forever=false",
- "directory=RAMDirectory",
+ "directory=ByteBuffersDirectory",
"# ----- alg ",
"{ \"Rounds\"",
" ResetSystemErase",
@@ -946,7 +946,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
"docs.file=" + getReuters20LinesFile(),
"work.dir=" + getWorkDir().toAbsolutePath().toString().replaceAll("\\\\", "/"), // Fix Windows path
"content.source.forever=false",
- "directory=RAMDirectory",
+ "directory=ByteBuffersDirectory",
"AnalyzerFactory(name:'" + singleQuoteEscapedName + "', " + params + ")",
"NewAnalyzer('" + singleQuoteEscapedName + "')",
"CreateIndex",
diff --git a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksParse.java b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksParse.java
index 04e15cab760..d6fdb2b0929 100644
--- a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksParse.java
+++ b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksParse.java
@@ -34,11 +34,10 @@ import org.apache.lucene.benchmark.byTask.tasks.TaskSequence;
import org.apache.lucene.benchmark.byTask.utils.Algorithm;
import org.apache.lucene.benchmark.byTask.utils.Config;
import org.apache.lucene.search.Query;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressSysoutChecks;
-/** Test very simply that perf tasks are parses as expected. */
+/** Test very simply that perf tasks are parsed as expected. */
@SuppressSysoutChecks(bugUrl = "very noisy")
public class TestPerfTasksParse extends LuceneTestCase {
@@ -47,7 +46,7 @@ public class TestPerfTasksParse extends LuceneTestCase {
// properties in effect in all tests here
static final String propPart =
- INDENT + "directory=RAMDirectory" + NEW_LINE +
+ INDENT + "directory=ByteBuffersDirectory" + NEW_LINE +
INDENT + "print.props=false" + NEW_LINE;
/** Test the repetiotion parsing for parallel tasks */
@@ -122,7 +121,7 @@ public class TestPerfTasksParse extends LuceneTestCase {
config.set("content.source", MockContentSource.class.getName());
String dir = config.get("content.source", null);
if (dir != null) { Class.forName(dir); }
- config.set("directory", RAMDirectory.class.getName());
+ config.set("directory", "ByteBuffersDirectory");
if (config.get("line.file.out", null) != null) {
config.set("line.file.out", createTempFile("linefile", ".txt").toAbsolutePath().toString());
}
diff --git a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/feeds/DocMakerTest.java b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/feeds/DocMakerTest.java
index 331fd23ee32..1e33d6f041c 100644
--- a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/feeds/DocMakerTest.java
+++ b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/feeds/DocMakerTest.java
@@ -76,7 +76,7 @@ public class DocMakerTest extends BenchmarkTestCase {
// Indexing configuration.
props.setProperty("analyzer", WhitespaceAnalyzer.class.getName());
props.setProperty("content.source", OneDocSource.class.getName());
- props.setProperty("directory", "RAMDirectory");
+ props.setProperty("directory", "ByteBuffersDirectory");
if (setIndexProps) {
props.setProperty("doc.index.props", Boolean.toString(indexPropsVal));
}
@@ -105,7 +105,7 @@ public class DocMakerTest extends BenchmarkTestCase {
// Indexing configuration.
props.setProperty("analyzer", WhitespaceAnalyzer.class.getName());
- props.setProperty("directory", "RAMDirectory");
+ props.setProperty("directory", "ByteBuffersDirectory");
if (setNormsProp) {
props.setProperty("doc.tokenized.norms", Boolean.toString(normsPropVal));
}
diff --git a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/feeds/LineDocSourceTest.java b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/feeds/LineDocSourceTest.java
index e826e43c05a..927dcb79e3b 100644
--- a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/feeds/LineDocSourceTest.java
+++ b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/feeds/LineDocSourceTest.java
@@ -131,7 +131,7 @@ public class LineDocSourceTest extends BenchmarkTestCase {
// Indexing configuration.
props.setProperty("analyzer", WhitespaceAnalyzer.class.getName());
props.setProperty("content.source", LineDocSource.class.getName());
- props.setProperty("directory", "RAMDirectory");
+ props.setProperty("directory", "ByteBuffersDirectory");
props.setProperty("doc.stored", "true");
props.setProperty("doc.index.props", "true");
diff --git a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/AddIndexesTaskTest.java b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/AddIndexesTaskTest.java
index 1f38264aec7..85198034cbd 100644
--- a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/AddIndexesTaskTest.java
+++ b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/AddIndexesTaskTest.java
@@ -28,8 +28,8 @@ import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -66,7 +66,7 @@ public class AddIndexesTaskTest extends BenchmarkTestCase {
Properties props = new Properties();
props.setProperty("writer.version", Version.LATEST.toString());
props.setProperty("print.props", "false"); // don't print anything
- props.setProperty("directory", "RAMDirectory");
+ props.setProperty("directory", "ByteBuffersDirectory");
props.setProperty(AddIndexesTask.ADDINDEXES_INPUT_DIR, inputDir.toAbsolutePath().toString());
Config config = new Config(props);
return new PerfRunData(config);
@@ -74,7 +74,7 @@ public class AddIndexesTaskTest extends BenchmarkTestCase {
private void assertIndex(PerfRunData runData) throws Exception {
Directory taskDir = runData.getDirectory();
- assertSame(RAMDirectory.class, taskDir.getClass());
+ assertSame(ByteBuffersDirectory.class, taskDir.getClass());
IndexReader r = DirectoryReader.open(taskDir);
try {
assertEquals(10, r.numDocs());
diff --git a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/CommitIndexTaskTest.java b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/CommitIndexTaskTest.java
index f6e7faa43d9..c50ac10ea19 100644
--- a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/CommitIndexTaskTest.java
+++ b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/CommitIndexTaskTest.java
@@ -32,7 +32,7 @@ public class CommitIndexTaskTest extends BenchmarkTestCase {
Properties props = new Properties();
props.setProperty("writer.version", Version.LATEST.toString());
props.setProperty("print.props", "false"); // don't print anything
- props.setProperty("directory", "RAMDirectory");
+ props.setProperty("directory", "ByteBuffersDirectory");
Config config = new Config(props);
return new PerfRunData(config);
}
diff --git a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTaskTest.java b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTaskTest.java
index 90ae88ebf5b..9d323eb5323 100644
--- a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTaskTest.java
+++ b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTaskTest.java
@@ -40,7 +40,7 @@ public class CreateIndexTaskTest extends BenchmarkTestCase {
Properties props = new Properties();
props.setProperty("writer.version", Version.LATEST.toString());
props.setProperty("print.props", "false"); // don't print anything
- props.setProperty("directory", "RAMDirectory");
+ props.setProperty("directory", "ByteBuffersDirectory");
if (infoStreamValue != null) {
props.setProperty("writer.info.stream", infoStreamValue);
}
diff --git a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/PerfTaskTest.java b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/PerfTaskTest.java
index 09bda2245d6..74c54d280d4 100644
--- a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/PerfTaskTest.java
+++ b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/PerfTaskTest.java
@@ -50,7 +50,7 @@ public class PerfTaskTest extends BenchmarkTestCase {
if (setTaskLogStep) {
props.setProperty("log.step.MyPerf", Integer.toString(taskLogStepVal));
}
- props.setProperty("directory", "RAMDirectory"); // no accidental FS dir.
+ props.setProperty("directory", "ByteBuffersDirectory"); // no accidental FS dir.
Config config = new Config(props);
return new PerfRunData(config);
}
diff --git a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/WriteEnwikiLineDocTaskTest.java b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/WriteEnwikiLineDocTaskTest.java
index 117ed3ac4b1..de39bfe6b2a 100644
--- a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/WriteEnwikiLineDocTaskTest.java
+++ b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/WriteEnwikiLineDocTaskTest.java
@@ -58,7 +58,7 @@ public class WriteEnwikiLineDocTaskTest extends BenchmarkTestCase {
Properties props = new Properties();
props.setProperty("doc.maker", docMakerName);
props.setProperty("line.file.out", file.toAbsolutePath().toString());
- props.setProperty("directory", "RAMDirectory"); // no accidental FS dir.
+ props.setProperty("directory", "ByteBuffersDirectory"); // no accidental FS dir.
Config config = new Config(props);
return new PerfRunData(config);
}
diff --git a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/WriteLineDocTaskTest.java b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/WriteLineDocTaskTest.java
index 3b2d6259469..1536d21080b 100644
--- a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/WriteLineDocTaskTest.java
+++ b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/tasks/WriteLineDocTaskTest.java
@@ -142,7 +142,7 @@ public class WriteLineDocTaskTest extends BenchmarkTestCase {
Properties props = new Properties();
props.setProperty("doc.maker", docMakerName);
props.setProperty("line.file.out", file.toAbsolutePath().toString());
- props.setProperty("directory", "RAMDirectory"); // no accidental FS dir.
+ props.setProperty("directory", "ByteBuffersDirectory"); // no accidental FS dir.
if (allowEmptyDocs) {
props.setProperty("sufficient.fields", ",");
}
diff --git a/lucene/core/src/java/org/apache/lucene/store/ByteBuffersDirectory.java b/lucene/core/src/java/org/apache/lucene/store/ByteBuffersDirectory.java
index d1302dc7fd2..535d5582943 100644
--- a/lucene/core/src/java/org/apache/lucene/store/ByteBuffersDirectory.java
+++ b/lucene/core/src/java/org/apache/lucene/store/ByteBuffersDirectory.java
@@ -157,6 +157,12 @@ public final class ByteBuffersDirectory extends BaseDirectory {
return file.length();
}
+ public boolean fileExists(String name) {
+ ensureOpen();
+ FileEntry file = files.get(name);
+ return file != null;
+ }
+
@Override
public IndexOutput createOutput(String name, IOContext context) throws IOException {
ensureOpen();
diff --git a/lucene/core/src/java/org/apache/lucene/store/Directory.java b/lucene/core/src/java/org/apache/lucene/store/Directory.java
index 505be7fe0f1..7dcf7097af7 100644
--- a/lucene/core/src/java/org/apache/lucene/store/Directory.java
+++ b/lucene/core/src/java/org/apache/lucene/store/Directory.java
@@ -44,7 +44,7 @@ import org.apache.lucene.util.IOUtils;
*
*
* @see FSDirectory
- * @see RAMDirectory
+ * @see ByteBuffersDirectory
* @see FilterDirectory
*/
public abstract class Directory implements Closeable {
diff --git a/lucene/core/src/java/org/apache/lucene/store/NRTCachingDirectory.java b/lucene/core/src/java/org/apache/lucene/store/NRTCachingDirectory.java
index 0bbc5aedaad..ee8aac0e1a1 100644
--- a/lucene/core/src/java/org/apache/lucene/store/NRTCachingDirectory.java
+++ b/lucene/core/src/java/org/apache/lucene/store/NRTCachingDirectory.java
@@ -19,15 +19,16 @@ package org.apache.lucene.store;
import java.io.FileNotFoundException;
import java.io.IOException;
+import java.io.UncheckedIOException;
import java.nio.file.NoSuchFileException;
import java.util.Arrays;
import java.util.Collection;
-import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.util.Accountable;
-import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.IOUtils;
// TODO
@@ -35,8 +36,7 @@ import org.apache.lucene.util.IOUtils;
// - rename to MergeCacheingDir? NRTCachingDir
/**
- * Wraps a {@link RAMDirectory}
- * around any provided delegate directory, to
+ * Wraps a RAM-resident directory around any provided delegate directory, to
* be used during NRT search.
*
* This class is likely only useful in a near-real-time
@@ -67,9 +67,24 @@ import org.apache.lucene.util.IOUtils;
*/
public class NRTCachingDirectory extends FilterDirectory implements Accountable {
+ private final AtomicBoolean closed = new AtomicBoolean(false);
- private final RAMDirectory cache = new RAMDirectory();
+ /**
+ * Current total size of files in the cache is maintained separately for faster access.
+ */
+ private final AtomicLong cacheSize = new AtomicLong();
+ /**
+ * RAM-resident directory that updates {@link #cacheSize} when files are successfully closed.
+ */
+ private final ByteBuffersDirectory cacheDirectory = new ByteBuffersDirectory(
+ new SingleInstanceLockFactory(),
+ ByteBuffersDataOutput::new,
+ (fileName, content) -> {
+ cacheSize.addAndGet(content.size());
+ return ByteBuffersDirectory.OUTPUT_AS_MANY_BUFFERS_LUCENE.apply(fileName, content);
+ }
+ );
private final long maxMergeSizeBytes;
private final long maxCachedBytes;
@@ -83,8 +98,8 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
* {@code <= maxCachedMB} */
public NRTCachingDirectory(Directory delegate, double maxMergeSizeMB, double maxCachedMB) {
super(delegate);
- maxMergeSizeBytes = (long) (maxMergeSizeMB*1024*1024);
- maxCachedBytes = (long) (maxCachedMB*1024*1024);
+ maxMergeSizeBytes = (long) (maxMergeSizeMB * 1024 * 1024);
+ maxCachedBytes = (long) (maxCachedMB * 1024 * 1024);
}
@@ -96,10 +111,10 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
@Override
public synchronized String[] listAll() throws IOException {
final Set files = new HashSet<>();
- for(String f : cache.listAll()) {
+ for (String f : cacheDirectory.listAll()) {
files.add(f);
}
- for(String f : in.listAll()) {
+ for (String f : in.listAll()) {
files.add(f);
}
String[] result = files.toArray(new String[files.size()]);
@@ -112,8 +127,8 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
if (VERBOSE) {
System.out.println("nrtdir.deleteFile name=" + name);
}
- if (cache.fileNameExists(name)) {
- cache.deleteFile(name);
+ if (cacheDirectory.fileExists(name)) {
+ cacheDirectory.deleteFile(name);
} else {
in.deleteFile(name);
}
@@ -121,15 +136,19 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
@Override
public synchronized long fileLength(String name) throws IOException {
- if (cache.fileNameExists(name)) {
- return cache.fileLength(name);
+ if (cacheDirectory.fileExists(name)) {
+ return cacheDirectory.fileLength(name);
} else {
return in.fileLength(name);
}
}
public String[] listCachedFiles() {
- return cache.listAll();
+ try {
+ return cacheDirectory.listAll();
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
}
@Override
@@ -141,7 +160,7 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
if (VERBOSE) {
System.out.println(" to cache");
}
- return cache.createOutput(name, context);
+ return cacheDirectory.createOutput(name, context);
} else {
return in.createOutput(name, context);
}
@@ -161,7 +180,7 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
@Override
public void rename(String source, String dest) throws IOException {
unCache(source);
- if (cache.fileNameExists(dest)) {
+ if (cacheDirectory.fileExists(dest)) {
throw new IllegalArgumentException("target file " + dest + " already exists");
}
in.rename(source, dest);
@@ -172,11 +191,11 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
if (VERBOSE) {
System.out.println("nrtdir.openInput name=" + name);
}
- if (cache.fileNameExists(name)) {
+ if (cacheDirectory.fileExists(name)) {
if (VERBOSE) {
System.out.println(" from cache");
}
- return cache.openInput(name, context);
+ return cacheDirectory.openInput(name, context);
} else {
return in.openInput(name, context);
}
@@ -191,25 +210,20 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
// it for defensive reasons... or in case the app is
// doing something custom (creating outputs directly w/o
// using IndexWriter):
- boolean success = false;
- try {
- if (cache.isOpen) {
- for(String fileName : cache.listAll()) {
- unCache(fileName);
- }
- }
- success = true;
- } finally {
- if (success) {
- IOUtils.close(cache, in);
- } else {
- IOUtils.closeWhileHandlingException(cache, in);
- }
- }
+ IOUtils.close(
+ () -> {
+ if (!closed.getAndSet(true)) {
+ for(String fileName : cacheDirectory.listAll()) {
+ unCache(fileName);
+ }
+ }
+ },
+ cacheDirectory,
+ in);
}
/** Subclass can override this to customize logic; return
- * true if this file should be written to the RAMDirectory. */
+ * true if this file should be written to the RAM-based cache first. */
protected boolean doCacheWrite(String name, IOContext context) {
//System.out.println(Thread.currentThread().getName() + ": CACHE check merge=" + merge + " size=" + (merge==null ? 0 : merge.estimatedMergeBytes));
@@ -220,7 +234,7 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
bytes = context.flushInfo.estimatedSegmentSize;
}
- return (bytes <= maxMergeSizeBytes) && (bytes + cache.ramBytesUsed()) <= maxCachedBytes;
+ return (bytes <= maxMergeSizeBytes) && (bytes + cacheSize.get()) <= maxCachedBytes;
}
@Override
@@ -236,11 +250,11 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
Directory first;
Directory second;
if (doCacheWrite(prefix, context)) {
- first = cache;
+ first = cacheDirectory;
second = in;
} else {
first = in;
- second = cache;
+ second = cacheDirectory;
}
IndexOutput out = null;
@@ -282,47 +296,27 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
}
}
- private final Object uncacheLock = new Object();
-
private void unCache(String fileName) throws IOException {
- // Only let one thread uncache at a time; this only
- // happens during commit() or close():
- synchronized(uncacheLock) {
+ // Must sync here because other sync methods have
+ // if (cache.fileNameExists(name)) { ... } else { ... }:
+ synchronized (this) {
if (VERBOSE) {
System.out.println("nrtdir.unCache name=" + fileName);
}
- if (!cache.fileNameExists(fileName)) {
+ if (!cacheDirectory.fileExists(fileName)) {
// Another thread beat us...
return;
}
assert slowFileExists(in, fileName) == false: "fileName=" + fileName + " exists both in cache and in delegate";
- final IOContext context = IOContext.DEFAULT;
- final IndexOutput out = in.createOutput(fileName, context);
- IndexInput in = null;
- try {
- in = cache.openInput(fileName, context);
- out.copyBytes(in, in.length());
- } finally {
- IOUtils.close(in, out);
- }
-
- // Lock order: uncacheLock -> this
- synchronized(this) {
- // Must sync here because other sync methods have
- // if (cache.fileNameExists(name)) { ... } else { ... }:
- cache.deleteFile(fileName);
- }
+ in.copyFrom(cacheDirectory, fileName, fileName, IOContext.DEFAULT);
+ cacheSize.addAndGet(-cacheDirectory.fileLength(fileName));
+ cacheDirectory.deleteFile(fileName);
}
}
@Override
public long ramBytesUsed() {
- return cache.ramBytesUsed();
- }
-
- @Override
- public Collection getChildResources() {
- return Collections.singleton(Accountables.namedAccountable("cache", cache));
+ return cacheSize.get();
}
}
diff --git a/lucene/core/src/java/org/apache/lucene/store/RAMDirectory.java b/lucene/core/src/java/org/apache/lucene/store/RAMDirectory.java
deleted file mode 100644
index 8d2dcf34dc1..00000000000
--- a/lucene/core/src/java/org/apache/lucene/store/RAMDirectory.java
+++ /dev/null
@@ -1,259 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.store;
-
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.nio.file.FileAlreadyExistsException;
-import java.nio.file.Files;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.atomic.AtomicLong;
-
-import org.apache.lucene.index.IndexFileNames;
-import org.apache.lucene.util.Accountable;
-import org.apache.lucene.util.Accountables;
-
-/**
- * A memory-resident {@link Directory} implementation. Locking
- * implementation is by default the {@link SingleInstanceLockFactory}.
- *
- * Warning: This class is not intended to work with huge
- * indexes. Everything beyond several hundred megabytes will waste
- * resources (GC cycles), because it uses an internal buffer size
- * of 1024 bytes, producing millions of {@code byte[1024]} arrays.
- * This class is optimized for small memory-resident indexes.
- * It also has bad concurrency on multithreaded environments.
- *
- *
- * <p>It is recommended to materialize large indexes on disk and use
- * {@link MMapDirectory}, which is a high-performance directory
- * implementation working directly on the file system cache of the
- * operating system, so copying data to Java heap space is not useful.
- *
- * @deprecated This class uses inefficient synchronization and is discouraged
- * in favor of {@link MMapDirectory}. It will be removed in future versions
- * of Lucene.
- */
-@Deprecated
-public class RAMDirectory extends BaseDirectory implements Accountable {
- protected final Map fileMap = new ConcurrentHashMap<>();
- protected final AtomicLong sizeInBytes = new AtomicLong();
-
- /** Used to generate temp file names in {@link #createTempOutput}. */
- private final AtomicLong nextTempFileCounter = new AtomicLong();
-
- /** Constructs an empty {@link Directory}. */
- public RAMDirectory() {
- this(new SingleInstanceLockFactory());
- }
-
- /** Constructs an empty {@link Directory} with the given {@link LockFactory}. */
- public RAMDirectory(LockFactory lockFactory) {
- super(lockFactory);
- }
-
- /**
- * Creates a new <code>RAMDirectory</code> instance from a different
- * <code>Directory</code> implementation. This can be used to load
- * a disk-based index into memory.
- *
- * Warning: This class is not intended to work with huge
- * indexes. Everything beyond several hundred megabytes will waste
- * resources (GC cycles), because it uses an internal buffer size
- * of 1024 bytes, producing millions of {@code byte[1024]} arrays.
- * This class is optimized for small memory-resident indexes.
- * It also has bad concurrency on multithreaded environments.
- *
- *
- * <p>For disk-based indexes it is recommended to use
- * {@link MMapDirectory}, which is a high-performance directory
- * implementation working directly on the file system cache of the
- * operating system, so copying data to Java heap space is not useful.
- *
- *
- * <p>Note that the resulting <code>RAMDirectory</code> instance is fully
- * independent from the original <code>Directory</code> (it is a
- * complete copy). Any subsequent changes to the
- * original <code>Directory</code> will not be visible in the
- * <code>RAMDirectory</code> instance.
- *
- * @param dir a <code>Directory</code> value
- * @exception IOException if an error occurs
- */
- public RAMDirectory(FSDirectory dir, IOContext context) throws IOException {
- this(dir, false, context);
- }
-
- private RAMDirectory(FSDirectory dir, boolean closeDir, IOContext context) throws IOException {
- this();
- for (String file : dir.listAll()) {
- if (!Files.isDirectory(dir.getDirectory().resolve(file))) {
- copyFrom(dir, file, file, context);
- }
- }
- if (closeDir) {
- dir.close();
- }
- }
-
- @Override
- public final String[] listAll() {
- ensureOpen();
- // NOTE: this returns a "weakly consistent view". Unless we change Dir API, keep this,
- // and do not synchronize or anything stronger. it's great for testing!
- // NOTE: fileMap.keySet().toArray(new String[0]) is broken in non Sun JDKs,
- // and the code below is resilient to map changes during the array population.
- // NOTE: don't replace this with return names.toArray(new String[names.size()]);
- // or some files could be null at the end of the array if files are being deleted
- // concurrently
- Set fileNames = fileMap.keySet();
- List names = new ArrayList<>(fileNames.size());
- for (String name : fileNames) {
- names.add(name);
- }
- String[] namesArray = names.toArray(new String[names.size()]);
- Arrays.sort(namesArray);
- return namesArray;
- }
-
- public final boolean fileNameExists(String name) {
- ensureOpen();
- return fileMap.containsKey(name);
- }
-
- /** Returns the length in bytes of a file in the directory.
- * @throws IOException if the file does not exist
- */
- @Override
- public final long fileLength(String name) throws IOException {
- ensureOpen();
- RAMFile file = fileMap.get(name);
- if (file == null) {
- throw new FileNotFoundException(name);
- }
- return file.getLength();
- }
-
- /**
- * Return total size in bytes of all files in this directory. This is
- * currently quantized to RAMOutputStream.BUFFER_SIZE.
- */
- @Override
- public final long ramBytesUsed() {
- ensureOpen();
- return sizeInBytes.get();
- }
-
- @Override
- public Collection getChildResources() {
- return Accountables.namedAccountables("file", fileMap);
- }
-
- @Override
- public void deleteFile(String name) throws IOException {
- ensureOpen();
- RAMFile file = fileMap.remove(name);
- if (file != null) {
- file.directory = null;
- sizeInBytes.addAndGet(-file.sizeInBytes);
- } else {
- throw new FileNotFoundException(name);
- }
- }
-
- @Override
- public IndexOutput createOutput(String name, IOContext context) throws IOException {
- ensureOpen();
- RAMFile file = newRAMFile();
- if (fileMap.putIfAbsent(name, file) != null) {
- throw new FileAlreadyExistsException(name);
- }
- return new RAMOutputStream(name, file, true);
- }
-
- @Override
- public IndexOutput createTempOutput(String prefix, String suffix, IOContext context) throws IOException {
- ensureOpen();
-
- // Make the file first...
- RAMFile file = newRAMFile();
-
- // ... then try to find a unique name for it:
- while (true) {
- String name = IndexFileNames.segmentFileName(prefix, suffix + "_" + Long.toString(nextTempFileCounter.getAndIncrement(), Character.MAX_RADIX), "tmp");
- if (fileMap.putIfAbsent(name, file) == null) {
- return new RAMOutputStream(name, file, true);
- }
- }
- }
-
- /**
- * Returns a new {@link RAMFile} for storing data. This method can be
- * overridden to return different {@link RAMFile} impls, that e.g. override
- * {@link RAMFile#newBuffer(int)}.
- */
- protected RAMFile newRAMFile() {
- return new RAMFile(this);
- }
-
- @Override
- public void sync(Collection names) throws IOException {
- }
-
- @Override
- public void rename(String source, String dest) throws IOException {
- ensureOpen();
- RAMFile file = fileMap.get(source);
- if (file == null) {
- throw new FileNotFoundException(source);
- }
- if (fileMap.putIfAbsent(dest, file) != null) {
- throw new FileAlreadyExistsException(dest);
- }
- if (!fileMap.remove(source, file)) {
- throw new IllegalStateException("file was unexpectedly replaced: " + source);
- }
- fileMap.remove(source);
- }
-
- @Override
- public void syncMetaData() throws IOException {
- // we are by definition not durable!
- }
-
- /** Returns a stream reading an existing file. */
- @Override
- public IndexInput openInput(String name, IOContext context) throws IOException {
- ensureOpen();
- RAMFile file = fileMap.get(name);
- if (file == null) {
- throw new FileNotFoundException(name);
- }
- return new RAMInputStream(name, file);
- }
-
- /** Closes the store to future operations, releasing associated memory. */
- @Override
- public void close() {
- isOpen = false;
- fileMap.clear();
- }
-}
diff --git a/lucene/core/src/java/org/apache/lucene/store/RAMFile.java b/lucene/core/src/java/org/apache/lucene/store/RAMFile.java
deleted file mode 100644
index d4fd014e0a9..00000000000
--- a/lucene/core/src/java/org/apache/lucene/store/RAMFile.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.store;
-
-
-import java.util.ArrayList;
-import java.util.Arrays;
-
-import org.apache.lucene.util.Accountable;
-
-/**
- * Represents a file in RAM as a list of byte[] buffers.
- *
- * @lucene.internal
- * @deprecated This class uses inefficient synchronization and is discouraged
- * in favor of {@link MMapDirectory}. It will be removed in future versions
- * of Lucene.
- */
-@Deprecated
-public class RAMFile implements Accountable {
- protected final ArrayList buffers = new ArrayList<>();
- long length;
- RAMDirectory directory;
- protected long sizeInBytes;
-
- // File used as buffer, in no RAMDirectory
- public RAMFile() {}
-
- RAMFile(RAMDirectory directory) {
- this.directory = directory;
- }
-
- // For non-stream access from thread that might be concurrent with writing
- public synchronized long getLength() {
- return length;
- }
-
- protected synchronized void setLength(long length) {
- this.length = length;
- }
-
- protected final byte[] addBuffer(int size) {
- byte[] buffer = newBuffer(size);
- synchronized(this) {
- buffers.add(buffer);
- sizeInBytes += size;
- }
-
- if (directory != null) {
- directory.sizeInBytes.getAndAdd(size);
- }
- return buffer;
- }
-
- protected final synchronized byte[] getBuffer(int index) {
- return buffers.get(index);
- }
-
- protected final synchronized int numBuffers() {
- return buffers.size();
- }
-
- /**
- * Expert: allocate a new buffer.
- * Subclasses can allocate differently.
- * @param size size of allocated buffer.
- * @return allocated buffer.
- */
- protected byte[] newBuffer(int size) {
- return new byte[size];
- }
-
- @Override
- public synchronized long ramBytesUsed() {
- return sizeInBytes;
- }
-
- @Override
- public String toString() {
- return getClass().getSimpleName() + "(length=" + length + ")";
- }
-
- @Override
- public int hashCode() {
- int h = (int) (length ^ (length >>> 32));
- for (byte[] block : buffers) {
- h = 31 * h + Arrays.hashCode(block);
- }
- return h;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj) return true;
- if (obj == null) return false;
- if (getClass() != obj.getClass()) return false;
- RAMFile other = (RAMFile) obj;
- if (length != other.length) return false;
- if (buffers.size() != other.buffers.size()) {
- return false;
- }
- for (int i = 0; i < buffers.size(); i++) {
- if (!Arrays.equals(buffers.get(i), other.buffers.get(i))) {
- return false;
- }
- }
- return true;
- }
-}
diff --git a/lucene/core/src/java/org/apache/lucene/store/RAMInputStream.java b/lucene/core/src/java/org/apache/lucene/store/RAMInputStream.java
deleted file mode 100644
index 312d6383cd8..00000000000
--- a/lucene/core/src/java/org/apache/lucene/store/RAMInputStream.java
+++ /dev/null
@@ -1,182 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.store;
-
-
-import java.io.EOFException;
-import java.io.IOException;
-
-import static org.apache.lucene.store.RAMOutputStream.BUFFER_SIZE;
-
-/**
- * A memory-resident {@link IndexInput} implementation.
- *
- * @lucene.internal
- * @deprecated This class uses inefficient synchronization and is discouraged
- * in favor of {@link MMapDirectory}. It will be removed in future versions
- * of Lucene.
- */
-@Deprecated
-public class RAMInputStream extends IndexInput implements Cloneable {
-
- private final RAMFile file;
- private final long length;
-
- private byte[] currentBuffer;
- private int currentBufferIndex;
-
- private int bufferPosition;
- private int bufferLength;
-
- public RAMInputStream(String name, RAMFile f) throws IOException {
- this(name, f, f.length);
- }
-
- RAMInputStream(String name, RAMFile f, long length) throws IOException {
- super("RAMInputStream(name=" + name + ")");
- this.file = f;
- this.length = length;
- if (length/BUFFER_SIZE >= Integer.MAX_VALUE) {
- throw new IOException("RAMInputStream too large length=" + length + ": " + name);
- }
-
- setCurrentBuffer();
- }
-
- @Override
- public void close() {
- // nothing to do here
- }
-
- @Override
- public long length() {
- return length;
- }
-
- @Override
- public byte readByte() throws IOException {
- if (bufferPosition == bufferLength) {
- nextBuffer();
- }
- if (currentBuffer == null) {
- throw new EOFException();
- } else {
- return currentBuffer[bufferPosition++];
- }
- }
-
- @Override
- public void readBytes(byte[] b, int offset, int len) throws IOException {
- while (len > 0) {
- if (bufferPosition == bufferLength) {
- nextBuffer();
- }
-
- if (currentBuffer == null) {
- throw new EOFException();
- }
-
- int remainInBuffer = bufferLength - bufferPosition;
- int bytesToCopy = len < remainInBuffer ? len : remainInBuffer;
- System.arraycopy(currentBuffer, bufferPosition, b, offset, bytesToCopy);
- offset += bytesToCopy;
- len -= bytesToCopy;
- bufferPosition += bytesToCopy;
- }
- }
-
- @Override
- public long getFilePointer() {
- return (long) currentBufferIndex * BUFFER_SIZE + bufferPosition;
- }
-
- @Override
- public void seek(long pos) throws IOException {
- int newBufferIndex = (int) (pos / BUFFER_SIZE);
-
- if (newBufferIndex != currentBufferIndex) {
- // we seek'd to a different buffer:
- currentBufferIndex = newBufferIndex;
- setCurrentBuffer();
- }
-
- bufferPosition = (int) (pos % BUFFER_SIZE);
-
- // This is not >= because seeking to exact end of file is OK: this is where
- // you'd also be if you did a readBytes of all bytes in the file
- if (getFilePointer() > length()) {
- throw new EOFException("seek beyond EOF: pos=" + getFilePointer() + " vs length=" + length() + ": " + this);
- }
- }
-
- private void nextBuffer() throws IOException {
- // This is >= because we are called when there is at least 1 more byte to read:
- if (getFilePointer() >= length()) {
- throw new EOFException("cannot read another byte at EOF: pos=" + getFilePointer() + " vs length=" + length() + ": " + this);
- }
- currentBufferIndex++;
- setCurrentBuffer();
- assert currentBuffer != null;
- bufferPosition = 0;
- }
-
- private final void setCurrentBuffer() throws IOException {
- if (currentBufferIndex < file.numBuffers()) {
- currentBuffer = file.getBuffer(currentBufferIndex);
- assert currentBuffer != null;
- long bufferStart = (long) BUFFER_SIZE * (long) currentBufferIndex;
- bufferLength = (int) Math.min(BUFFER_SIZE, length - bufferStart);
- } else {
- currentBuffer = null;
- }
- }
-
- @Override
- public IndexInput slice(String sliceDescription, final long offset, final long sliceLength) throws IOException {
- if (offset < 0 || sliceLength < 0 || offset + sliceLength > this.length) {
- throw new IllegalArgumentException("slice() " + sliceDescription + " out of bounds: " + this);
- }
- return new RAMInputStream(getFullSliceDescription(sliceDescription), file, offset + sliceLength) {
- {
- seek(0L);
- }
-
- @Override
- public void seek(long pos) throws IOException {
- if (pos < 0L) {
- throw new IllegalArgumentException("Seeking to negative position: " + this);
- }
- super.seek(pos + offset);
- }
-
- @Override
- public long getFilePointer() {
- return super.getFilePointer() - offset;
- }
-
- @Override
- public long length() {
- return sliceLength;
- }
-
- @Override
- public IndexInput slice(String sliceDescription, long ofs, long len) throws IOException {
- return super.slice(sliceDescription, offset + ofs, len);
- }
- };
- }
-}
diff --git a/lucene/core/src/java/org/apache/lucene/store/RAMOutputStream.java b/lucene/core/src/java/org/apache/lucene/store/RAMOutputStream.java
deleted file mode 100644
index 02dc8439693..00000000000
--- a/lucene/core/src/java/org/apache/lucene/store/RAMOutputStream.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.store;
-
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.zip.CRC32;
-import java.util.zip.Checksum;
-
-import org.apache.lucene.util.Accountable;
-import org.apache.lucene.util.Accountables;
-
-/**
- * A memory-resident {@link IndexOutput} implementation.
- *
- * @lucene.internal
- * @deprecated This class uses inefficient synchronization and is discouraged
- * in favor of {@link MMapDirectory}. It will be removed in future versions
- * of Lucene.
- */
-@Deprecated
-public class RAMOutputStream extends IndexOutput implements Accountable {
- static final int BUFFER_SIZE = 1024;
-
- private final RAMFile file;
-
- private byte[] currentBuffer;
- private int currentBufferIndex;
-
- private int bufferPosition;
- private long bufferStart;
- private int bufferLength;
-
- private final Checksum crc;
-
- /** Construct an empty output buffer. */
- public RAMOutputStream() {
- this("noname", new RAMFile(), false);
- }
-
- /** Creates this, with no name. */
- public RAMOutputStream(RAMFile f, boolean checksum) {
- this("noname", f, checksum);
- }
-
- /** Creates this, with specified name. */
- public RAMOutputStream(String name, RAMFile f, boolean checksum) {
- super("RAMOutputStream(name=\"" + name + "\")", name);
- file = f;
-
- // make sure that we switch to the
- // first needed buffer lazily
- currentBufferIndex = -1;
- currentBuffer = null;
- if (checksum) {
- crc = new BufferedChecksum(new CRC32());
- } else {
- crc = null;
- }
- }
-
- /** Copy the current contents of this buffer to the provided {@link DataOutput}. */
- public void writeTo(DataOutput out) throws IOException {
- flush();
- final long end = file.length;
- long pos = 0;
- int buffer = 0;
- while (pos < end) {
- int length = BUFFER_SIZE;
- long nextPos = pos + length;
- if (nextPos > end) { // at the last buffer
- length = (int)(end - pos);
- }
- out.writeBytes(file.getBuffer(buffer++), length);
- pos = nextPos;
- }
- }
-
- /** Copy the current contents of this buffer to output
- * byte array */
- public void writeTo(byte[] bytes, int offset) throws IOException {
- flush();
- final long end = file.length;
- long pos = 0;
- int buffer = 0;
- int bytesUpto = offset;
- while (pos < end) {
- int length = BUFFER_SIZE;
- long nextPos = pos + length;
- if (nextPos > end) { // at the last buffer
- length = (int)(end - pos);
- }
- System.arraycopy(file.getBuffer(buffer++), 0, bytes, bytesUpto, length);
- bytesUpto += length;
- pos = nextPos;
- }
- }
-
- /** Resets this to an empty file. */
- public void reset() {
- currentBuffer = null;
- currentBufferIndex = -1;
- bufferPosition = 0;
- bufferStart = 0;
- bufferLength = 0;
- file.setLength(0);
- if (crc != null) {
- crc.reset();
- }
- }
-
- @Override
- public void close() throws IOException {
- flush();
- }
-
- @Override
- public void writeByte(byte b) throws IOException {
- if (bufferPosition == bufferLength) {
- currentBufferIndex++;
- switchCurrentBuffer();
- }
- if (crc != null) {
- crc.update(b);
- }
- currentBuffer[bufferPosition++] = b;
- }
-
- @Override
- public void writeBytes(byte[] b, int offset, int len) throws IOException {
- assert b != null;
- if (crc != null) {
- crc.update(b, offset, len);
- }
- while (len > 0) {
- if (bufferPosition == bufferLength) {
- currentBufferIndex++;
- switchCurrentBuffer();
- }
-
- int remainInBuffer = currentBuffer.length - bufferPosition;
- int bytesToCopy = len < remainInBuffer ? len : remainInBuffer;
- System.arraycopy(b, offset, currentBuffer, bufferPosition, bytesToCopy);
- offset += bytesToCopy;
- len -= bytesToCopy;
- bufferPosition += bytesToCopy;
- }
- }
-
- private final void switchCurrentBuffer() {
- if (currentBufferIndex == file.numBuffers()) {
- currentBuffer = file.addBuffer(BUFFER_SIZE);
- } else {
- currentBuffer = file.getBuffer(currentBufferIndex);
- }
- bufferPosition = 0;
- bufferStart = (long) BUFFER_SIZE * (long) currentBufferIndex;
- bufferLength = currentBuffer.length;
- }
-
- private void setFileLength() {
- long pointer = bufferStart + bufferPosition;
- if (pointer > file.length) {
- file.setLength(pointer);
- }
- }
-
- /** Forces any buffered output to be written. */
- protected void flush() throws IOException {
- setFileLength();
- }
-
- @Override
- public long getFilePointer() {
- return currentBufferIndex < 0 ? 0 : bufferStart + bufferPosition;
- }
-
- /** Returns byte usage of all buffers. */
- @Override
- public long ramBytesUsed() {
- return (long) file.numBuffers() * (long) BUFFER_SIZE;
- }
-
- @Override
- public Collection getChildResources() {
- return Collections.singleton(Accountables.namedAccountable("file", file));
- }
-
- @Override
- public long getChecksum() throws IOException {
- if (crc == null) {
- throw new IllegalStateException("internal RAMOutputStream created with checksum disabled");
- } else {
- return crc.getValue();
- }
- }
-}
diff --git a/lucene/core/src/java/org/apache/lucene/store/SingleInstanceLockFactory.java b/lucene/core/src/java/org/apache/lucene/store/SingleInstanceLockFactory.java
index 6bfcb0a92d0..3ab2a683bd7 100644
--- a/lucene/core/src/java/org/apache/lucene/store/SingleInstanceLockFactory.java
+++ b/lucene/core/src/java/org/apache/lucene/store/SingleInstanceLockFactory.java
@@ -25,8 +25,7 @@ import java.util.HashSet;
* meaning all locking will take place through this one instance.
* Only use this {@link LockFactory} when you are certain all
* IndexWriters for a given index are running
- * against a single shared in-process Directory instance. This is
- * currently the default locking for RAMDirectory.
+ * against a single shared in-process Directory instance.
*
* @see LockFactory
*/
diff --git a/lucene/core/src/java/org/apache/lucene/util/IOUtils.java b/lucene/core/src/java/org/apache/lucene/util/IOUtils.java
index ba0be427396..c1de6dfe082 100644
--- a/lucene/core/src/java/org/apache/lucene/util/IOUtils.java
+++ b/lucene/core/src/java/org/apache/lucene/util/IOUtils.java
@@ -46,7 +46,6 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.FileSwitchDirectory;
import org.apache.lucene.store.FilterDirectory;
-import org.apache.lucene.store.RAMDirectory;
/** This class emulates the new Java 7 "Try-With-Resources" statement.
* Remove once Lucene is on Java 7.
@@ -487,7 +486,7 @@ public final class IOUtils {
FileSwitchDirectory fsd = (FileSwitchDirectory) dir;
// Spinning is contagious:
return spins(fsd.getPrimaryDir()) || spins(fsd.getSecondaryDir());
- } else if (dir instanceof RAMDirectory || dir instanceof ByteBuffersDirectory) {
+ } else if (dir instanceof ByteBuffersDirectory) {
return false;
} else if (dir instanceof FSDirectory) {
return spins(((FSDirectory) dir).getDirectory());
diff --git a/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java b/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java
index 307a1a71d41..ef82685bece 100644
--- a/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java
+++ b/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java
@@ -34,9 +34,9 @@ import org.apache.lucene.index.MergePolicy.OneMerge;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.MergeScheduler;
import org.apache.lucene.index.MergeTrigger;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.LuceneTestCase;
@@ -175,7 +175,7 @@ public class TestMergeSchedulerExternal extends LuceneTestCase {
// we don't really need to execute anything, just to make sure the custom MS
// compiles. But ensure that it can be used as well, e.g., no other hidden
// dependencies or something. Therefore, don't use any random API !
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
IndexWriterConfig conf = new IndexWriterConfig(null);
conf.setMergeScheduler(new ReportingMergeScheduler());
IndexWriter writer = new IndexWriter(dir, conf);
@@ -187,5 +187,4 @@ public class TestMergeSchedulerExternal extends LuceneTestCase {
writer.close();
dir.close();
}
-
}
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestForUtil.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestForUtil.java
index e13645fbfad..bd0681029d1 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestForUtil.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestForUtil.java
@@ -23,11 +23,11 @@ import static org.apache.lucene.codecs.lucene50.ForUtil.MAX_ENCODED_SIZE;
import java.io.IOException;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.packed.PackedInts;
@@ -55,7 +55,7 @@ public class TestForUtil extends LuceneTestCase {
}
}
- final Directory d = new RAMDirectory();
+ final Directory d = new ByteBuffersDirectory();
final long endPointer;
{
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene80/TestLucene80DocValuesFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene80/TestLucene80DocValuesFormat.java
index 943f3cf9af3..dcb398ac88e 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene80/TestLucene80DocValuesFormat.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene80/TestLucene80DocValuesFormat.java
@@ -61,10 +61,9 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.index.TermsEnum.SeekStatus;
+import org.apache.lucene.store.ByteBuffersDataInput;
+import org.apache.lucene.store.ByteBuffersDataOutput;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMFile;
-import org.apache.lucene.store.RAMInputStream;
-import org.apache.lucene.store.RAMOutputStream;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.TestUtil;
@@ -445,8 +444,7 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
for (int maxDoc = frontier - 1; maxDoc <= frontier + 1; ++maxDoc) {
final Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy()));
- RAMFile buffer = new RAMFile();
- RAMOutputStream out = new RAMOutputStream(buffer, false);
+ ByteBuffersDataOutput out = new ByteBuffersDataOutput();
Document doc = new Document();
SortedSetDocValuesField field1 = new SortedSetDocValuesField("sset", new BytesRef());
doc.add(field1);
@@ -465,7 +463,7 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
out.writeBytes(ref.bytes, ref.offset, ref.length);
}
}
- out.close();
+
w.forceMerge(1);
DirectoryReader r = DirectoryReader.open(w);
w.close();
@@ -473,21 +471,20 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
assertEquals(maxDoc, sr.maxDoc());
SortedSetDocValues values = sr.getSortedSetDocValues("sset");
assertNotNull(values);
- try (RAMInputStream in = new RAMInputStream("", buffer)) {
- BytesRefBuilder b = new BytesRefBuilder();
- for (int i = 0; i < maxDoc; ++i) {
- assertEquals(i, values.nextDoc());
- final int numValues = in.readVInt();
+ ByteBuffersDataInput in = out.toDataInput();
+ BytesRefBuilder b = new BytesRefBuilder();
+ for (int i = 0; i < maxDoc; ++i) {
+ assertEquals(i, values.nextDoc());
+ final int numValues = in.readVInt();
- for (int j = 0; j < numValues; ++j) {
- b.setLength(in.readVInt());
- b.grow(b.length());
- in.readBytes(b.bytes(), 0, b.length());
- assertEquals(b.get(), values.lookupOrd(values.nextOrd()));
- }
-
- assertEquals(SortedSetDocValues.NO_MORE_ORDS, values.nextOrd());
+ for (int j = 0; j < numValues; ++j) {
+ b.setLength(in.readVInt());
+ b.grow(b.length());
+ in.readBytes(b.bytes(), 0, b.length());
+ assertEquals(b.get(), values.lookupOrd(values.nextOrd()));
}
+
+ assertEquals(SortedSetDocValues.NO_MORE_ORDS, values.nextOrd());
}
r.close();
dir.close();
@@ -500,8 +497,8 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
for (int maxDoc = frontier - 1; maxDoc <= frontier + 1; ++maxDoc) {
final Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy()));
- RAMFile buffer = new RAMFile();
- RAMOutputStream out = new RAMOutputStream(buffer, false);
+ ByteBuffersDataOutput buffer = new ByteBuffersDataOutput();
+
Document doc = new Document();
SortedNumericDocValuesField field1 = new SortedNumericDocValuesField("snum", 0L);
doc.add(field1);
@@ -513,10 +510,10 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
field1.setLongValue(s1);
field2.setLongValue(s2);
w.addDocument(doc);
- out.writeVLong(Math.min(s1, s2));
- out.writeVLong(Math.max(s1, s2));
+ buffer.writeVLong(Math.min(s1, s2));
+ buffer.writeVLong(Math.max(s1, s2));
}
- out.close();
+
w.forceMerge(1);
DirectoryReader r = DirectoryReader.open(w);
w.close();
@@ -524,13 +521,12 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
assertEquals(maxDoc, sr.maxDoc());
SortedNumericDocValues values = sr.getSortedNumericDocValues("snum");
assertNotNull(values);
- try (RAMInputStream in = new RAMInputStream("", buffer)) {
- for (int i = 0; i < maxDoc; ++i) {
- assertEquals(i, values.nextDoc());
- assertEquals(2, values.docValueCount());
- assertEquals(in.readVLong(), values.nextValue());
- assertEquals(in.readVLong(), values.nextValue());
- }
+ ByteBuffersDataInput dataInput = buffer.toDataInput();
+ for (int i = 0; i < maxDoc; ++i) {
+ assertEquals(i, values.nextDoc());
+ assertEquals(2, values.docValueCount());
+ assertEquals(dataInput.readVLong(), values.nextValue());
+ assertEquals(dataInput.readVLong(), values.nextValue());
}
r.close();
dir.close();
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java b/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java
index 7e5f0087a09..5f2d8b2684f 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java
@@ -43,10 +43,10 @@ import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.BaseDirectoryWrapper;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
@@ -669,7 +670,7 @@ public class TestAddIndexes extends LuceneTestCase {
public RunAddIndexesThreads(int numCopy) throws Throwable {
NUM_COPY = numCopy;
- dir = new MockDirectoryWrapper(random(), new RAMDirectory());
+ dir = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2));
for (int i = 0; i < NUM_INIT_DOCS; i++)
@@ -1103,7 +1104,7 @@ public class TestAddIndexes extends LuceneTestCase {
public void testNonCFSLeftovers() throws Exception {
Directory[] dirs = new Directory[2];
for (int i = 0; i < dirs.length; i++) {
- dirs[i] = new RAMDirectory();
+ dirs[i] = new ByteBuffersDirectory();
IndexWriter w = new IndexWriter(dirs[i], new IndexWriterConfig(new MockAnalyzer(random())));
Document d = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
@@ -1115,7 +1116,7 @@ public class TestAddIndexes extends LuceneTestCase {
DirectoryReader[] readers = new DirectoryReader[] { DirectoryReader.open(dirs[0]), DirectoryReader.open(dirs[1]) };
- MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory());
+ MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy(true));
MergePolicy lmp = conf.getMergePolicy();
// Force creation of CFS:
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java
index b4c63686bfe..6ef0fcf28c4 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java
@@ -24,12 +24,12 @@ import java.util.Collections;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.store.BaseDirectoryWrapper;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LineFileDocs;
import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;
import org.apache.lucene.util.LuceneTestCase;
@@ -87,7 +87,7 @@ public class TestAllFilesCheckIndexHeader extends LuceneTestCase {
private void checkOneFile(Directory dir, String victim) throws IOException {
// use ramdir explicit, as we do evil things like try to generate broken files, deletes must work.
- try (BaseDirectoryWrapper dirCopy = new MockDirectoryWrapper(random(), new RAMDirectory())) {
+ try (BaseDirectoryWrapper dirCopy = new MockDirectoryWrapper(random(), new ByteBuffersDirectory())) {
dirCopy.setCheckIndexOnClose(false);
long victimLength = dir.fileLength(victim);
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAtomicUpdate.java b/lucene/core/src/test/org/apache/lucene/index/TestAtomicUpdate.java
index 930daeea117..08fdffdfac3 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestAtomicUpdate.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestAtomicUpdate.java
@@ -163,19 +163,16 @@ public class TestAtomicUpdate extends LuceneTestCase {
//System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
}
- /*
- Run above stress test against RAMDirectory and then
- FSDirectory.
- */
+ /* Run the above stress test against an in-memory directory, then an FSDirectory. */
public void testAtomicUpdates() throws Exception {
Directory directory;
- // First in a RAM directory:
- directory = new MockDirectoryWrapper(random(), new RAMDirectory());
+ // First against an in-memory directory:
+ directory = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
runTest(directory);
directory.close();
- // Second in an FSDirectory:
+ // then against an FSDirectory.
Path dirPath = createTempDir("lucene.test.atomic");
directory = newFSDirectory(dirPath);
runTest(directory);
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java b/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java
index 111c6832e63..7a7a160d453 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java
@@ -39,10 +39,10 @@ import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
@@ -688,7 +688,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
}
public void testNPEAfterInvalidReindex1() throws Exception {
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
Document doc = new Document();
@@ -735,7 +735,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
}
public void testNPEAfterInvalidReindex2() throws Exception {
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
Document doc = new Document();
@@ -974,7 +974,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
// LUCENE-5931: we make a "best effort" to catch this abuse and throw a clear(er)
// exception than what would otherwise look like hard to explain index corruption during searching
public void testDeleteIndexFilesWhileReaderStillOpen() throws Exception {
- RAMDirectory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
index 41fdd4b7c0d..6733c2dd02f 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
@@ -80,6 +80,7 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.BaseDirectoryWrapper;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.FilterDirectory;
@@ -91,7 +92,6 @@ import org.apache.lucene.store.MMapDirectory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.NIOFSDirectory;
import org.apache.lucene.store.NoLockFactory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.store.SimpleFSLockFactory;
import org.apache.lucene.util.Bits;
@@ -869,7 +869,7 @@ public class TestIndexWriter extends LuceneTestCase {
this.random = new Random(random().nextLong());
// make a little directory for addIndexes
// LUCENE-2239: won't work with NIOFS/MMAP
- adder = new MockDirectoryWrapper(random, new RAMDirectory());
+ adder = new MockDirectoryWrapper(random, new ByteBuffersDirectory());
IndexWriterConfig conf = newIndexWriterConfig(random, new MockAnalyzer(random));
if (conf.getMergeScheduler() instanceof ConcurrentMergeScheduler) {
conf.setMergeScheduler(new SuppressingConcurrentMergeScheduler() {
@@ -910,7 +910,7 @@ public class TestIndexWriter extends LuceneTestCase {
@Override
public void run() {
// LUCENE-2239: won't work with NIOFS/MMAP
- MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory());
+ MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new ByteBuffersDirectory());
// open/close slowly sometimes
dir.setUseSlowOpenClosers(true);
@@ -1596,7 +1596,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testDeleteAllNRTLeftoverFiles() throws Exception {
- MockDirectoryWrapper d = new MockDirectoryWrapper(random(), new RAMDirectory());
+ MockDirectoryWrapper d = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
for(int i = 0; i < 20; i++) {
@@ -1618,7 +1618,7 @@ public class TestIndexWriter extends LuceneTestCase {
}
public void testNRTReaderVersion() throws Exception {
- Directory d = new MockDirectoryWrapper(random(), new RAMDirectory());
+ Directory d = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newStringField("id", "0", Field.Store.YES));
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
index bafe4873758..0bfa1e1e202 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
@@ -52,13 +52,14 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.BaseDirectoryWrapper;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOSupplier;
@@ -1713,9 +1714,15 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// TODO: we could also check isValid, to catch "broken" bytesref values, might be too much?
- static class UOEDirectory extends RAMDirectory {
+ static class UOEDirectory extends FilterDirectory {
boolean doFail = false;
+ /** Creates a failure-simulating directory backed by an
+ * in-memory ByteBuffersDirectory. */
+ protected UOEDirectory() {
+ super(new ByteBuffersDirectory());
+ }
+
@Override
public IndexInput openInput(String name, IOContext context) throws IOException {
if (doFail && name.startsWith("segments_")) {
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
index a00e01beb54..cd97703ad09 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
@@ -32,9 +32,9 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.AlreadyClosedException;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.IOSupplier;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
@@ -62,7 +62,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
if (VERBOSE) {
System.out.println("TEST: cycle: diskFree=" + diskFree);
}
- MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory());
+ MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
dir.setMaxSizeInBytes(diskFree);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
MergeScheduler ms = writer.getConfig().getMergeScheduler();
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java
index 1e01712dcf1..5996d7e81ea 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java
@@ -33,10 +33,10 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.AlreadyClosedException;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.InfoStream;
@@ -1111,7 +1111,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
/** Make sure if all we do is open NRT reader against
* writer, we don't see merge starvation. */
public void testTooManySegments() throws Exception {
- Directory dir = getAssertNoDeletesDirectory(new RAMDirectory());
+ Directory dir = getAssertNoDeletesDirectory(new ByteBuffersDirectory());
// Don't use newIndexWriterConfig, because we need a
// "sane" mergePolicy:
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestLazyProxSkipping.java b/lucene/core/src/test/org/apache/lucene/index/TestLazyProxSkipping.java
index be63c84713a..7062a2ca43a 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestLazyProxSkipping.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestLazyProxSkipping.java
@@ -25,11 +25,11 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.TestUtil;
@@ -73,7 +73,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {
return new TokenStreamComponents(new MockTokenizer(MockTokenizer.WHITESPACE, true));
}
};
- Directory directory = new SeekCountingDirectory(new RAMDirectory());
+ Directory directory = new SeekCountingDirectory(new ByteBuffersDirectory());
// note: test explicitly disables payloads
IndexWriter writer = new IndexWriter(
directory,
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java b/lucene/core/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java
index bc14cb8a690..8a8d88ca30e 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java
@@ -24,11 +24,11 @@ import org.apache.lucene.analysis.*;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
@@ -44,8 +44,8 @@ import org.junit.Before;
*/
public class TestMultiLevelSkipList extends LuceneTestCase {
- class CountingRAMDirectory extends MockDirectoryWrapper {
- public CountingRAMDirectory(Directory delegate) {
+ class CountingDirectory extends MockDirectoryWrapper {
+ public CountingDirectory(Directory delegate) {
super(random(), delegate);
}
@@ -66,7 +66,7 @@ public class TestMultiLevelSkipList extends LuceneTestCase {
}
public void testSimpleSkip() throws IOException {
- Directory dir = new CountingRAMDirectory(new RAMDirectory());
+ Directory dir = new CountingDirectory(new ByteBuffersDirectory());
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new PayloadAnalyzer())
.setCodec(TestUtil.alwaysPostingsFormat(TestUtil.getDefaultPostingsFormat()))
.setMergePolicy(newLogMergePolicy()));
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestMultiTermsEnum.java b/lucene/core/src/test/org/apache/lucene/index/TestMultiTermsEnum.java
index aa325a1aad7..ffa1f3c3636 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestMultiTermsEnum.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestMultiTermsEnum.java
@@ -27,8 +27,8 @@ import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
@@ -38,7 +38,7 @@ public class TestMultiTermsEnum extends LuceneTestCase {
// LUCENE-6826
public void testNoTermsInField() throws Exception {
- Directory directory = new RAMDirectory();
+ Directory directory = new ByteBuffersDirectory();
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(new MockAnalyzer(random())));
Document document = new Document();
document.add(new StringField("deleted", "0", Field.Store.YES));
@@ -47,7 +47,7 @@ public class TestMultiTermsEnum extends LuceneTestCase {
DirectoryReader reader = DirectoryReader.open(writer);
writer.close();
- Directory directory2 = new RAMDirectory();
+ Directory directory2 = new ByteBuffersDirectory();
writer = new IndexWriter(directory2, new IndexWriterConfig(new MockAnalyzer(random())));
    List<LeafReaderContext> leaves = reader.leaves();
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java b/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java
index ffb9a420667..1d8c338d19a 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java
@@ -120,7 +120,7 @@ public class TestPayloads extends LuceneTestCase {
ram.close();
}
- // Tests if payloads are correctly stored and loaded using both RamDirectory and FSDirectory
+ // Tests if payloads are correctly stored and loaded.
public void testPayloadsEncoding() throws Exception {
Directory dir = newDirectory();
performTest(dir);
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPendingDeletes.java b/lucene/core/src/test/org/apache/lucene/index/TestPendingDeletes.java
index e82ec10d0e6..143f671990b 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestPendingDeletes.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestPendingDeletes.java
@@ -22,8 +22,9 @@ import java.util.Collections;
import java.util.HashMap;
import org.apache.lucene.codecs.Codec;
+import org.apache.lucene.store.ByteBuffersDirectory;
+import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.StringHelper;
@@ -37,7 +38,7 @@ public class TestPendingDeletes extends LuceneTestCase {
}
public void testDeleteDoc() throws IOException {
- RAMDirectory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "test", 10, false, Codec.getDefault(),
Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
SegmentCommitInfo commitInfo = new SegmentCommitInfo(si, 0, 0, -1, -1, -1);
@@ -71,7 +72,7 @@ public class TestPendingDeletes extends LuceneTestCase {
}
public void testWriteLiveDocs() throws IOException {
- RAMDirectory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "test", 6, false, Codec.getDefault(),
Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
SegmentCommitInfo commitInfo = new SegmentCommitInfo(si, 0, 0, -1, -1, -1);
@@ -128,7 +129,7 @@ public class TestPendingDeletes extends LuceneTestCase {
}
public void testIsFullyDeleted() throws IOException {
- RAMDirectory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "test", 3, false, Codec.getDefault(),
Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
SegmentCommitInfo commitInfo = new SegmentCommitInfo(si, 0, 0, -1, -1, -1);
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPendingSoftDeletes.java b/lucene/core/src/test/org/apache/lucene/index/TestPendingSoftDeletes.java
index c882a3b88f3..666b3c4052e 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestPendingSoftDeletes.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestPendingSoftDeletes.java
@@ -29,8 +29,8 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
@@ -147,7 +147,7 @@ public class TestPendingSoftDeletes extends TestPendingDeletes {
}
public void testApplyUpdates() throws IOException {
- RAMDirectory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "test", 10, false, Codec.getDefault(),
Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
SegmentCommitInfo commitInfo = new SegmentCommitInfo(si, 0, 0, -1, -1, -1);
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java b/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java
index 8cc3c9cd878..bc60c7924a5 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java
@@ -23,9 +23,9 @@ import java.util.Random;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
@@ -35,7 +35,7 @@ import org.apache.lucene.util.TestUtil;
public class TestPerSegmentDeletes extends LuceneTestCase {
public void testDeletes1() throws Exception {
//IndexWriter.debug2 = System.out;
- Directory dir = new MockDirectoryWrapper(new Random(random().nextLong()), new RAMDirectory());
+ Directory dir = new MockDirectoryWrapper(new Random(random().nextLong()), new ByteBuffersDirectory());
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
iwc.setMergeScheduler(new SerialMergeScheduler());
iwc.setMaxBufferedDocs(5000);
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
index a9217099456..348a31a53ef 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
@@ -33,9 +33,9 @@ import org.apache.lucene.document.StringField;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.PointValues.IntersectVisitor;
import org.apache.lucene.index.PointValues.Relation;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
@@ -624,7 +624,7 @@ public class TestPointValues extends LuceneTestCase {
}
public void testCheckIndexIncludesPoints() throws Exception {
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
Document doc = new Document();
doc.add(new IntPoint("int1", 17));
@@ -659,7 +659,7 @@ public class TestPointValues extends LuceneTestCase {
}
public void testMergedStatsOneSegmentWithoutPoints() throws IOException {
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null).setMergePolicy(NoMergePolicy.INSTANCE));
w.addDocument(new Document());
DirectoryReader.open(w).close();
@@ -680,7 +680,7 @@ public class TestPointValues extends LuceneTestCase {
}
public void testMergedStatsAllPointsDeleted() throws IOException {
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
w.addDocument(new Document());
Document doc = new Document();
@@ -718,7 +718,7 @@ public class TestPointValues extends LuceneTestCase {
private void doTestMergedStats() throws IOException {
final int numDims = TestUtil.nextInt(random(), 1, 8);
final int numBytesPerDim = TestUtil.nextInt(random(), 1, 16);
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
final int numDocs = TestUtil.nextInt(random(), 10, 20);
for (int i = 0; i < numDocs; ++i) {
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSizeBoundedForceMerge.java b/lucene/core/src/test/org/apache/lucene/index/TestSizeBoundedForceMerge.java
index 81148680da3..64aea7fe1d4 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSizeBoundedForceMerge.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSizeBoundedForceMerge.java
@@ -22,8 +22,8 @@ import java.io.IOException;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
public class TestSizeBoundedForceMerge extends LuceneTestCase {
@@ -54,7 +54,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
public void testByteSizeLimit() throws Exception {
// tests that the max merge size constraint is applied during forceMerge.
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
// Prepare an index w/ several small segments and a large one.
IndexWriterConfig conf = newWriterConfig();
@@ -85,7 +85,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
public void testNumDocsLimit() throws Exception {
// tests that the max merge docs constraint is applied during forceMerge.
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
// Prepare an index w/ several small segments and a large one.
IndexWriterConfig conf = newWriterConfig();
@@ -116,7 +116,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
}
public void testLastSegmentTooLarge() throws Exception {
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
IndexWriterConfig conf = newWriterConfig();
IndexWriter writer = new IndexWriter(dir, conf);
@@ -142,7 +142,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
}
public void testFirstSegmentTooLarge() throws Exception {
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
IndexWriterConfig conf = newWriterConfig();
IndexWriter writer = new IndexWriter(dir, conf);
@@ -168,7 +168,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
}
public void testAllSegmentsSmall() throws Exception {
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
IndexWriterConfig conf = newWriterConfig();
IndexWriter writer = new IndexWriter(dir, conf);
@@ -194,7 +194,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
}
public void testAllSegmentsLarge() throws Exception {
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
IndexWriterConfig conf = newWriterConfig();
IndexWriter writer = new IndexWriter(dir, conf);
@@ -219,7 +219,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
}
public void testOneLargeOneSmall() throws Exception {
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
IndexWriterConfig conf = newWriterConfig();
IndexWriter writer = new IndexWriter(dir, conf);
@@ -245,7 +245,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
}
public void testMergeFactor() throws Exception {
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
IndexWriterConfig conf = newWriterConfig();
IndexWriter writer = new IndexWriter(dir, conf);
@@ -277,7 +277,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
}
public void testSingleMergeableSegment() throws Exception {
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
IndexWriterConfig conf = newWriterConfig();
IndexWriter writer = new IndexWriter(dir, conf);
@@ -306,7 +306,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
}
public void testSingleNonMergeableSegment() throws Exception {
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
IndexWriterConfig conf = newWriterConfig();
IndexWriter writer = new IndexWriter(dir, conf);
@@ -330,7 +330,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
}
public void testSingleMergeableTooLargeSegment() throws Exception {
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
IndexWriterConfig conf = newWriterConfig();
IndexWriter writer = new IndexWriter(dir, conf);
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestStressIndexing.java b/lucene/core/src/test/org/apache/lucene/index/TestStressIndexing.java
index b302e9885b2..e65960d3974 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestStressIndexing.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestStressIndexing.java
@@ -159,10 +159,7 @@ public class TestStressIndexing extends LuceneTestCase {
//System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
}
- /*
- Run above stress test against RAMDirectory and then
- FSDirectory.
- */
+ /* */
public void testStressIndexAndSearching() throws Exception {
Directory directory = newMaybeVirusCheckingDirectory();
if (directory instanceof MockDirectoryWrapper) {
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestThreadedForceMerge.java b/lucene/core/src/test/org/apache/lucene/index/TestThreadedForceMerge.java
index ad8c13196dd..eda3e3ebffd 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestThreadedForceMerge.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestThreadedForceMerge.java
@@ -137,10 +137,7 @@ public class TestThreadedForceMerge extends LuceneTestCase {
writer.close();
}
- /*
- Run above stress test against RAMDirectory and then
- FSDirectory.
- */
+ /* */
public void testThreadedForceMerge() throws Exception {
Directory directory = newDirectory();
runTest(random(), directory);
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestTransactions.java b/lucene/core/src/test/org/apache/lucene/index/TestTransactions.java
index 882d43223b6..b1f1b033370 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestTransactions.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestTransactions.java
@@ -24,9 +24,9 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StringField;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.English;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
@@ -231,8 +231,8 @@ public class TestTransactions extends LuceneTestCase {
public void testTransactions() throws Throwable {
// we cant use non-ramdir on windows, because this test needs to double-write.
- MockDirectoryWrapper dir1 = new MockDirectoryWrapper(random(), new RAMDirectory());
- MockDirectoryWrapper dir2 = new MockDirectoryWrapper(random(), new RAMDirectory());
+ MockDirectoryWrapper dir1 = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
+ MockDirectoryWrapper dir2 = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
dir1.failOn(new RandomFailure());
dir2.failOn(new RandomFailure());
dir1.setFailOnOpenInput(false);
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestTryDelete.java b/lucene/core/src/test/org/apache/lucene/index/TestTryDelete.java
index 0f0601ab09b..d9f829f2f43 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestTryDelete.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestTryDelete.java
@@ -30,8 +30,8 @@ import org.apache.lucene.search.SearcherFactory;
import org.apache.lucene.search.SearcherManager;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
@@ -53,7 +53,7 @@ public class TestTryDelete extends LuceneTestCase
private static Directory createIndex ()
throws IOException
{
- Directory directory = new RAMDirectory();
+ Directory directory = new ByteBuffersDirectory();
IndexWriter writer = getWriter(directory);
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java
index bd9ce08f936..66ec84a4204 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java
@@ -33,8 +33,8 @@ import org.apache.lucene.index.MultiTerms;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.Ignore;
@@ -337,7 +337,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
}
public void testZeroPosIncr() throws IOException {
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
final Token[] tokens = new Token[3];
tokens[0] = new Token();
tokens[0].append("a");
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestScorerPerf.java b/lucene/core/src/test/org/apache/lucene/search/TestScorerPerf.java
index 60363d20246..be3f9694500 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestScorerPerf.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestScorerPerf.java
@@ -23,7 +23,6 @@ import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.LeafReaderContext;
@@ -35,30 +34,11 @@ import org.apache.lucene.util.LuceneTestCase;
public class TestScorerPerf extends LuceneTestCase {
- boolean validate = true; // set to false when doing performance testing
-
- FixedBitSet[] sets;
- Term[] terms;
- IndexSearcher s;
- IndexReader r;
- Directory d;
-
- // TODO: this should be setUp()....
- public void createDummySearcher() throws Exception {
- // Create a dummy index with nothing in it.
- // This could possibly fail if Lucene starts checking for docid ranges...
- d = newDirectory();
- IndexWriter iw = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random())));
- iw.addDocument(new Document());
- iw.close();
- r = DirectoryReader.open(d);
- s = newSearcher(r);
- s.setQueryCache(null);
- }
+ private final boolean validate = true; // set to false when doing performance testing
public void createRandomTerms(int nDocs, int nTerms, double power, Directory dir) throws Exception {
int[] freq = new int[nTerms];
- terms = new Term[nTerms];
+ Term[] terms = new Term[nTerms];
for (int i=0; i files = Arrays.asList(ramDir.listAll());
- assertFalse(files.contains("subdir"));
- }
-
// LUCENE-1468
public void testNotDirectory() throws Throwable {
Path path = createTempDir("testnotdir");
diff --git a/lucene/core/src/test/org/apache/lucene/store/TestFileSwitchDirectory.java b/lucene/core/src/test/org/apache/lucene/store/TestFileSwitchDirectory.java
index 5c897862281..f324bb35108 100644
--- a/lucene/core/src/test/org/apache/lucene/store/TestFileSwitchDirectory.java
+++ b/lucene/core/src/test/org/apache/lucene/store/TestFileSwitchDirectory.java
@@ -44,9 +44,9 @@ public class TestFileSwitchDirectory extends BaseDirectoryTestCase {
fileExtensions.add(CompressingStoredFieldsWriter.FIELDS_EXTENSION);
fileExtensions.add(CompressingStoredFieldsWriter.FIELDS_INDEX_EXTENSION);
- MockDirectoryWrapper primaryDir = new MockDirectoryWrapper(random(), new RAMDirectory());
+ MockDirectoryWrapper primaryDir = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
primaryDir.setCheckIndexOnClose(false); // only part of an index
- MockDirectoryWrapper secondaryDir = new MockDirectoryWrapper(random(), new RAMDirectory());
+ MockDirectoryWrapper secondaryDir = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
secondaryDir.setCheckIndexOnClose(false); // only part of an index
FileSwitchDirectory fsd = new FileSwitchDirectory(fileExtensions, primaryDir, secondaryDir, true);
diff --git a/lucene/core/src/test/org/apache/lucene/store/TestFilterDirectory.java b/lucene/core/src/test/org/apache/lucene/store/TestFilterDirectory.java
index 81b8fad2bb7..09e97ba126a 100644
--- a/lucene/core/src/test/org/apache/lucene/store/TestFilterDirectory.java
+++ b/lucene/core/src/test/org/apache/lucene/store/TestFilterDirectory.java
@@ -29,7 +29,7 @@ public class TestFilterDirectory extends BaseDirectoryTestCase {
@Override
protected Directory getDirectory(Path path) {
- return new FilterDirectory(new RAMDirectory()) {};
+ return new FilterDirectory(new ByteBuffersDirectory()) {};
}
@Test
diff --git a/lucene/core/src/test/org/apache/lucene/store/TestLockFactory.java b/lucene/core/src/test/org/apache/lucene/store/TestLockFactory.java
index c2578612cad..a6ab624347f 100644
--- a/lucene/core/src/test/org/apache/lucene/store/TestLockFactory.java
+++ b/lucene/core/src/test/org/apache/lucene/store/TestLockFactory.java
@@ -37,7 +37,7 @@ public class TestLockFactory extends LuceneTestCase {
public void testCustomLockFactory() throws IOException {
MockLockFactory lf = new MockLockFactory();
- Directory dir = new MockDirectoryWrapper(random(), new RAMDirectory(lf));
+ Directory dir = new MockDirectoryWrapper(random(), new ByteBuffersDirectory(lf));
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
@@ -52,14 +52,14 @@ public class TestLockFactory extends LuceneTestCase {
writer.close();
}
- // Verify: we can use the NoLockFactory with RAMDirectory w/ no
- // exceptions raised:
+ // Verify: we can use the NoLockFactory w/ no exceptions raised.
// Verify: NoLockFactory allows two IndexWriters
- public void testRAMDirectoryNoLocking() throws IOException {
- MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory(NoLockFactory.INSTANCE));
+ public void testDirectoryNoLocking() throws IOException {
+ MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new ByteBuffersDirectory(NoLockFactory.INSTANCE));
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
- writer.commit(); // required so the second open succeed
+ writer.commit(); // required so the second open succeeds
+
// Create a 2nd IndexWriter. This is normally not allowed but it should run through since we're not
// using any locks:
IndexWriter writer2 = null;
diff --git a/lucene/core/src/test/org/apache/lucene/store/TestNRTCachingDirectory.java b/lucene/core/src/test/org/apache/lucene/store/TestNRTCachingDirectory.java
index 227d41f9c65..0f2e200bf26 100644
--- a/lucene/core/src/test/org/apache/lucene/store/TestNRTCachingDirectory.java
+++ b/lucene/core/src/test/org/apache/lucene/store/TestNRTCachingDirectory.java
@@ -44,7 +44,7 @@ public class TestNRTCachingDirectory extends BaseDirectoryTestCase {
// would be good to investigate further...
@Override
protected Directory getDirectory(Path path) throws IOException {
- return new NRTCachingDirectory(new RAMDirectory(),
+ return new NRTCachingDirectory(new ByteBuffersDirectory(),
.1 + 2.0*random().nextDouble(),
.1 + 5.0*random().nextDouble());
}
diff --git a/lucene/core/src/test/org/apache/lucene/store/TestSingleInstanceLockFactory.java b/lucene/core/src/test/org/apache/lucene/store/TestSingleInstanceLockFactory.java
index 14863ffb53c..73cd9cef1b8 100644
--- a/lucene/core/src/test/org/apache/lucene/store/TestSingleInstanceLockFactory.java
+++ b/lucene/core/src/test/org/apache/lucene/store/TestSingleInstanceLockFactory.java
@@ -33,17 +33,15 @@ public class TestSingleInstanceLockFactory extends BaseLockFactoryTestCase {
return newDirectory(random(), new SingleInstanceLockFactory());
}
- // Verify: SingleInstanceLockFactory is the default lock for RAMDirectory
- // Verify: RAMDirectory does basic locking correctly (can't create two IndexWriters)
- public void testDefaultRAMDirectory() throws IOException {
- RAMDirectory dir = new RAMDirectory();
-
- assertTrue("RAMDirectory did not use correct LockFactory: got " + dir.lockFactory,
- dir.lockFactory instanceof SingleInstanceLockFactory);
-
+ // Verify: basic locking on single instance lock factory (can't create two IndexWriters)
+ public void testDefaultLockFactory() throws IOException {
+ ByteBuffersDirectory dir = new ByteBuffersDirectory();
+
+ assertTrue(dir.lockFactory instanceof SingleInstanceLockFactory);
+
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
- // Create a 2nd IndexWriter. This should fail:
+ // Create a 2nd IndexWriter. This should fail.
expectThrows(IOException.class, () -> {
new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
});
diff --git a/lucene/core/src/test/org/apache/lucene/store/TestTrackingDirectoryWrapper.java b/lucene/core/src/test/org/apache/lucene/store/TestTrackingDirectoryWrapper.java
index 008ac748092..69192871078 100644
--- a/lucene/core/src/test/org/apache/lucene/store/TestTrackingDirectoryWrapper.java
+++ b/lucene/core/src/test/org/apache/lucene/store/TestTrackingDirectoryWrapper.java
@@ -25,22 +25,22 @@ public class TestTrackingDirectoryWrapper extends BaseDirectoryTestCase {
@Override
protected Directory getDirectory(Path path) throws IOException {
- return new TrackingDirectoryWrapper(new RAMDirectory());
+ return new TrackingDirectoryWrapper(new ByteBuffersDirectory());
}
public void testTrackEmpty() throws IOException {
- TrackingDirectoryWrapper dir = new TrackingDirectoryWrapper(new RAMDirectory());
+ TrackingDirectoryWrapper dir = new TrackingDirectoryWrapper(new ByteBuffersDirectory());
assertEquals(Collections.emptySet(), dir.getCreatedFiles());
}
public void testTrackCreate() throws IOException {
- TrackingDirectoryWrapper dir = new TrackingDirectoryWrapper(new RAMDirectory());
+ TrackingDirectoryWrapper dir = new TrackingDirectoryWrapper(new ByteBuffersDirectory());
dir.createOutput("foo", newIOContext(random())).close();
assertEquals(asSet("foo"), dir.getCreatedFiles());
}
public void testTrackDelete() throws IOException {
- TrackingDirectoryWrapper dir = new TrackingDirectoryWrapper(new RAMDirectory());
+ TrackingDirectoryWrapper dir = new TrackingDirectoryWrapper(new ByteBuffersDirectory());
dir.createOutput("foo", newIOContext(random())).close();
assertEquals(asSet("foo"), dir.getCreatedFiles());
dir.deleteFile("foo");
@@ -48,7 +48,7 @@ public class TestTrackingDirectoryWrapper extends BaseDirectoryTestCase {
}
public void testTrackRename() throws IOException {
- TrackingDirectoryWrapper dir = new TrackingDirectoryWrapper(new RAMDirectory());
+ TrackingDirectoryWrapper dir = new TrackingDirectoryWrapper(new ByteBuffersDirectory());
dir.createOutput("foo", newIOContext(random())).close();
assertEquals(asSet("foo"), dir.getCreatedFiles());
dir.rename("foo", "bar");
@@ -56,8 +56,8 @@ public class TestTrackingDirectoryWrapper extends BaseDirectoryTestCase {
}
public void testTrackCopyFrom() throws IOException {
- TrackingDirectoryWrapper source = new TrackingDirectoryWrapper(new RAMDirectory());
- TrackingDirectoryWrapper dest = new TrackingDirectoryWrapper(new RAMDirectory());
+ TrackingDirectoryWrapper source = new TrackingDirectoryWrapper(new ByteBuffersDirectory());
+ TrackingDirectoryWrapper dest = new TrackingDirectoryWrapper(new ByteBuffersDirectory());
source.createOutput("foo", newIOContext(random())).close();
assertEquals(asSet("foo"), source.getCreatedFiles());
dest.copyFrom(source, "foo", "bar", newIOContext(random()));
diff --git a/lucene/core/src/test/org/apache/lucene/util/packed/TestPackedInts.java b/lucene/core/src/test/org/apache/lucene/util/packed/TestPackedInts.java
index 69c1b3f7720..63366d36e1a 100644
--- a/lucene/core/src/test/org/apache/lucene/util/packed/TestPackedInts.java
+++ b/lucene/core/src/test/org/apache/lucene/util/packed/TestPackedInts.java
@@ -28,12 +28,12 @@ import java.util.Random;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.ByteArrayDataInput;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.LongValues;
import org.apache.lucene.util.LongsRef;
@@ -815,7 +815,7 @@ public class TestPackedInts extends LuceneTestCase {
final int valueCount = TestUtil.nextInt(random(), 1, 2048);
for (int bpv = 1; bpv <= 64; ++bpv) {
final int maxValue = (int) Math.min(PackedInts.maxValue(31), PackedInts.maxValue(bpv));
- final RAMDirectory directory = new RAMDirectory();
+ final Directory directory = new ByteBuffersDirectory();
List packedInts = createPackedInts(valueCount, bpv);
for (PackedInts.Mutable mutable : packedInts) {
for (int i = 0; i < mutable.size(); ++i) {
diff --git a/lucene/demo/src/java/org/apache/lucene/demo/facet/AssociationsFacetsExample.java b/lucene/demo/src/java/org/apache/lucene/demo/facet/AssociationsFacetsExample.java
index 3e2737d0c8f..7e48382d81d 100644
--- a/lucene/demo/src/java/org/apache/lucene/demo/facet/AssociationsFacetsExample.java
+++ b/lucene/demo/src/java/org/apache/lucene/demo/facet/AssociationsFacetsExample.java
@@ -41,14 +41,14 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
/** Shows example usage of category associations. */
public class AssociationsFacetsExample {
- private final Directory indexDir = new RAMDirectory();
- private final Directory taxoDir = new RAMDirectory();
+ private final Directory indexDir = new ByteBuffersDirectory();
+ private final Directory taxoDir = new ByteBuffersDirectory();
private final FacetsConfig config;
/** Empty constructor */
diff --git a/lucene/demo/src/java/org/apache/lucene/demo/facet/DistanceFacetsExample.java b/lucene/demo/src/java/org/apache/lucene/demo/facet/DistanceFacetsExample.java
index fa7ce8386d4..7c033ce4c9a 100644
--- a/lucene/demo/src/java/org/apache/lucene/demo/facet/DistanceFacetsExample.java
+++ b/lucene/demo/src/java/org/apache/lucene/demo/facet/DistanceFacetsExample.java
@@ -48,8 +48,8 @@ import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.SloppyMath;
/** Shows simple usage of dynamic range faceting, using the
@@ -61,7 +61,7 @@ public class DistanceFacetsExample implements Closeable {
final DoubleRange FIVE_KM = new DoubleRange("< 5 km", 0.0, true, 5.0, false);
final DoubleRange TEN_KM = new DoubleRange("< 10 km", 0.0, true, 10.0, false);
- private final Directory indexDir = new RAMDirectory();
+ private final Directory indexDir = new ByteBuffersDirectory();
private IndexSearcher searcher;
private final FacetsConfig config = new FacetsConfig();
diff --git a/lucene/demo/src/java/org/apache/lucene/demo/facet/ExpressionAggregationFacetsExample.java b/lucene/demo/src/java/org/apache/lucene/demo/facet/ExpressionAggregationFacetsExample.java
index 7f29b384216..7363e7d03c0 100644
--- a/lucene/demo/src/java/org/apache/lucene/demo/facet/ExpressionAggregationFacetsExample.java
+++ b/lucene/demo/src/java/org/apache/lucene/demo/facet/ExpressionAggregationFacetsExample.java
@@ -43,15 +43,15 @@ import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.SortField;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
/** Shows facets aggregation by an expression. */
public class ExpressionAggregationFacetsExample {
- private final Directory indexDir = new RAMDirectory();
- private final Directory taxoDir = new RAMDirectory();
+ private final Directory indexDir = new ByteBuffersDirectory();
+ private final Directory taxoDir = new ByteBuffersDirectory();
private final FacetsConfig config = new FacetsConfig();
/** Empty constructor */
diff --git a/lucene/demo/src/java/org/apache/lucene/demo/facet/MultiCategoryListsFacetsExample.java b/lucene/demo/src/java/org/apache/lucene/demo/facet/MultiCategoryListsFacetsExample.java
index c3647976a95..7c2e5fccac5 100644
--- a/lucene/demo/src/java/org/apache/lucene/demo/facet/MultiCategoryListsFacetsExample.java
+++ b/lucene/demo/src/java/org/apache/lucene/demo/facet/MultiCategoryListsFacetsExample.java
@@ -38,14 +38,14 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
/** Demonstrates indexing categories into different indexed fields. */
public class MultiCategoryListsFacetsExample {
- private final Directory indexDir = new RAMDirectory();
- private final Directory taxoDir = new RAMDirectory();
+ private final Directory indexDir = new ByteBuffersDirectory();
+ private final Directory taxoDir = new ByteBuffersDirectory();
private final FacetsConfig config = new FacetsConfig();
/** Creates a new instance and populates the category list params mapping. */
@@ -87,7 +87,7 @@ public class MultiCategoryListsFacetsExample {
doc.add(new FacetField("Author", "Frank"));
doc.add(new FacetField("Publish Date", "1999", "5", "5"));
indexWriter.addDocument(config.build(taxoWriter, doc));
-
+
indexWriter.close();
taxoWriter.close();
}
@@ -114,10 +114,10 @@ public class MultiCategoryListsFacetsExample {
Facets pubDate = new FastTaxonomyFacetCounts("pubdate", taxoReader, config, fc);
results.add(pubDate.getTopChildren(10, "Publish Date"));
-
+
indexReader.close();
taxoReader.close();
-
+
return results;
}
@@ -126,7 +126,7 @@ public class MultiCategoryListsFacetsExample {
index();
return search();
}
-
+
/** Runs the search example and prints the results. */
public static void main(String[] args) throws Exception {
System.out.println("Facet counting over multiple category lists example:");
diff --git a/lucene/demo/src/java/org/apache/lucene/demo/facet/RangeFacetsExample.java b/lucene/demo/src/java/org/apache/lucene/demo/facet/RangeFacetsExample.java
index 32a40714409..eadc06adc44 100644
--- a/lucene/demo/src/java/org/apache/lucene/demo/facet/RangeFacetsExample.java
+++ b/lucene/demo/src/java/org/apache/lucene/demo/facet/RangeFacetsExample.java
@@ -38,13 +38,13 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
/** Shows simple usage of dynamic range faceting. */
public class RangeFacetsExample implements Closeable {
- private final Directory indexDir = new RAMDirectory();
+ private final Directory indexDir = new ByteBuffersDirectory();
private IndexSearcher searcher;
private final long nowSec = System.currentTimeMillis();
diff --git a/lucene/demo/src/java/org/apache/lucene/demo/facet/SimpleFacetsExample.java b/lucene/demo/src/java/org/apache/lucene/demo/facet/SimpleFacetsExample.java
index df424cb5342..3553cfaf72d 100644
--- a/lucene/demo/src/java/org/apache/lucene/demo/facet/SimpleFacetsExample.java
+++ b/lucene/demo/src/java/org/apache/lucene/demo/facet/SimpleFacetsExample.java
@@ -41,14 +41,14 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
/** Shows simple usage of faceted indexing and search. */
public class SimpleFacetsExample {
- private final Directory indexDir = new RAMDirectory();
- private final Directory taxoDir = new RAMDirectory();
+ private final Directory indexDir = new ByteBuffersDirectory();
+ private final Directory taxoDir = new ByteBuffersDirectory();
private final FacetsConfig config = new FacetsConfig();
/** Empty constructor */
diff --git a/lucene/demo/src/java/org/apache/lucene/demo/facet/SimpleSortedSetFacetsExample.java b/lucene/demo/src/java/org/apache/lucene/demo/facet/SimpleSortedSetFacetsExample.java
index 06b9bf42acd..89d9a7466fd 100644
--- a/lucene/demo/src/java/org/apache/lucene/demo/facet/SimpleSortedSetFacetsExample.java
+++ b/lucene/demo/src/java/org/apache/lucene/demo/facet/SimpleSortedSetFacetsExample.java
@@ -38,8 +38,8 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
/** Shows simple usage of faceted indexing and search,
* using {@link SortedSetDocValuesFacetField} and {@link
@@ -47,7 +47,7 @@ import org.apache.lucene.store.RAMDirectory;
public class SimpleSortedSetFacetsExample {
- private final Directory indexDir = new RAMDirectory();
+ private final Directory indexDir = new ByteBuffersDirectory();
private final FacetsConfig config = new FacetsConfig();
/** Empty constructor */
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/SlowRAMDirectory.java b/lucene/facet/src/test/org/apache/lucene/facet/SlowDirectory.java
similarity index 95%
rename from lucene/facet/src/test/org/apache/lucene/facet/SlowRAMDirectory.java
rename to lucene/facet/src/test/org/apache/lucene/facet/SlowDirectory.java
index f6e691e8724..6312b51589b 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/SlowRAMDirectory.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/SlowDirectory.java
@@ -19,17 +19,18 @@ package org.apache.lucene.facet;
import java.io.IOException;
import java.util.Random;
+import org.apache.lucene.store.ByteBuffersDirectory;
+import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.ThreadInterruptedException;
/**
* Test utility - slow directory
*/
// TODO: move to test-framework and sometimes use in tests?
-public class SlowRAMDirectory extends RAMDirectory {
+public class SlowDirectory extends FilterDirectory {
private static final int IO_SLEEP_THRESHOLD = 50;
@@ -40,7 +41,8 @@ public class SlowRAMDirectory extends RAMDirectory {
this.sleepMillis = sleepMillis;
}
- public SlowRAMDirectory(int sleepMillis, Random random) {
+ public SlowDirectory(int sleepMillis, Random random) {
+ super(new ByteBuffersDirectory());
this.sleepMillis = sleepMillis;
this.random = random;
}
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyCombined.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyCombined.java
index b2f33b207f3..9e7d128941b 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyCombined.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyCombined.java
@@ -24,7 +24,7 @@ import java.util.Arrays;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.lucene.facet.FacetTestCase;
-import org.apache.lucene.facet.SlowRAMDirectory;
+import org.apache.lucene.facet.SlowDirectory;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.store.Directory;
@@ -732,7 +732,7 @@ public class TestTaxonomyCombined extends FacetTestCase {
private void assertConsistentYoungestChild(final FacetLabel abPath,
final int abOrd, final int abYoungChildBase1, final int abYoungChildBase2, final int retry, int numCategories)
throws Exception {
- SlowRAMDirectory indexDir = new SlowRAMDirectory(-1, null); // no slowness for initialization
+ SlowDirectory indexDir = new SlowDirectory(-1, null); // no slowness for initialization
TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
tw.addCategory(new FacetLabel("a", "0"));
tw.addCategory(abPath);
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java
index de2023045a8..4c59318f463 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java
@@ -34,8 +34,8 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LogByteSizeMergePolicy;
import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.store.AlreadyClosedException;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.IOUtils;
import org.junit.Test;
@@ -177,7 +177,7 @@ public class TestDirectoryTaxonomyReader extends FacetTestCase {
@Test
public void testOpenIfChangedAndRefCount() throws Exception {
- Directory dir = new RAMDirectory(); // no need for random directories here
+ Directory dir = new ByteBuffersDirectory(); // no need for random directories here
DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(dir);
taxoWriter.addCategory(new FacetLabel("a"));
diff --git a/lucene/highlighter/src/java/org/apache/lucene/search/vectorhighlight/FieldTermStack.java b/lucene/highlighter/src/java/org/apache/lucene/search/vectorhighlight/FieldTermStack.java
index b1700c8a538..1c9f310aca5 100644
--- a/lucene/highlighter/src/java/org/apache/lucene/search/vectorhighlight/FieldTermStack.java
+++ b/lucene/highlighter/src/java/org/apache/lucene/search/vectorhighlight/FieldTermStack.java
@@ -39,29 +39,6 @@ public class FieldTermStack {
private final String fieldName;
LinkedList termList = new LinkedList<>();
- //public static void main( String[] args ) throws Exception {
- // Analyzer analyzer = new WhitespaceAnalyzer(Version.LATEST);
- // QueryParser parser = new QueryParser(Version.LATEST, "f", analyzer );
- // Query query = parser.parse( "a x:b" );
- // FieldQuery fieldQuery = new FieldQuery( query, true, false );
-
- // Directory dir = new RAMDirectory();
- // IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LATEST, analyzer));
- // Document doc = new Document();
- // FieldType ft = new FieldType(TextField.TYPE_STORED);
- // ft.setStoreTermVectors(true);
- // ft.setStoreTermVectorOffsets(true);
- // ft.setStoreTermVectorPositions(true);
- // doc.add( new Field( "f", ft, "a a a b b c a b b c d e f" ) );
- // doc.add( new Field( "f", ft, "b a b a f" ) );
- // writer.addDocument( doc );
- // writer.close();
-
- // IndexReader reader = IndexReader.open(dir1);
- // new FieldTermStack( reader, 0, "f", fieldQuery );
- // reader.close();
- //}
-
/**
* a constructor.
*
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
index 898846fa43c..ae4ad3c544f 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
@@ -106,7 +106,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
static final String FIELD_NAME = "contents";
private static final String NUMERIC_FIELD_NAME = "nfield";
private Query query;
- Directory ramDir;
+ Directory dir1;
public IndexSearcher searcher = null;
int numHighlights = 0;
MockAnalyzer analyzer;
@@ -1926,7 +1926,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
helper.start();
}
- private Directory dir;
+ private Directory dir2;
private Analyzer a;
public void testWeightedTermsWithDeletes() throws IOException, InvalidTokenOffsetsException {
@@ -1936,7 +1936,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
}
private void makeIndex() throws IOException {
- IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
+ IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
writer.addDocument( doc( "t_text1", "more random words for second field del" ) );
writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
@@ -1946,7 +1946,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
}
private void deleteDocument() throws IOException {
- IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setOpenMode(OpenMode.APPEND));
+ IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setOpenMode(OpenMode.APPEND));
writer.deleteDocuments( new Term( "t_text1", "del" ) );
// To see negative idf, keep comment the following line
//writer.forceMerge(1);
@@ -1955,7 +1955,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
private void searchIndex() throws IOException, InvalidTokenOffsetsException {
Query query = new TermQuery(new Term("t_text1", "random"));
- IndexReader reader = DirectoryReader.open(dir);
+ IndexReader reader = DirectoryReader.open(dir1);
IndexSearcher searcher = newSearcher(reader);
// This scorer can return negative idf -> null fragment
Scorer scorer = new QueryTermScorer( query, searcher.getIndexReader(), "t_text1" );
@@ -1978,7 +1978,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
final String text = "random words and words";//"words" at positions 1 & 4
Analyzer analyzer = new MockPayloadAnalyzer();//sets payload to "pos: X" (where X is position #)
- try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(analyzer))) {
+ try (IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(analyzer))) {
writer.deleteAll();
Document doc = new Document();
@@ -1986,7 +1986,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
writer.addDocument(doc);
writer.commit();
}
- try (IndexReader reader = DirectoryReader.open(dir)) {
+ try (IndexReader reader = DirectoryReader.open(dir1)) {
Query query = new SpanPayloadCheckQuery(new SpanTermQuery(new Term(FIELD_NAME, "words")),
Collections.singletonList(new BytesRef("pos: 1")));//just match the first "word" occurrence
IndexSearcher searcher = newSearcher(reader);
@@ -2004,32 +2004,6 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
assertEquals("random words and words", result);//only highlight first "word"
}
}
-
- /*
- *
- * public void testBigramAnalyzer() throws IOException, ParseException {
- * //test to ensure analyzers with none-consecutive start/end offsets //dont
- * double-highlight text //setup index 1 RAMDirectory ramDir = new
- * RAMDirectory(); Analyzer bigramAnalyzer=new CJKAnalyzer(); IndexWriter
- * writer = new IndexWriter(ramDir,bigramAnalyzer , true); Document d = new
- * Document(); Field f = new Field(FIELD_NAME, "java abc def", true, true,
- * true); d.add(f); writer.addDocument(d); writer.close(); IndexReader reader =
- * DirectoryReader.open(ramDir);
- *
- * IndexSearcher searcher=new IndexSearcher(reader); query =
- * QueryParser.parse("abc", FIELD_NAME, bigramAnalyzer);
- * System.out.println("Searching for: " + query.toString(FIELD_NAME)); hits =
- * searcher.search(query);
- *
- * Highlighter highlighter = new Highlighter(this,new
- * QueryFragmentScorer(query));
- *
- * for (int i = 0; i < hits.totalHits.value; i++) { String text =
- * searcher.doc2(hits.scoreDocs[i].doc).get(FIELD_NAME); TokenStream
- * tokenStream=bigramAnalyzer.tokenStream(FIELD_NAME,text);
- * String highlightedText = highlighter.getBestFragment(tokenStream,text);
- * System.out.println(highlightedText); } }
- */
@Override
public String highlightTerm(String originalText, TokenGroup group) {
@@ -2074,13 +2048,13 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
//Not many use this setup:
a = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
- dir = newDirectory();
+ dir1 = newDirectory();
//Most tests use this setup:
analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
- ramDir = newDirectory();
+ dir2 = newDirectory();
fieldType = random().nextBoolean() ? FIELD_TYPE_TV : TextField.TYPE_STORED;
- IndexWriter writer = new IndexWriter(ramDir, newIndexWriterConfig(analyzer).setMergePolicy(newLogMergePolicy()));
+ IndexWriter writer = new IndexWriter(dir2, newIndexWriterConfig(analyzer).setMergePolicy(newLogMergePolicy()));
for (String text : texts) {
writer.addDocument(doc(FIELD_NAME, text));
@@ -2113,7 +2087,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
writer.forceMerge(1);
writer.close();
- reader = DirectoryReader.open(ramDir);
+ reader = DirectoryReader.open(dir2);
//Misc:
numHighlights = 0;
@@ -2122,8 +2096,8 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
@Override
public void tearDown() throws Exception {
reader.close();
- dir.close();
- ramDir.close();
+ dir1.close();
+ dir2.close();
super.tearDown();
}
diff --git a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
index fde9438e9c9..d0dae237a07 100644
--- a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
+++ b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
@@ -42,7 +42,7 @@ import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.search.similarities.Similarity;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.Directory;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ByteBlockPool;
@@ -65,8 +65,8 @@ import org.apache.lucene.util.Version;
*
* Overview
*
- * This class is a replacement/substitute for a large subset of
- * {@link RAMDirectory} functionality. It is designed to
+ * This class is a replacement/substitute for RAM-resident {@link Directory} implementations.
+ * It is designed to
* enable maximum efficiency for on-the-fly matchmaking combining structured and
* fuzzy fulltext search in realtime streaming applications such as Nux XQuery based XML
* message queues, publish-subscribe systems for Blogs/newsfeeds, text chat, data acquisition and
@@ -156,11 +156,12 @@ import org.apache.lucene.util.Version;
*
* This class performs very well for very small texts (e.g. 10 chars)
* as well as for large texts (e.g. 10 MB) and everything in between.
- * Typically, it is about 10-100 times faster than RAMDirectory
.
- * Note that RAMDirectory
has particularly
+ * Typically, it is about 10-100 times faster than a RAM-resident directory.
+ *
+ * Note that other Directory
implementations have particularly
* large efficiency overheads for small to medium sized texts, both in time and space.
* Indexing a field with N tokens takes O(N) in the best case, and O(N logN) in the worst
- * case. Memory consumption is probably larger than for RAMDirectory
.
+ * case.
*
* Example throughput of many simple term queries over a single MemoryIndex:
* ~500000 queries/sec on a MacBook Pro, jdk 1.5.0_06, server VM.
@@ -707,7 +708,7 @@ public class MemoryIndex {
});
float score = scores[0];
return score;
- } catch (IOException e) { // can never happen (RAMDirectory)
+ } catch (IOException e) {
throw new RuntimeException(e);
}
}
diff --git a/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java b/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstDirectory.java
similarity index 97%
rename from lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java
rename to lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstDirectory.java
index 39c25a51538..a9d83b0fa8c 100644
--- a/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java
+++ b/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstDirectory.java
@@ -64,8 +64,8 @@ import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
import org.apache.lucene.search.spans.SpanOrQuery;
import org.apache.lucene.search.spans.SpanQuery;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.ByteBlockPool;
import org.apache.lucene.util.ByteBlockPool.Allocator;
import org.apache.lucene.util.BytesRef;
@@ -77,10 +77,10 @@ import org.apache.lucene.util.TestUtil;
import static org.hamcrest.CoreMatchers.equalTo;
/**
- * Verifies that Lucene MemoryIndex and RAMDirectory have the same behaviour,
+ * Verifies that Lucene MemoryIndex and RAM-resident Directory have the same behaviour,
* returning the same results for queries on some randomish indexes.
*/
-public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
+public class TestMemoryIndexAgainstDirectory extends BaseTokenStreamTestCase {
private Set queries = new HashSet<>();
public static final int ITERATIONS = 100 * RANDOM_MULTIPLIER;
@@ -116,15 +116,15 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
public void testRandomQueries() throws Exception {
MemoryIndex index = randomMemoryIndex();
for (int i = 0; i < ITERATIONS; i++) {
- assertAgainstRAMDirectory(index);
+ assertAgainstDirectory(index);
}
}
/**
- * Build a randomish document for both RAMDirectory and MemoryIndex,
+ * Build a randomish document for both Directory and MemoryIndex,
* and run all the queries against it.
*/
- public void assertAgainstRAMDirectory(MemoryIndex memory) throws Exception {
+ public void assertAgainstDirectory(MemoryIndex memory) throws Exception {
memory.reset();
StringBuilder fooField = new StringBuilder();
StringBuilder termField = new StringBuilder();
@@ -143,9 +143,9 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
termField.append(randomTerm());
}
- Directory ramdir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
Analyzer analyzer = randomAnalyzer();
- IndexWriter writer = new IndexWriter(ramdir,
+ IndexWriter writer = new IndexWriter(dir,
new IndexWriterConfig(analyzer).setCodec(
TestUtil.alwaysPostingsFormat(TestUtil.getDefaultPostingsFormat())));
Document doc = new Document();
@@ -161,11 +161,11 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
LeafReader reader = (LeafReader) memory.createSearcher().getIndexReader();
TestUtil.checkReader(reader);
- DirectoryReader competitor = DirectoryReader.open(ramdir);
+ DirectoryReader competitor = DirectoryReader.open(dir);
duellReaders(competitor, reader);
IOUtils.close(reader, competitor);
- assertAllQueries(memory, ramdir, analyzer);
- ramdir.close();
+ assertAllQueries(memory, dir, analyzer);
+ dir.close();
}
private void duellReaders(CompositeReader other, LeafReader memIndexReader)
@@ -236,10 +236,10 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
}
/**
- * Run all queries against both the RAMDirectory and MemoryIndex, ensuring they are the same.
+ * Run all queries against both the Directory and MemoryIndex, ensuring they are the same.
*/
- public void assertAllQueries(MemoryIndex memory, Directory ramdir, Analyzer analyzer) throws Exception {
- IndexReader reader = DirectoryReader.open(ramdir);
+ public void assertAllQueries(MemoryIndex memory, Directory directory, Analyzer analyzer) throws Exception {
+ IndexReader reader = DirectoryReader.open(directory);
IndexSearcher ram = newSearcher(reader);
IndexSearcher mem = memory.createSearcher();
QueryParser qp = new QueryParser("foo", analyzer);
diff --git a/lucene/misc/src/test/org/apache/lucene/misc/SweetSpotSimilarityTest.java b/lucene/misc/src/test/org/apache/lucene/misc/SweetSpotSimilarityTest.java
index 6ad94911e8d..1cce88790ae 100644
--- a/lucene/misc/src/test/org/apache/lucene/misc/SweetSpotSimilarityTest.java
+++ b/lucene/misc/src/test/org/apache/lucene/misc/SweetSpotSimilarityTest.java
@@ -32,8 +32,8 @@ import org.apache.lucene.search.similarities.ClassicSimilarity;
import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.search.similarities.TFIDFSimilarity;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
/**
@@ -43,7 +43,7 @@ public class SweetSpotSimilarityTest extends LuceneTestCase {
private static float computeNorm(Similarity sim, String field, int length) throws IOException {
String value = IntStream.range(0, length).mapToObj(i -> "a").collect(Collectors.joining(" "));
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setSimilarity(sim));
w.addDocument(Collections.singleton(newTextField(field, value, Store.NO)));
DirectoryReader reader = DirectoryReader.open(w);
diff --git a/lucene/misc/src/test/org/apache/lucene/store/TestHardLinkCopyDirectoryWrapper.java b/lucene/misc/src/test/org/apache/lucene/store/TestHardLinkCopyDirectoryWrapper.java
index a6392c90468..4ae44e0df1b 100644
--- a/lucene/misc/src/test/org/apache/lucene/store/TestHardLinkCopyDirectoryWrapper.java
+++ b/lucene/misc/src/test/org/apache/lucene/store/TestHardLinkCopyDirectoryWrapper.java
@@ -38,7 +38,7 @@ public class TestHardLinkCopyDirectoryWrapper extends BaseDirectoryTestCase {
protected Directory getDirectory(Path file) throws IOException {
Directory open;
if (random().nextBoolean()) {
- open = new RAMDirectory();
+ open = new ByteBuffersDirectory();
} else {
open = FSDirectory.open(file);
}
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/payloads/PayloadHelper.java b/lucene/queries/src/test/org/apache/lucene/queries/payloads/PayloadHelper.java
index cfd5156aa6f..62854e7b6e6 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/payloads/PayloadHelper.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/payloads/PayloadHelper.java
@@ -24,6 +24,7 @@ import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.English;
import org.apache.lucene.util.LuceneTestCase;
@@ -32,7 +33,6 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.store.RAMDirectory;
import java.io.IOException;
import java.util.Random;
@@ -104,7 +104,7 @@ public class PayloadHelper {
}
/**
- * Sets up a RAMDirectory, and adds documents (using English.intToEnglish()) with two fields: field and multiField
+ * Sets up a RAM-resident Directory, and adds documents (using English.intToEnglish()) with two fields: field and multiField
* and analyzes them using the PayloadAnalyzer
* @param similarity The Similarity class to use in the Searcher
* @param numDocs The num docs to add
@@ -112,7 +112,7 @@ public class PayloadHelper {
*/
// TODO: randomize
public IndexSearcher setUp(Random random, Similarity similarity, int numDocs) throws IOException {
- Directory directory = new MockDirectoryWrapper(random, new RAMDirectory());
+ Directory directory = new MockDirectoryWrapper(random, new ByteBuffersDirectory());
PayloadAnalyzer analyzer = new PayloadAnalyzer();
// TODO randomize this
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/surround/query/SingleFieldTestDb.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/surround/query/SingleFieldTestDb.java
index 30a5f925947..88614e8dfb4 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/surround/query/SingleFieldTestDb.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/surround/query/SingleFieldTestDb.java
@@ -19,9 +19,9 @@ package org.apache.lucene.queryparser.surround.query;
import java.util.Random;
import org.apache.lucene.document.Field;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.TextField;
@@ -35,7 +35,7 @@ public class SingleFieldTestDb {
public SingleFieldTestDb(Random random, String[] documents, String fName) {
try {
- db = new MockDirectoryWrapper(random, new RAMDirectory());
+ db = new MockDirectoryWrapper(random, new ByteBuffersDirectory());
docs = documents;
fieldName = fName;
IndexWriter writer = new IndexWriter(db, new IndexWriterConfig(new MockAnalyzer(random)));
diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialExample.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialExample.java
index 3f1d3febc15..f810ab74438 100644
--- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialExample.java
+++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialExample.java
@@ -40,8 +40,9 @@ import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
import org.apache.lucene.spatial.query.SpatialArgs;
import org.apache.lucene.spatial.query.SpatialArgsParser;
import org.apache.lucene.spatial.query.SpatialOperation;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
+
import org.apache.lucene.util.LuceneTestCase;
import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.distance.DistanceUtils;
@@ -98,7 +99,7 @@ public class SpatialExample extends LuceneTestCase {
this.strategy = new RecursivePrefixTreeStrategy(grid, "myGeoField");
- this.directory = new RAMDirectory();
+ this.directory = new ByteBuffersDirectory();
}
private void indexPoints() throws Exception {
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseCompressingDocValuesFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseCompressingDocValuesFormatTestCase.java
index 585cea77ace..17a9d192cbe 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseCompressingDocValuesFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseCompressingDocValuesFormatTestCase.java
@@ -23,8 +23,8 @@ import java.util.List;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.packed.PackedInts;
@@ -42,86 +42,88 @@ public abstract class BaseCompressingDocValuesFormatTestCase extends BaseDocValu
}
public void testUniqueValuesCompression() throws IOException {
- final Directory dir = new RAMDirectory();
- final IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
- final IndexWriter iwriter = new IndexWriter(dir, iwc);
+ try (final Directory dir = new ByteBuffersDirectory()) {
+ final IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
+ final IndexWriter iwriter = new IndexWriter(dir, iwc);
- final int uniqueValueCount = TestUtil.nextInt(random(), 1, 256);
- final List values = new ArrayList<>();
+ final int uniqueValueCount = TestUtil.nextInt(random(), 1, 256);
+ final List values = new ArrayList<>();
- final Document doc = new Document();
- final NumericDocValuesField dvf = new NumericDocValuesField("dv", 0);
- doc.add(dvf);
- for (int i = 0; i < 300; ++i) {
- final long value;
- if (values.size() < uniqueValueCount) {
- value = random().nextLong();
- values.add(value);
- } else {
- value = RandomPicks.randomFrom(random(), values);
+ final Document doc = new Document();
+ final NumericDocValuesField dvf = new NumericDocValuesField("dv", 0);
+ doc.add(dvf);
+ for (int i = 0; i < 300; ++i) {
+ final long value;
+ if (values.size() < uniqueValueCount) {
+ value = random().nextLong();
+ values.add(value);
+ } else {
+ value = RandomPicks.randomFrom(random(), values);
+ }
+ dvf.setLongValue(value);
+ iwriter.addDocument(doc);
}
- dvf.setLongValue(value);
- iwriter.addDocument(doc);
+ iwriter.forceMerge(1);
+ final long size1 = dirSize(dir);
+ for (int i = 0; i < 20; ++i) {
+ dvf.setLongValue(RandomPicks.randomFrom(random(), values));
+ iwriter.addDocument(doc);
+ }
+ iwriter.forceMerge(1);
+ final long size2 = dirSize(dir);
+ // make sure the new longs did not cost 8 bytes each
+ assertTrue(size2 < size1 + 8 * 20);
}
- iwriter.forceMerge(1);
- final long size1 = dirSize(dir);
- for (int i = 0; i < 20; ++i) {
- dvf.setLongValue(RandomPicks.randomFrom(random(), values));
- iwriter.addDocument(doc);
- }
- iwriter.forceMerge(1);
- final long size2 = dirSize(dir);
- // make sure the new longs did not cost 8 bytes each
- assertTrue(size2 < size1 + 8 * 20);
}
public void testDateCompression() throws IOException {
- final Directory dir = new RAMDirectory();
- final IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
- final IndexWriter iwriter = new IndexWriter(dir, iwc);
+ try (final Directory dir = new ByteBuffersDirectory()) {
+ final IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
+ final IndexWriter iwriter = new IndexWriter(dir, iwc);
- final long base = 13; // prime
- final long day = 1000L * 60 * 60 * 24;
+ final long base = 13; // prime
+ final long day = 1000L * 60 * 60 * 24;
- final Document doc = new Document();
- final NumericDocValuesField dvf = new NumericDocValuesField("dv", 0);
- doc.add(dvf);
- for (int i = 0; i < 300; ++i) {
- dvf.setLongValue(base + random().nextInt(1000) * day);
- iwriter.addDocument(doc);
+ final Document doc = new Document();
+ final NumericDocValuesField dvf = new NumericDocValuesField("dv", 0);
+ doc.add(dvf);
+ for (int i = 0; i < 300; ++i) {
+ dvf.setLongValue(base + random().nextInt(1000) * day);
+ iwriter.addDocument(doc);
+ }
+ iwriter.forceMerge(1);
+ final long size1 = dirSize(dir);
+ for (int i = 0; i < 50; ++i) {
+ dvf.setLongValue(base + random().nextInt(1000) * day);
+ iwriter.addDocument(doc);
+ }
+ iwriter.forceMerge(1);
+ final long size2 = dirSize(dir);
+ // make sure the new longs cost less than if they had only been packed
+ assertTrue(size2 < size1 + (PackedInts.bitsRequired(day) * 50) / 8);
}
- iwriter.forceMerge(1);
- final long size1 = dirSize(dir);
- for (int i = 0; i < 50; ++i) {
- dvf.setLongValue(base + random().nextInt(1000) * day);
- iwriter.addDocument(doc);
- }
- iwriter.forceMerge(1);
- final long size2 = dirSize(dir);
- // make sure the new longs costed less than if they had only been packed
- assertTrue(size2 < size1 + (PackedInts.bitsRequired(day) * 50) / 8);
}
public void testSingleBigValueCompression() throws IOException {
- final Directory dir = new RAMDirectory();
- final IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
- final IndexWriter iwriter = new IndexWriter(dir, iwc);
+ try (final Directory dir = new ByteBuffersDirectory()) {
+ final IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
+ final IndexWriter iwriter = new IndexWriter(dir, iwc);
- final Document doc = new Document();
- final NumericDocValuesField dvf = new NumericDocValuesField("dv", 0);
- doc.add(dvf);
- for (int i = 0; i < 20000; ++i) {
- dvf.setLongValue(i & 1023);
+ final Document doc = new Document();
+ final NumericDocValuesField dvf = new NumericDocValuesField("dv", 0);
+ doc.add(dvf);
+ for (int i = 0; i < 20000; ++i) {
+ dvf.setLongValue(i & 1023);
+ iwriter.addDocument(doc);
+ }
+ iwriter.forceMerge(1);
+ final long size1 = dirSize(dir);
+ dvf.setLongValue(Long.MAX_VALUE);
iwriter.addDocument(doc);
+ iwriter.forceMerge(1);
+ final long size2 = dirSize(dir);
+ // make sure the new value did not grow the bpv for every other value
+ assertTrue(size2 < size1 + (20000 * (63 - 10)) / 8);
}
- iwriter.forceMerge(1);
- final long size1 = dirSize(dir);
- dvf.setLongValue(Long.MAX_VALUE);
- iwriter.addDocument(doc);
- iwriter.forceMerge(1);
- final long size2 = dirSize(dir);
- // make sure the new value did not grow the bpv for every other value
- assertTrue(size2 < size1 + (20000 * (63 - 10)) / 8);
}
-
}
diff --git a/lucene/test-framework/src/java/org/apache/lucene/store/MockIndexOutputWrapper.java b/lucene/test-framework/src/java/org/apache/lucene/store/MockIndexOutputWrapper.java
index 17fb07e76c1..ef28da61681 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/store/MockIndexOutputWrapper.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/store/MockIndexOutputWrapper.java
@@ -22,7 +22,7 @@ import java.io.IOException;
import org.apache.lucene.util.LuceneTestCase;
/**
- * Used by MockRAMDirectory to create an output stream that
+ * Used to create an output stream that
* will throw an IOException on fake disk full, track max
* disk space actually used, and maybe throw random
* IOExceptions.
@@ -45,9 +45,9 @@ public class MockIndexOutputWrapper extends IndexOutput {
}
private void checkCrashed() throws IOException {
- // If MockRAMDir crashed since we were opened, then don't write anything
+ // If the directory has crashed since we were opened, then don't write anything
if (dir.crashed) {
- throw new IOException("MockRAMDirectory was crashed; cannot write to " + name);
+ throw new IOException(dir.getClass().getSimpleName() + " has crashed; cannot write to " + name);
}
}
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
index 2ffb1bf9fb0..1830dfc6f39 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
@@ -490,7 +490,6 @@ public abstract class LuceneTestCase extends Assert {
private static final List CORE_DIRECTORIES;
static {
CORE_DIRECTORIES = new ArrayList<>(FS_DIRECTORIES);
- CORE_DIRECTORIES.add("RAMDirectory");
CORE_DIRECTORIES.add(ByteBuffersDirectory.class.getSimpleName());
}
@@ -1626,7 +1625,7 @@ public abstract class LuceneTestCase extends Assert {
if (rarely(random)) {
clazzName = RandomPicks.randomFrom(random, CORE_DIRECTORIES);
} else {
- clazzName = "RAMDirectory";
+ clazzName = ByteBuffersDirectory.class.getName();
}
}
@@ -1650,7 +1649,7 @@ public abstract class LuceneTestCase extends Assert {
// the remaining dirs are no longer filesystem based, so we must check that the passedLockFactory is not file based:
if (!(lf instanceof FSLockFactory)) {
- // try ctor with only LockFactory (e.g. RAMDirectory)
+ // try ctor with only LockFactory
try {
return clazz.getConstructor(LockFactory.class).newInstance(lf);
} catch (NoSuchMethodException nsme) {
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
index 5350890ad30..d207419e1fe 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
@@ -71,12 +71,12 @@ import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TotalHits;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.NoLockFactory;
-import org.apache.lucene.store.RAMDirectory;
import org.junit.Assert;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
@@ -1281,9 +1281,12 @@ public final class TestUtil {
}
}
- /** Returns a copy of directory, entirely in RAM */
- public static RAMDirectory ramCopyOf(Directory dir) throws IOException {
- RAMDirectory ram = new RAMDirectory();
+ /**
+ * Returns a copy of the source directory, with file contents stored
+ * in RAM.
+ */
+ public static Directory ramCopyOf(Directory dir) throws IOException {
+ Directory ram = new ByteBuffersDirectory();
for (String file : dir.listAll()) {
if (file.startsWith(IndexFileNames.SEGMENTS) || IndexFileNames.CODEC_FILE_PATTERN.matcher(file).matches()) {
ram.copyFrom(dir, file, file, IOContext.DEFAULT);
diff --git a/lucene/test-framework/src/test/org/apache/lucene/store/TestMockDirectoryWrapper.java b/lucene/test-framework/src/test/org/apache/lucene/store/TestMockDirectoryWrapper.java
index 2ea6ce0c1ac..d04a6930196 100644
--- a/lucene/test-framework/src/test/org/apache/lucene/store/TestMockDirectoryWrapper.java
+++ b/lucene/test-framework/src/test/org/apache/lucene/store/TestMockDirectoryWrapper.java
@@ -115,8 +115,8 @@ public class TestMockDirectoryWrapper extends BaseDirectoryTestCase {
}
}
- public void testCorruptOnCloseIsWorkingRAMDir() throws Exception {
- try(Directory dir = new RAMDirectory()) {
+ public void testCorruptOnCloseIsWorkingOnByteBuffersDirectory() throws Exception {
+ try(Directory dir = new ByteBuffersDirectory()) {
testCorruptOnCloseIsWorking(dir);
}
}
diff --git a/solr/core/src/java/org/apache/solr/core/RAMDirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/RAMDirectoryFactory.java
index 25d0a1c0f7c..76e1b61d941 100644
--- a/solr/core/src/java/org/apache/solr/core/RAMDirectoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/RAMDirectoryFactory.java
@@ -18,15 +18,15 @@ package org.apache.solr.core;
import java.io.IOException;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockFactory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.SingleInstanceLockFactory;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
/**
- * Factory to instantiate {@link org.apache.lucene.store.RAMDirectory}
+ * Factory to instantiate a RAM-resident directory implementation.
*/
public class RAMDirectoryFactory extends EphemeralDirectoryFactory {
@@ -34,14 +34,14 @@ public class RAMDirectoryFactory extends EphemeralDirectoryFactory {
protected LockFactory createLockFactory(String rawLockType) throws IOException {
if (!(rawLockType == null || DirectoryFactory.LOCK_TYPE_SINGLE.equalsIgnoreCase(rawLockType.trim()))) {
throw new SolrException(ErrorCode.FORBIDDEN,
- "RAMDirectory can only be used with the '"+DirectoryFactory.LOCK_TYPE_SINGLE+"' lock factory type.");
+ "RAMDirectory can only be used with the '" +
+ DirectoryFactory.LOCK_TYPE_SINGLE+"' lock factory type.");
}
return new SingleInstanceLockFactory();
}
@Override
protected Directory create(String path, LockFactory lockFactory, DirContext dirContext) throws IOException {
- return new RAMDirectory(lockFactory);
+ return new ByteBuffersDirectory(lockFactory);
}
-
}
diff --git a/solr/core/src/java/org/apache/solr/spelling/AbstractLuceneSpellChecker.java b/solr/core/src/java/org/apache/solr/spelling/AbstractLuceneSpellChecker.java
index 8170982cff2..9fc31105882 100644
--- a/solr/core/src/java/org/apache/solr/spelling/AbstractLuceneSpellChecker.java
+++ b/solr/core/src/java/org/apache/solr/spelling/AbstractLuceneSpellChecker.java
@@ -32,10 +32,10 @@ import org.apache.lucene.search.spell.StringDistance;
import org.apache.lucene.search.spell.SuggestWord;
import org.apache.lucene.search.spell.SuggestWordFrequencyComparator;
import org.apache.lucene.search.spell.SuggestWordQueue;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.FilterDirectory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrCore;
import org.apache.solr.search.SolrIndexSearcher;
@@ -224,7 +224,7 @@ public abstract class AbstractLuceneSpellChecker extends SolrSpellChecker {
index = new FilterDirectory(FSDirectory.open(new File(indexDir).toPath())) {
};
} else {
- index = new RAMDirectory();
+ index = new ByteBuffersDirectory();
}
}
diff --git a/solr/core/src/java/org/apache/solr/spelling/FileBasedSpellChecker.java b/solr/core/src/java/org/apache/solr/spelling/FileBasedSpellChecker.java
index fd9ca21c336..b9559f577fd 100644
--- a/solr/core/src/java/org/apache/solr/spelling/FileBasedSpellChecker.java
+++ b/solr/core/src/java/org/apache/solr/spelling/FileBasedSpellChecker.java
@@ -28,6 +28,8 @@ import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LogByteSizeMergePolicy;
import org.apache.lucene.index.LogMergePolicy;
+import org.apache.lucene.store.ByteBuffersDirectory;
+import org.apache.lucene.store.Directory;
import org.apache.solr.schema.IndexSchema;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -36,7 +38,6 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.TextField;
import org.apache.lucene.search.spell.HighFrequencyDictionary;
import org.apache.lucene.search.spell.PlainTextDictionary;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrCore;
import org.apache.solr.schema.FieldType;
@@ -90,7 +91,7 @@ public class FileBasedSpellChecker extends AbstractLuceneSpellChecker {
if (fieldTypeName != null && schema.getFieldTypeNoEx(fieldTypeName) != null) {
FieldType fieldType = schema.getFieldTypes().get(fieldTypeName);
// Do index-time analysis using the given fieldType's analyzer
- RAMDirectory ramDir = new RAMDirectory();
+ Directory ramDir = new ByteBuffersDirectory();
LogMergePolicy mp = new LogByteSizeMergePolicy();
mp.setMergeFactor(300);
diff --git a/solr/core/src/test/org/apache/solr/core/RAMDirectoryFactoryTest.java b/solr/core/src/test/org/apache/solr/core/RAMDirectoryFactoryTest.java
index 2bc668ea8c3..713f60698cb 100644
--- a/solr/core/src/test/org/apache/solr/core/RAMDirectoryFactoryTest.java
+++ b/solr/core/src/test/org/apache/solr/core/RAMDirectoryFactoryTest.java
@@ -18,9 +18,9 @@ package org.apache.solr.core;
import java.io.IOException;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockFactory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.solr.core.DirectoryFactory.DirContext;
@@ -35,7 +35,7 @@ public class RAMDirectoryFactoryTest extends LuceneTestCase {
}
private void dotestOpenReturnsTheSameForSamePath() throws IOException {
- final Directory directory = new RAMDirectory();
+ final Directory directory = new ByteBuffersDirectory();
RAMDirectoryFactory factory = new RAMDirectoryFactory() {
@Override
protected Directory create(String path, LockFactory lockFactory, DirContext dirContext) {
diff --git a/solr/core/src/test/org/apache/solr/search/TestSort.java b/solr/core/src/test/org/apache/solr/search/TestSort.java
index 54cc5eadf88..0d276dc83e6 100644
--- a/solr/core/src/test/org/apache/solr/search/TestSort.java
+++ b/solr/core/src/test/org/apache/solr/search/TestSort.java
@@ -45,8 +45,8 @@ import org.apache.lucene.search.SortField.Type;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldCollector;
+import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;
@@ -183,7 +183,7 @@ public class TestSort extends SolrTestCaseJ4 {
public void testSort() throws Exception {
- Directory dir = new RAMDirectory();
+ Directory dir = new ByteBuffersDirectory();
Field f = new StringField("f", "0", Field.Store.NO);
Field f2 = new StringField("f2", "0", Field.Store.NO);