Use ByteBuffersDirectory rather than RAMDirectory (#52768)
Lucene's RAMDirectory has been deprecated. This commit replaces all uses of RAMDirectory in elasticsearch with the newer ByteBuffersDirectory. Most uses are in tests, but the percolator and painless executor may get some small speedups.
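For illustration only (not part of the commit): a minimal, self-contained sketch of the substitution applied throughout, using plain Lucene APIs. The class name, field name, and sample text below are made up; the point is that an in-memory index is opened on a ByteBuffersDirectory where a RAMDirectory would previously have been used, typically inside try-with-resources so the directory is released when the test or request finishes.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;

public class InMemoryIndexExample {
    public static void main(String[] args) throws Exception {
        // Before this change: Directory dir = new RAMDirectory();
        // RAMDirectory is deprecated, so the heap-backed ByteBuffersDirectory is used instead.
        try (Directory dir = new ByteBuffersDirectory();
             IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            Document doc = new Document();
            doc.add(new TextField("field", "aaa bbb ccc ddd", Field.Store.NO));
            writer.addDocument(doc);
            writer.commit();
            // A reader can be opened directly on the writer for near-real-time search.
            try (DirectoryReader reader = DirectoryReader.open(writer)) {
                System.out.println("docs indexed: " + reader.numDocs());
            }
        }
    }
}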
parent 02b23c37d1
commit 638f3e4183
@@ -29,8 +29,9 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
-import org.apache.lucene.store.RAMDirectory;
 import org.elasticsearch.Version;
+import org.apache.lucene.store.ByteBuffersDirectory;
+import org.apache.lucene.store.Directory;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.ActionType;
@@ -562,8 +563,8 @@ public class PainlessExecuteAction extends ActionType<PainlessExecuteAction.Resp
 
 Analyzer defaultAnalyzer = indexService.getIndexAnalyzers().getDefaultIndexAnalyzer();
 
-try (RAMDirectory ramDirectory = new RAMDirectory()) {
-try (IndexWriter indexWriter = new IndexWriter(ramDirectory, new IndexWriterConfig(defaultAnalyzer))) {
+try (Directory directory = new ByteBuffersDirectory()) {
+try (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(defaultAnalyzer))) {
 String index = indexService.index().getName();
 String type = indexService.mapperService().documentMapper().type();
 BytesReference document = request.contextSetup.document;
@@ -34,8 +34,8 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.elasticsearch.index.similarity.ScriptedSimilarity;
 import org.elasticsearch.painless.spi.Whitelist;
 import org.elasticsearch.script.ScriptContext;
@@ -62,7 +62,7 @@ public class SimilarityScriptTests extends ScriptTestCase {
 SimilarityScript.Factory factory = scriptEngine.compile(
 "foobar", "return query.boost * doc.freq / doc.length", SimilarityScript.CONTEXT, Collections.emptyMap());
 ScriptedSimilarity sim = new ScriptedSimilarity("foobar", null, "foobaz", factory::newInstance, true);
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setSimilarity(sim));
 
 Document doc = new Document();
@@ -101,7 +101,7 @@ public class SimilarityScriptTests extends ScriptTestCase {
 SimilarityScript.Factory factory = scriptEngine.compile(
 "foobar", "return weight * doc.freq / doc.length", SimilarityScript.CONTEXT, Collections.emptyMap());
 ScriptedSimilarity sim = new ScriptedSimilarity("foobar", weightFactory::newInstance, "foobaz", factory::newInstance, true);
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setSimilarity(sim));
 
 Document doc = new Document();
@@ -36,7 +36,8 @@ import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.join.BitSetProducer;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.ByteBuffersDirectory;
+import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BitDocIdSet;
 import org.apache.lucene.util.BitSet;
 import org.apache.lucene.util.BytesRef;
@@ -674,8 +675,8 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
 }
 
 static IndexSearcher createMultiDocumentSearcher(Analyzer analyzer, Collection<ParsedDocument> docs) {
-RAMDirectory ramDirectory = new RAMDirectory();
-try (IndexWriter indexWriter = new IndexWriter(ramDirectory, new IndexWriterConfig(analyzer))) {
+Directory directory = new ByteBuffersDirectory();
+try (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(analyzer))) {
 // Indexing in order here, so that the user provided order matches with the docid sequencing:
 Iterable<ParseContext.Document> iterable = () -> docs.stream()
 .map(ParsedDocument::docs)
@@ -74,8 +74,8 @@ import org.apache.lucene.search.spans.SpanNearQuery;
 import org.apache.lucene.search.spans.SpanNotQuery;
 import org.apache.lucene.search.spans.SpanOrQuery;
 import org.apache.lucene.search.spans.SpanTermQuery;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -842,7 +842,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
 
 Version v = Version.CURRENT;
 
-try (RAMDirectory directory = new RAMDirectory()) {
+try (Directory directory = new ByteBuffersDirectory()) {
 try (IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig())) {
 List<Document> documents = new ArrayList<>();
 Document document = new Document();
@@ -881,7 +881,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
 }
 
 // This will trigger using the TermsQuery instead of individual term query clauses in the CoveringQuery:
-try (RAMDirectory directory = new RAMDirectory()) {
+try (Directory directory = new ByteBuffersDirectory()) {
 try (IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig())) {
 Document document = new Document();
 for (int i = 0; i < 1024; i++) {
@@ -45,8 +45,8 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.elasticsearch.action.admin.indices.alias.Alias;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -309,7 +309,7 @@ public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase {
 }
 PerFieldAnalyzerWrapper wrapper = new PerFieldAnalyzerWrapper(new StandardAnalyzer(CharArraySet.EMPTY_SET), mapping);
 
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexWriterConfig conf = new IndexWriterConfig(wrapper);
 
 conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
@@ -28,7 +28,7 @@ import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.test.ESTestCase;
 
@@ -36,7 +36,7 @@ import static org.hamcrest.Matchers.equalTo;
 
 public class MultiPhrasePrefixQueryTests extends ESTestCase {
 public void testSimple() throws Exception {
-IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
+IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
 Document doc = new Document();
 doc.add(new Field("field", "aaa bbb ccc ddd", TextField.TYPE_NOT_STORED));
 writer.addDocument(doc);
@@ -27,8 +27,8 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.search.MoreLikeThisQuery;
 import org.elasticsearch.test.ESTestCase;
@@ -38,7 +38,7 @@ import static org.hamcrest.Matchers.equalTo;
 
 public class MoreLikeThisQueryTests extends ESTestCase {
 public void testSimple() throws Exception {
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
 indexWriter.commit();
 
@@ -19,10 +19,11 @@
 
 package org.elasticsearch.common.lucene.store;
 
+import org.apache.lucene.store.ByteBuffersDirectory;
+import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
-import org.apache.lucene.store.RAMDirectory;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
@@ -32,7 +33,7 @@ import static org.hamcrest.Matchers.lessThan;
 
 public class InputStreamIndexInputTests extends ESTestCase {
 public void testSingleReadSingleByteLimit() throws IOException {
-RAMDirectory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
 for (int i = 0; i < 3; i++) {
 output.writeByte((byte) 1);
@@ -68,7 +69,7 @@ public class InputStreamIndexInputTests extends ESTestCase {
 }
 
 public void testReadMultiSingleByteLimit1() throws IOException {
-RAMDirectory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
 for (int i = 0; i < 3; i++) {
 output.writeByte((byte) 1);
@@ -106,7 +107,7 @@ public class InputStreamIndexInputTests extends ESTestCase {
 }
 
 public void testSingleReadTwoBytesLimit() throws IOException {
-RAMDirectory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
 for (int i = 0; i < 3; i++) {
 output.writeByte((byte) 1);
@@ -147,7 +148,7 @@ public class InputStreamIndexInputTests extends ESTestCase {
 }
 
 public void testReadMultiTwoBytesLimit1() throws IOException {
-RAMDirectory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
 for (int i = 0; i < 3; i++) {
 output.writeByte((byte) 1);
@@ -190,7 +191,7 @@ public class InputStreamIndexInputTests extends ESTestCase {
 }
 
 public void testReadMultiFourBytesLimit() throws IOException {
-RAMDirectory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
 for (int i = 0; i < 3; i++) {
 output.writeByte((byte) 1);
@@ -228,7 +229,7 @@ public class InputStreamIndexInputTests extends ESTestCase {
 }
 
 public void testMarkRest() throws Exception {
-RAMDirectory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
 for (int i = 0; i < 3; i++) {
 output.writeByte((byte) 1);
@@ -42,8 +42,8 @@ import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.TopFieldDocs;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.test.ESTestCase;
@@ -55,7 +55,7 @@ import static org.hamcrest.Matchers.equalTo;
 
 public class SimpleLuceneTests extends ESTestCase {
 public void testSortValues() throws Exception {
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
 for (int i = 0; i < 10; i++) {
 Document document = new Document();
@@ -74,7 +74,7 @@ public class SimpleLuceneTests extends ESTestCase {
 }
 
 public void testSimpleNumericOps() throws Exception {
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
 
 Document document = new Document();
@@ -104,7 +104,7 @@ public class SimpleLuceneTests extends ESTestCase {
 * first (with load and break).
 */
 public void testOrdering() throws Exception {
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
 
 Document document = new Document();
@@ -132,7 +132,7 @@ public class SimpleLuceneTests extends ESTestCase {
 }
 
 public void testNRTSearchOnClosedWriter() throws Exception {
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
 DirectoryReader reader = DirectoryReader.open(indexWriter);
 
@@ -35,8 +35,8 @@ import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.vectorhighlight.CustomFieldQuery;
 import org.apache.lucene.search.vectorhighlight.FastVectorHighlighter;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.test.ESTestCase;
 
@@ -46,7 +46,7 @@ import static org.hamcrest.Matchers.nullValue;
 
 public class VectorHighlighterTests extends ESTestCase {
 public void testVectorHighlighter() throws Exception {
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
 
 Document document = new Document();
@@ -72,7 +72,7 @@ public class VectorHighlighterTests extends ESTestCase {
 }
 
 public void testVectorHighlighterPrefixQuery() throws Exception {
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
 
 Document document = new Document();
@@ -113,7 +113,7 @@ public class VectorHighlighterTests extends ESTestCase {
 }
 
 public void testVectorHighlighterNoStore() throws Exception {
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
 
 Document document = new Document();
@@ -138,7 +138,7 @@ public class VectorHighlighterTests extends ESTestCase {
 }
 
 public void testVectorHighlighterNoTermVector() throws Exception {
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
 
 Document document = new Document();
@@ -33,8 +33,8 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.join.BitSetProducer;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.BitSet;
 import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
@@ -68,7 +68,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
 
 public void testInvalidateEntries() throws Exception {
 IndexWriter writer = new IndexWriter(
-new RAMDirectory(),
+new ByteBuffersDirectory(),
 new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(new LogByteSizeMergePolicy())
 );
 Document document = new Document();
@@ -128,7 +128,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
 
 public void testListener() throws IOException {
 IndexWriter writer = new IndexWriter(
-new RAMDirectory(),
+new ByteBuffersDirectory(),
 new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(new LogByteSizeMergePolicy())
 );
 Document document = new Document();
@@ -30,7 +30,7 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.LogByteSizeMergePolicy;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexService;
@@ -136,7 +136,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase {
 indicesFieldDataCache = getInstanceFromNode(IndicesService.class).getIndicesFieldDataCache();
 // LogByteSizeMP to preserve doc ID order
 writer = new IndexWriter(
-new RAMDirectory(), new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(new LogByteSizeMergePolicy())
+new ByteBuffersDirectory(), new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(new LogByteSizeMergePolicy())
 );
 shardContext = indexService.newQueryShardContext(0, null, () -> 0, null);
 }
@@ -28,7 +28,7 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.util.Accountable;
 import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
 import org.elasticsearch.common.settings.Settings;
@@ -110,7 +110,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
 final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1));
 final MappedFieldType mapper1 = new TextFieldMapper.Builder("field_1").fielddata(true).build(ctx).fieldType();
 final MappedFieldType mapper2 = new TextFieldMapper.Builder("field_2").fielddata(true).build(ctx).fieldType();
-final IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(new KeywordAnalyzer()));
+final IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(new KeywordAnalyzer()));
 Document doc = new Document();
 doc.add(new StringField("field_1", "thisisastring", Store.NO));
 doc.add(new StringField("field_2", "thisisanotherstring", Store.NO));
@@ -169,7 +169,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
 
 final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1));
 final MappedFieldType mapper1 = new TextFieldMapper.Builder("s").fielddata(true).build(ctx).fieldType();
-final IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(new KeywordAnalyzer()));
+final IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(new KeywordAnalyzer()));
 Document doc = new Document();
 doc.add(new StringField("s", "thisisastring", Store.NO));
 writer.addDocument(doc);
@@ -27,8 +27,8 @@ import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.SortedNumericDocValues;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -80,7 +80,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
 .endObject()),
 XContentType.JSON));
 
-try (Directory dir = new RAMDirectory();
+try (Directory dir = new ByteBuffersDirectory();
 IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())))) {
 w.addDocuments(doc.docs());
 try (DirectoryReader reader = DirectoryReader.open(w)) {
@@ -23,7 +23,7 @@ import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
@@ -41,7 +41,7 @@ import static org.hamcrest.Matchers.equalTo;
 
 public class StoredNumericValuesTests extends ESSingleNodeTestCase {
 public void testBytesAndNumericRepresentation() throws Exception {
-IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
+IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
 
 String mapping = Strings
 .toString(XContentFactory.jsonBuilder()
@@ -37,8 +37,8 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.similarities.BM25Similarity;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.Version;
 import org.elasticsearch.script.SimilarityScript;
@@ -117,7 +117,7 @@ public class ScriptedSimilarityTests extends ESTestCase {
 };
 };
 ScriptedSimilarity sim = new ScriptedSimilarity("foobar", null, "foobaz", scriptFactory, true);
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setSimilarity(sim));
 
 Document doc = new Document();
@@ -211,7 +211,7 @@ public class ScriptedSimilarityTests extends ESTestCase {
 };
 };
 ScriptedSimilarity sim = new ScriptedSimilarity("foobar", weightScriptFactory, "foobaz", scriptFactory, true);
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setSimilarity(sim));
 
 Document doc = new Document();
@@ -40,6 +40,7 @@ import org.apache.lucene.index.SegmentInfos;
 import org.apache.lucene.index.SnapshotDeletionPolicy;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.BaseDirectoryWrapper;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.ChecksumIndexInput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FilterDirectory;
@@ -47,7 +48,6 @@ import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.NIOFSDirectory;
-import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.Version;
@@ -955,7 +955,7 @@ public class StoreTests extends ESTestCase {
 
 public void testDeserializeCorruptionException() throws IOException {
 final ShardId shardId = new ShardId("index", "_na_", 1);
-final Directory dir = new RAMDirectory(); // I use ram dir to prevent that virusscanner being a PITA
+final Directory dir = new ByteBuffersDirectory(); // I use ram dir to prevent that virusscanner being a PITA
 Store store = new Store(shardId, INDEX_SETTINGS, dir, new DummyShardLock(shardId));
 CorruptIndexException ex = new CorruptIndexException("foo", "bar");
 store.markStoreCorrupted(ex);
@@ -984,7 +984,7 @@ public class StoreTests extends ESTestCase {
 
 public void testCorruptionMarkerVersionCheck() throws IOException {
 final ShardId shardId = new ShardId("index", "_na_", 1);
-final Directory dir = new RAMDirectory(); // I use ram dir to prevent that virusscanner being a PITA
+final Directory dir = new ByteBuffersDirectory(); // I use ram dir to prevent that virusscanner being a PITA
 
 try (Store store = new Store(shardId, INDEX_SETTINGS, dir, new DummyShardLock(shardId))) {
 final String corruptionMarkerName = Store.CORRUPTED_MARKER_NAME_PREFIX + UUIDs.randomBase64UUID();
@@ -24,8 +24,8 @@ import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.settings.Settings;
@@ -137,7 +137,7 @@ public class CollapseBuilderTests extends AbstractSerializingTestCase<CollapseBu
 }
 
 public void testBuild() throws IOException {
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) {
 writer.commit();
 }
@@ -27,8 +27,8 @@ import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.search.SearchRequest;
@@ -204,7 +204,7 @@ public class SliceBuilderTests extends ESTestCase {
 }
 
 public void testToFilterSimple() throws IOException {
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) {
 writer.commit();
 }
@@ -224,7 +224,7 @@ public class SliceBuilderTests extends ESTestCase {
 }
 
 public void testToFilterRandom() throws IOException {
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) {
 writer.commit();
 }
@@ -307,7 +307,7 @@ public class SliceBuilderTests extends ESTestCase {
 }
 
 public void testInvalidField() throws IOException {
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) {
 writer.commit();
 }
@@ -321,7 +321,7 @@ public class SliceBuilderTests extends ESTestCase {
 }
 
 public void testToFilterDeprecationMessage() throws IOException {
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) {
 writer.commit();
 }
@@ -351,7 +351,7 @@ public class SliceBuilderTests extends ESTestCase {
 }
 
 public void testToFilterWithRouting() throws IOException {
-Directory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) {
 writer.commit();
 }
@@ -41,8 +41,8 @@ import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.MultiTerms;
 import org.apache.lucene.search.spell.DirectSpellChecker;
 import org.apache.lucene.search.spell.SuggestMode;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CharsRefBuilder;
 import org.elasticsearch.search.suggest.phrase.NoisyChannelSpellChecker.Result;
@@ -65,7 +65,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {
 private final BytesRef postTag = new BytesRef("</em>");
 
 public void testNgram() throws IOException {
-RAMDirectory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 Map<String, Analyzer> mapping = new HashMap<>();
 mapping.put("body_ngram", new Analyzer() {
 
@@ -226,7 +226,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {
 }
 
 public void testMultiGenerator() throws IOException {
-RAMDirectory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 Map<String, Analyzer> mapping = new HashMap<>();
 mapping.put("body_ngram", new Analyzer() {
 
@@ -343,7 +343,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {
 }
 
 public void testTrigram() throws IOException {
-RAMDirectory dir = new RAMDirectory();
+Directory dir = new ByteBuffersDirectory();
 Map<String, Analyzer> mapping = new HashMap<>();
 mapping.put("body_ngram", new Analyzer() {
 
@@ -29,7 +29,7 @@ import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.MultiTerms;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.ByteBuffersDirectory;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.lucene.BytesRefs;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -112,7 +112,7 @@ public abstract class SmoothingModelTestCase extends ESTestCase {
 Map<String, Analyzer> mapping = new HashMap<>();
 mapping.put("field", new WhitespaceAnalyzer());
 PerFieldAnalyzerWrapper wrapper = new PerFieldAnalyzerWrapper(new WhitespaceAnalyzer(), mapping);
-IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(wrapper));
+IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(wrapper));
 Document doc = new Document();
 doc.add(new Field("field", "someText", TextField.TYPE_NOT_STORED));
 writer.addDocument(doc);