Update to lucene snapshot e7c625430ed (#57981)

Includes LUCENE-9148 and LUCENE-9398, which split the BKD metadata, index, and data into separate files and keep the BKD index off-heap.
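For orientation: the long run of "@@ -1 +0,0 @@" / "@@ -0,0 +1 @@" hunks below swaps the per-module .sha1 checksum files for the Lucene jars from the old snapshot to the new one, and the Java hunks bump the codec from Lucene84Codec to Lucene86Codec and adopt the split BKD file layout. Below is a minimal sketch of that new BKD API, based only on the calls exercised in the test changes further down; the class name, the file names, and the assumption that w is an already-populated org.apache.lucene.util.bkd.BKDWriter are illustrative, not part of this commit.

import java.io.IOException;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.bkd.BKDReader;
import org.apache.lucene.util.bkd.BKDWriter;

class BkdThreeFileSketch {
    // Hypothetical helper: "w" is assumed to already have its points added;
    // "bkdmeta"/"bkdindex"/"bkddata" are arbitrary file names, as in the tests.
    static BKDReader writeAndReopen(Directory dir, BKDWriter w) throws IOException {
        // finish() now takes separate meta/index/data outputs and returns a
        // Runnable that writes the tree index; the tests below run it immediately.
        try (IndexOutput metaOut = dir.createOutput("bkdmeta", IOContext.DEFAULT);
             IndexOutput indexOut = dir.createOutput("bkdindex", IOContext.DEFAULT);
             IndexOutput dataOut = dir.createOutput("bkddata", IOContext.DEFAULT)) {
            w.finish(metaOut, indexOut, dataOut).run();
        }
        // The reader likewise takes one input per part, which is what allows
        // the tree index to stay off-heap rather than being loaded up front.
        return new BKDReader(
            dir.openInput("bkdmeta", IOContext.DEFAULT),
            dir.openInput("bkdindex", IOContext.DEFAULT),
            dir.openInput("bkddata", IOContext.DEFAULT));
    }
}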
parent 34fc52dbf3
commit 16e230dcb8
@@ -1,5 +1,5 @@
 elasticsearch = 7.9.0
-lucene = 8.6.0-snapshot-9d6c738ffce
+lucene = 8.6.0-snapshot-e7c625430ed
 
 bundled_jdk_vendor = adoptopenjdk
 bundled_jdk = 14.0.1+7
@@ -1 +0,0 @@
-b151419d588744101ab6aa1a7e202a07a17e9746
@@ -0,0 +1 @@
+c9dbc427cb1998b500ff07f8af4c13ebd72fa4ba
@@ -1 +0,0 @@
-4b7164b5b1117507c0ef3c8eef998ad737ff1fd2
@@ -0,0 +1 @@
+23ef92764508757a04703fa129930774862bce31
@@ -1 +0,0 @@
-d7c96a9ae727b6c5e18bb1ed63faf5c3bf875203
@@ -0,0 +1 @@
+04432005742de8e9ad8e1248e0ac4ef41319e687
@@ -1 +0,0 @@
-6445c4715b36be6c0d7e843c5b1ec6e303ab6e6c
@@ -0,0 +1 @@
+c994b8702089de33cbba337d390553707d2e4225
@@ -1 +0,0 @@
-93aee27fc71737bd366b25c3a2957fe0429aef29
@@ -0,0 +1 @@
+65d46f9ef354fd88f7b971d293ed2b90e4e5373b
@@ -1 +0,0 @@
-525d83fae93270b77dee719c64dd4e81d001c15e
@@ -0,0 +1 @@
+d4903699eb1c5932f8c120a6e71321917a2ea1aa
@@ -1 +0,0 @@
-4fc5fc61c7c978f0eb6ea5695a70d6c4de747f35
@@ -0,0 +1 @@
+fd67d4aa9730a8a7a7bcba2dc5c7f53838ffd1d8
@@ -1 +0,0 @@
-c2c64f756ee96f761f00f9a38defd43d507eea0b
@@ -0,0 +1 @@
+561cb8c4efa2e703246526cd5ee2beac7e28ead8
@@ -1 +0,0 @@
-9d204ea34ae88a4470f1e9011efac42e3ff8b58c
@@ -0,0 +1 @@
+188c577102f49cdb097e2487ff6958dccff1a2de
@@ -1 +0,0 @@
-b597d27ddaaa890a09b4f61a571f712864f2ced6
@@ -0,0 +1 @@
+0cada355cf6f8b210c13602ad39378cbc568a6f9
@@ -1 +0,0 @@
-69838ed06991014d1f2530208bef66c59a554fe2
@@ -0,0 +1 @@
+180065e697f361ead5e6ee831582cc2cf37bd5b9
@@ -1 +0,0 @@
-34a73729a8be3fde91f9920b5b79615b04e3ea03
@@ -0,0 +1 @@
+e5a26420f917e6d5f8bc4888594fc9f156a5f7ba
@@ -1 +0,0 @@
-20332b08670bf874c0db27113dd9c1507e2ce464
@@ -0,0 +1 @@
+06d0935908b272667a2f5aa3aa7da7f1832d27a8
@@ -1 +0,0 @@
-42d0da090a8ae1fc411bbba62083621a9e880c06
@@ -0,0 +1 @@
+45c68a4d29befe8d1d9aef7a92f933598cba85e5
@@ -1 +0,0 @@
-001bf352ba947be5392d45586b8e53b04fabd58c
@@ -0,0 +1 @@
+ac72a12e5725ce030d63bad44bd6a169594ac651
@@ -1 +0,0 @@
-f730849f6a1c894ede85721423e421974eaa11a8
@@ -0,0 +1 @@
+e196e5ef80672e894d61fe0f0ae2568dfb83f63a
@@ -1 +0,0 @@
-92e10bd79a1e330e5257fa204bce0ed0b3611f6d
@@ -0,0 +1 @@
+15bd0e6b44e0192db9a8c5a5e0f8d392b4610884
@@ -1 +0,0 @@
-83cf037431b46ca97303f9c8e6b61658835d5ee2
@@ -0,0 +1 @@
+627e685ec2d1025429abc02441e22f466443be5a
@@ -1 +0,0 @@
-3a1add29c46420b2ad406ff4855591bd5a9cb3ab
@@ -0,0 +1 @@
+1c4a1e4281865c329e1fec53a0e6d5e53d25155f
@@ -1 +0,0 @@
-a6c7cc66b953e4f5957f96b80b0c8f43ed6d3dfa
@@ -0,0 +1 @@
+e0c9b19c5e9d99d924b1cf9e6b4329b94920c9b8
@@ -1 +0,0 @@
-4edcd6c035201be43fde76a85982fc2b9a9cbdbf
@@ -0,0 +1 @@
+d780c5bf1374627afcca67d487319507211a5620
@@ -1 +0,0 @@
-a1dca9d9c650b843ca3ae812d54d8b2219b38f3a
@@ -0,0 +1 @@
+0906b0aca0283153ab00b448a57e755847a659a4
@@ -103,7 +103,7 @@ import java.util.List;
 import java.util.Map;
 
 public class Lucene {
-    public static final String LATEST_CODEC = "Lucene84";
+    public static final String LATEST_CODEC = "Lucene86";
 
     public static final String SOFT_DELETES_FIELD = "__soft_deletes";
 
@@ -22,7 +22,7 @@ package org.elasticsearch.index.codec;
 import org.apache.logging.log4j.Logger;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
-import org.apache.lucene.codecs.lucene84.Lucene84Codec;
+import org.apache.lucene.codecs.lucene86.Lucene86Codec;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.index.mapper.MapperService;
@@ -47,8 +47,8 @@ public class CodecService {
     public CodecService(@Nullable MapperService mapperService, Logger logger) {
         final MapBuilder<String, Codec> codecs = MapBuilder.<String, Codec>newMapBuilder();
         if (mapperService == null) {
-            codecs.put(DEFAULT_CODEC, new Lucene84Codec());
-            codecs.put(BEST_COMPRESSION_CODEC, new Lucene84Codec(Mode.BEST_COMPRESSION));
+            codecs.put(DEFAULT_CODEC, new Lucene86Codec());
+            codecs.put(BEST_COMPRESSION_CODEC, new Lucene86Codec(Mode.BEST_COMPRESSION));
         } else {
             codecs.put(DEFAULT_CODEC,
                 new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger));
@@ -23,7 +23,7 @@ import org.apache.logging.log4j.Logger;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.PostingsFormat;
 import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
-import org.apache.lucene.codecs.lucene84.Lucene84Codec;
+import org.apache.lucene.codecs.lucene86.Lucene86Codec;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.index.mapper.CompletionFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
@@ -37,7 +37,7 @@ import org.elasticsearch.index.mapper.MapperService;
  * per index in real time via the mapping API. If no specific postings format is
  * configured for a specific field the default postings format is used.
  */
-public class PerFieldMappingPostingFormatCodec extends Lucene84Codec {
+public class PerFieldMappingPostingFormatCodec extends Lucene86Codec {
     private final Logger logger;
     private final MapperService mapperService;
 
@@ -19,16 +19,11 @@
 
 package org.elasticsearch.index.codec;
 
-import static org.hamcrest.Matchers.instanceOf;
-
-import java.io.IOException;
-import java.util.Collections;
-
 import org.apache.logging.log4j.LogManager;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
 import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
-import org.apache.lucene.codecs.lucene84.Lucene84Codec;
+import org.apache.lucene.codecs.lucene86.Lucene86Codec;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
@@ -47,14 +42,19 @@ import org.elasticsearch.plugins.MapperPlugin;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.IndexSettingsModule;
 
+import java.io.IOException;
+import java.util.Collections;
+
+import static org.hamcrest.Matchers.instanceOf;
+
 @SuppressCodecs("*") // we test against default codec so never get a random one here!
 public class CodecTests extends ESTestCase {
 
     public void testResolveDefaultCodecs() throws Exception {
         CodecService codecService = createCodecService();
         assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class));
-        assertThat(codecService.codec("default"), instanceOf(Lucene84Codec.class));
-        assertThat(codecService.codec("Lucene84"), instanceOf(Lucene84Codec.class));
+        assertThat(codecService.codec("default"), instanceOf(Lucene86Codec.class));
+        assertThat(codecService.codec("Lucene86"), instanceOf(Lucene86Codec.class));
     }
 
     public void testDefault() throws Exception {
@@ -19,7 +19,7 @@
 package org.elasticsearch.index.engine;
 
 import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.codecs.lucene84.Lucene84Codec;
+import org.apache.lucene.codecs.lucene86.Lucene86Codec;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
@@ -57,7 +57,7 @@ public class CompletionStatsCacheTests extends ESTestCase {
     public void testCompletionStatsCache() throws IOException, InterruptedException {
         final IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
         final PostingsFormat postingsFormat = new Completion84PostingsFormat();
-        indexWriterConfig.setCodec(new Lucene84Codec() {
+        indexWriterConfig.setCodec(new Lucene86Codec() {
             @Override
             public PostingsFormat getPostingsFormatForField(String field) {
                 return postingsFormat; // all fields are suggest fields
@@ -481,7 +481,7 @@ public class QueryPhaseTests extends IndexShardTestCase {
         context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery()));
         context.setSize(1);
         context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));
-        context.sort(new SortAndFormats(sort, new DocValueFormat[] {DocValueFormat.RAW}));
+        context.sort(new SortAndFormats(sort, new DocValueFormat[]{DocValueFormat.RAW}));
 
 
         QueryPhase.executeInternal(context);
@@ -635,7 +635,7 @@
 
 
         context.sort(new SortAndFormats(new Sort(new SortField("other", SortField.Type.INT)),
-            new DocValueFormat[] { DocValueFormat.RAW }));
+            new DocValueFormat[]{DocValueFormat.RAW}));
         topDocsContext = TopDocsCollectorContext.createTopDocsCollectorContext(context, false);
         assertEquals(topDocsContext.create(null).scoreMode(), org.apache.lucene.search.ScoreMode.COMPLETE_NO_SCORES);
         QueryPhase.executeInternal(context);
@@ -757,14 +757,16 @@
                 LongPoint.encodeDimension(value, longBytes, 0);
                 w.add(longBytes, docId);
             }
-            long indexFP;
-            try (IndexOutput out = dir.createOutput("bkd", IOContext.DEFAULT)) {
-                indexFP = w.finish(out);
+            try (IndexOutput metaout = dir.createOutput("bkdmeta", IOContext.DEFAULT);
+                 IndexOutput indexout = dir.createOutput("bkdindex", IOContext.DEFAULT);
+                 IndexOutput dataout = dir.createOutput("bkddata", IOContext.DEFAULT)) {
+                w.finish(metaout, indexout, dataout).run();
             }
-            try (IndexInput in = dir.openInput("bkd", IOContext.DEFAULT)) {
-                in.seek(indexFP);
-                BKDReader r = new BKDReader(in);
-                assertTrue(pointsHaveDuplicateData(r, r.getDocCount()/2));
+            try (IndexInput metain = dir.openInput("bkdmeta", IOContext.DEFAULT);
+                 IndexInput indexin = dir.openInput("bkdindex", IOContext.DEFAULT);
+                 IndexInput datain = dir.openInput("bkddata", IOContext.DEFAULT)) {
+                BKDReader r = new BKDReader(metain, indexin, datain);
+                assertTrue(pointsHaveDuplicateData(r, r.getDocCount() / 2));
             }
         }
     }
@@ -785,12 +787,14 @@
             }
             long indexFP;
             try (IndexOutput out = dir.createOutput("bkd", IOContext.DEFAULT)) {
-                indexFP = w.finish(out);
+                Runnable finalizer = w.finish(out, out, out);
+                indexFP = out.getFilePointer();
+                finalizer.run();
             }
             try (IndexInput in = dir.openInput("bkd", IOContext.DEFAULT)) {
                 in.seek(indexFP);
-                BKDReader r = new BKDReader(in);
-                assertFalse(pointsHaveDuplicateData(r, r.getDocCount()/2));
+                BKDReader r = new BKDReader(in, in, in);
+                assertFalse(pointsHaveDuplicateData(r, r.getDocCount() / 2));
             }
         }
     }
@@ -920,7 +924,7 @@
 
         try (IndexReader reader = DirectoryReader.open(dir)) {
             TestSearchContext context = new TestSearchContextWithRewriteAndCancellation(
-                    null, indexShard, newContextSearcher(reader));
+                null, indexShard, newContextSearcher(reader));
             PrefixQuery prefixQuery = new PrefixQuery(new Term("foo", "a"));
             prefixQuery.setRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_REWRITE);
             context.parsedQuery(new ParsedQuery(prefixQuery));
@@ -979,7 +983,7 @@
 
             @Override
             public void search(List<LeafReaderContext> leaves, Weight weight, CollectorManager manager,
-                QuerySearchResult result, DocValueFormat[] formats, TotalHits totalHits) throws IOException {
+                               QuerySearchResult result, DocValueFormat[] formats, TotalHits totalHits) throws IOException {
                 final Query query = weight.getQuery();
                 assertTrue(query instanceof BooleanQuery);
                 List<BooleanClause> clauses = ((BooleanQuery) query).clauses();
@@ -987,7 +991,7 @@
                 assertTrue(clauses.get(0).getOccur() == Occur.FILTER);
                 assertTrue(clauses.get(1).getOccur() == Occur.SHOULD);
                 if (queryType == 0) {
-                    assertTrue (clauses.get(1).getQuery().getClass() ==
+                    assertTrue(clauses.get(1).getQuery().getClass() ==
                         LongPoint.newDistanceFeatureQuery("random_field", 1, 1, 1).getClass()
                     );
                 }
@@ -997,7 +1001,7 @@
 
             @Override
             public void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) {
-                assert(false); // should not be there, expected to search with CollectorManager
+                assert (false); // should not be there, expected to search with CollectorManager
             }
         };
     }
@@ -1019,7 +1023,7 @@
             @Override
             public void collect(int doc) throws IOException {
                 assert collected <= size : "should not collect more than " + size + " doc per segment, got " + collected;
-                ++ collected;
+                ++collected;
                 super.collect(doc);
             }
         };
@@ -1 +0,0 @@
-69838ed06991014d1f2530208bef66c59a554fe2
@@ -0,0 +1 @@
+180065e697f361ead5e6ee831582cc2cf37bd5b9