From 97336434661cf32f4674ddb43901219f678e2608 Mon Sep 17 00:00:00 2001 From: Bruno Roustant Date: Thu, 5 Mar 2020 11:54:23 +0100 Subject: [PATCH] LUCENE-9257: Always keep FST off-heap. Remove FSTLoadMode and Reader attributes. Closes #1320 --- lucene/CHANGES.txt | 2 + .../lucene50/Lucene50PostingsFormat.java | 9 +- .../lucene50/TestBlockPostingsFormat.java | 205 ----------------- .../blocktree/BlockTreeTermsReader.java | 53 +---- .../lucene/codecs/blocktree/FieldReader.java | 38 +--- .../lucene84/Lucene84PostingsFormat.java | 9 +- .../org/apache/lucene/index/CheckIndex.java | 5 +- .../lucene/index/DefaultIndexingChain.java | 3 +- .../apache/lucene/index/DirectoryReader.java | 29 +-- .../org/apache/lucene/index/IndexWriter.java | 4 +- .../lucene/index/IndexWriterConfig.java | 12 - .../lucene/index/LiveIndexWriterConfig.java | 12 - .../org/apache/lucene/index/ReaderPool.java | 8 +- .../lucene/index/ReadersAndUpdates.java | 18 +- .../lucene/index/SegmentCoreReaders.java | 5 +- .../apache/lucene/index/SegmentDocValues.java | 3 +- .../apache/lucene/index/SegmentMerger.java | 3 +- .../apache/lucene/index/SegmentReadState.java | 17 +- .../apache/lucene/index/SegmentReader.java | 5 +- .../lucene/index/StandardDirectoryReader.java | 25 +-- .../lucene84/TestLucene84PostingsFormat.java | 210 +----------------- .../org/apache/lucene/index/TestCodecs.java | 4 +- .../index/TestDemoParallelLeafReader.java | 2 +- .../test/org/apache/lucene/index/TestDoc.java | 6 +- .../lucene/index/TestDocumentWriter.java | 9 +- .../TestIndexWriterThreadsToSegments.java | 3 +- .../apache/lucene/index/TestReaderPool.java | 12 +- .../lucene/index/TestSegmentMerger.java | 12 +- .../lucene/index/TestSegmentReader.java | 3 +- .../lucene/index/TestSegmentTermDocs.java | 7 +- .../nrt/SegmentInfosSearcherManager.java | 5 +- .../cheapbastard/CheapBastardCodec.java | 3 +- .../mockrandom/MockRandomPostingsFormat.java | 3 +- .../index/BaseIndexFileFormatTestCase.java | 2 +- .../org/apache/lucene/index/RandomCodec.java | 4 +- .../lucene/index/RandomPostingsTester.java | 2 +- .../java/org/apache/lucene/util/TestUtil.java | 5 +- 37 files changed, 82 insertions(+), 675 deletions(-) diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index 5e710ceafde..58432299bb0 100644 --- a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -239,6 +239,8 @@ Other * LUCENE-9225: Rectangle extends LatLonGeometry so it can be used in a geometry collection. (Ignacio Vera) +* LUCENE-9257: Always keep FST off-heap. FSTLoadMode and Reader attributes removed. (Bruno Roustant) + ======================= Lucene 8.4.1 ======================= Bug Fixes diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene50/Lucene50PostingsFormat.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene50/Lucene50PostingsFormat.java index 4dfd935eea0..d8d23387bee 100644 --- a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene50/Lucene50PostingsFormat.java +++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene50/Lucene50PostingsFormat.java @@ -391,18 +391,11 @@ public class Lucene50PostingsFormat extends PostingsFormat { */ // NOTE: must be multiple of 64 because of PackedInts long-aligned encoding/decoding public final static int BLOCK_SIZE = 128; - private final BlockTreeTermsReader.FSTLoadMode fstLoadMode; /** Creates {@code Lucene50PostingsFormat} with default * settings. */ public Lucene50PostingsFormat() { - this(BlockTreeTermsReader.FSTLoadMode.AUTO); - } - - /** Creates {@code Lucene50PostingsFormat}. 
*/ - public Lucene50PostingsFormat(BlockTreeTermsReader.FSTLoadMode loadMode) { super("Lucene50"); - this.fstLoadMode = loadMode; } @Override @@ -420,7 +413,7 @@ public class Lucene50PostingsFormat extends PostingsFormat { PostingsReaderBase postingsReader = new Lucene50PostingsReader(state); boolean success = false; try { - FieldsProducer ret = new BlockTreeTermsReader(postingsReader, state, fstLoadMode); + FieldsProducer ret = new BlockTreeTermsReader(postingsReader, state); success = true; return ret; } finally { diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/TestBlockPostingsFormat.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/TestBlockPostingsFormat.java index 690eba8aeeb..1ad5d2f4518 100644 --- a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/TestBlockPostingsFormat.java +++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene50/TestBlockPostingsFormat.java @@ -18,17 +18,13 @@ package org.apache.lucene.codecs.lucene50; import java.io.IOException; -import java.nio.file.Path; import java.util.Arrays; import java.util.Collections; -import java.util.HashMap; import java.util.List; -import java.util.Map; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.CompetitiveImpactAccumulator; -import org.apache.lucene.codecs.blocktree.BlockTreeTermsReader; import org.apache.lucene.codecs.blocktree.FieldReader; import org.apache.lucene.codecs.blocktree.Stats; import org.apache.lucene.codecs.lucene50.Lucene50ScoreSkipReader.MutableImpactList; @@ -39,16 +35,11 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.Impact; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.FSDirectory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; -import org.apache.lucene.store.MMapDirectory; -import org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.util.TestUtil; /** @@ -62,202 +53,6 @@ public class TestBlockPostingsFormat extends BasePostingsFormatTestCase { return codec; } - public void testFstOffHeap() throws IOException { - Path tempDir = createTempDir(); - try (Directory d = FSDirectory.open(tempDir)) { - assumeTrue("only works with mmap directory", d instanceof MMapDirectory); - try (IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random())))) { - DirectoryReader readerFromWriter = DirectoryReader.open(w); - for (int i = 0; i < 50; i++) { - Document doc = new Document(); - doc.add(newStringField("id", "" + i, Field.Store.NO)); - doc.add(newStringField("field", Character.toString((char) (97 + i)), Field.Store.NO)); - doc.add(newStringField("field", Character.toString((char) (98 + i)), Field.Store.NO)); - if (rarely()) { - w.addDocument(doc); - } else { - w.updateDocument(new Term("id", "" + i), doc); - } - if (random().nextBoolean()) { - w.commit(); - } - - if (random().nextBoolean()) { - DirectoryReader newReader = DirectoryReader.openIfChanged(readerFromWriter); - if (newReader != null) { - readerFromWriter.close(); - readerFromWriter = newReader; - } - for (LeafReaderContext leaf : readerFromWriter.leaves()) { - FieldReader field = (FieldReader) 
leaf.reader().terms("field"); - FieldReader id = (FieldReader) leaf.reader().terms("id"); - assertFalse(id.isFstOffHeap()); - assertTrue(field.isFstOffHeap()); - } - } - } - readerFromWriter.close(); - - w.forceMerge(1); - try (DirectoryReader r = DirectoryReader.open(w)) { - assertEquals(1, r.leaves().size()); - FieldReader field = (FieldReader) r.leaves().get(0).reader().terms("field"); - FieldReader id = (FieldReader) r.leaves().get(0).reader().terms("id"); - assertFalse(id.isFstOffHeap()); - assertTrue(field.isFstOffHeap()); - } - w.commit(); - try (DirectoryReader r = DirectoryReader.open(d)) { - assertEquals(1, r.leaves().size()); - FieldReader field = (FieldReader) r.leaves().get(0).reader().terms("field"); - FieldReader id = (FieldReader) r.leaves().get(0).reader().terms("id"); - assertTrue(id.isFstOffHeap()); - assertTrue(field.isFstOffHeap()); - } - } - } - - try (Directory d = new SimpleFSDirectory(tempDir)) { - // test auto - try (DirectoryReader r = DirectoryReader.open(d)) { - assertEquals(1, r.leaves().size()); - FieldReader field = (FieldReader) r.leaves().get(0).reader().terms("field"); - FieldReader id = (FieldReader) r.leaves().get(0).reader().terms("id"); - assertFalse(id.isFstOffHeap()); - assertFalse(field.isFstOffHeap()); - } - } - - try (Directory d = new SimpleFSDirectory(tempDir)) { - // test per field - Map readerAttributes = new HashMap<>(); - readerAttributes.put(BlockTreeTermsReader.FST_MODE_KEY, BlockTreeTermsReader.FSTLoadMode.OFF_HEAP.name()); - readerAttributes.put(BlockTreeTermsReader.FST_MODE_KEY + ".field", BlockTreeTermsReader.FSTLoadMode.ON_HEAP.name()); - try (DirectoryReader r = DirectoryReader.open(d, readerAttributes)) { - assertEquals(1, r.leaves().size()); - FieldReader field = (FieldReader) r.leaves().get(0).reader().terms("field"); - FieldReader id = (FieldReader) r.leaves().get(0).reader().terms("id"); - assertTrue(id.isFstOffHeap()); - assertFalse(field.isFstOffHeap()); - } - } - - IllegalArgumentException invalid = expectThrows(IllegalArgumentException.class, () -> { - try (Directory d = new SimpleFSDirectory(tempDir)) { - Map readerAttributes = new HashMap<>(); - readerAttributes.put(BlockTreeTermsReader.FST_MODE_KEY, "invalid"); - DirectoryReader.open(d, readerAttributes); - } - }); - - assertEquals("Invalid value for blocktree.terms.fst expected one of: [OFF_HEAP, ON_HEAP, OPTIMIZE_UPDATES_OFF_HEAP, AUTO] but was: invalid", invalid.getMessage()); - } - - public void testDisableFSTOffHeap() throws IOException { - Path tempDir = createTempDir(); - try (Directory d = MMapDirectory.open(tempDir)) { - try (IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random())) - .setReaderAttributes(Collections.singletonMap(BlockTreeTermsReader.FST_MODE_KEY, BlockTreeTermsReader.FSTLoadMode.ON_HEAP.name())))) { - assumeTrue("only works with mmap directory", d instanceof MMapDirectory); - DirectoryReader readerFromWriter = DirectoryReader.open(w); - for (int i = 0; i < 50; i++) { - Document doc = new Document(); - doc.add(newStringField("id", "" + i, Field.Store.NO)); - doc.add(newStringField("field", Character.toString((char) (97 + i)), Field.Store.NO)); - doc.add(newStringField("field", Character.toString((char) (98 + i)), Field.Store.NO)); - if (rarely()) { - w.addDocument(doc); - } else { - w.updateDocument(new Term("id", "" + i), doc); - } - if (random().nextBoolean()) { - w.commit(); - } - if (random().nextBoolean()) { - DirectoryReader newReader = DirectoryReader.openIfChanged(readerFromWriter); - if (newReader != null) { - 
readerFromWriter.close(); - readerFromWriter = newReader; - } - for (LeafReaderContext leaf : readerFromWriter.leaves()) { - FieldReader field = (FieldReader) leaf.reader().terms("field"); - FieldReader id = (FieldReader) leaf.reader().terms("id"); - assertFalse(id.isFstOffHeap()); - assertFalse(field.isFstOffHeap()); - } - } - } - readerFromWriter.close(); - w.forceMerge(1); - w.commit(); - } - try (DirectoryReader r = DirectoryReader.open(d, Collections.singletonMap(BlockTreeTermsReader.FST_MODE_KEY, BlockTreeTermsReader.FSTLoadMode.ON_HEAP.name()))) { - assertEquals(1, r.leaves().size()); - FieldReader field = (FieldReader) r.leaves().get(0).reader().terms("field"); - FieldReader id = (FieldReader) r.leaves().get(0).reader().terms("id"); - assertFalse(id.isFstOffHeap()); - assertFalse(field.isFstOffHeap()); - } - } - } - - public void testAlwaysFSTOffHeap() throws IOException { - boolean alsoLoadIdOffHeap = random().nextBoolean(); - BlockTreeTermsReader.FSTLoadMode loadMode; - if (alsoLoadIdOffHeap) { - loadMode = BlockTreeTermsReader.FSTLoadMode.OFF_HEAP; - } else { - loadMode = BlockTreeTermsReader.FSTLoadMode.OPTIMIZE_UPDATES_OFF_HEAP; - } - try (Directory d = newDirectory()) { // any directory should work now - try (IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random())) - .setReaderAttributes(Collections.singletonMap(BlockTreeTermsReader.FST_MODE_KEY, loadMode.name())))) { - DirectoryReader readerFromWriter = DirectoryReader.open(w); - for (int i = 0; i < 50; i++) { - Document doc = new Document(); - doc.add(newStringField("id", "" + i, Field.Store.NO)); - doc.add(newStringField("field", Character.toString((char) (97 + i)), Field.Store.NO)); - doc.add(newStringField("field", Character.toString((char) (98 + i)), Field.Store.NO)); - if (rarely()) { - w.addDocument(doc); - } else { - w.updateDocument(new Term("id", "" + i), doc); - } - if (random().nextBoolean()) { - w.commit(); - } - if (random().nextBoolean()) { - DirectoryReader newReader = DirectoryReader.openIfChanged(readerFromWriter); - if (newReader != null) { - readerFromWriter.close(); - readerFromWriter = newReader; - } - for (LeafReaderContext leaf : readerFromWriter.leaves()) { - FieldReader field = (FieldReader) leaf.reader().terms("field"); - FieldReader id = (FieldReader) leaf.reader().terms("id"); - if (alsoLoadIdOffHeap) { - assertTrue(id.isFstOffHeap()); - } else { - assertFalse(id.isFstOffHeap()); - } - assertTrue(field.isFstOffHeap()); - } - } - } - readerFromWriter.close(); - w.forceMerge(1); - w.commit(); - } - try (DirectoryReader r = DirectoryReader.open(d, Collections.singletonMap(BlockTreeTermsReader.FST_MODE_KEY, loadMode.name()))) { - assertEquals(1, r.leaves().size()); - FieldReader field = (FieldReader) r.leaves().get(0).reader().terms("field"); - FieldReader id = (FieldReader) r.leaves().get(0).reader().terms("id"); - assertTrue(id.isFstOffHeap()); - assertTrue(field.isFstOffHeap()); - } - } - } - /** Make sure the final sub-block(s) are not skipped. 
*/ public void testFinalBlock() throws Exception { Directory d = newDirectory(); diff --git a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsReader.java b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsReader.java index 78112590965..f369e80f9ba 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsReader.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/BlockTreeTermsReader.java @@ -19,7 +19,6 @@ package org.apache.lucene.codecs.blocktree; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -76,39 +75,6 @@ import org.apache.lucene.util.fst.Outputs; public final class BlockTreeTermsReader extends FieldsProducer { - /** - * An enum that allows to control if term index FSTs are loaded into memory or read off-heap - */ - public enum FSTLoadMode { - /** - * Always read FSTs from disk. - * NOTE: If this option is used the FST will be read off-heap even if buffered directory implementations - * are used. - */ - OFF_HEAP, - /** - * Never read FSTs from disk ie. all fields FSTs are loaded into memory - */ - ON_HEAP, - /** - * Always read FSTs from disk. - * An exception is made for ID fields in an IndexWriter context which are always loaded into memory. - * This is useful to guarantee best update performance even if a non MMapDirectory is used. - * NOTE: If this option is used the FST will be read off-heap even if buffered directory implementations - * are used. - * See {@link FSTLoadMode#AUTO} - */ - OPTIMIZE_UPDATES_OFF_HEAP, - /** - * Automatically make the decision if FSTs are read from disk depending if the segment read from an MMAPDirectory - * An exception is made for ID fields in an IndexWriter context which are always loaded into memory. - */ - AUTO - } - - /** Attribute key for fst mode. */ - public static final String FST_MODE_KEY = "blocktree.terms.fst"; - static final Outputs FST_OUTPUTS = ByteSequenceOutputs.getSingleton(); static final BytesRef NO_OUTPUT = FST_OUTPUTS.getNoOutput(); @@ -160,7 +126,7 @@ public final class BlockTreeTermsReader extends FieldsProducer { final int version; /** Sole constructor. */ - public BlockTreeTermsReader(PostingsReaderBase postingsReader, SegmentReadState state, FSTLoadMode defaultLoadMode) throws IOException { + public BlockTreeTermsReader(PostingsReaderBase postingsReader, SegmentReadState state) throws IOException { boolean success = false; this.postingsReader = postingsReader; @@ -197,7 +163,6 @@ public final class BlockTreeTermsReader extends FieldsProducer { seekDir(termsIn); seekDir(indexIn); - final FSTLoadMode fstLoadMode = getLoadMode(state.readerAttributes, FST_MODE_KEY, defaultLoadMode); final int numFields = termsIn.readVInt(); if (numFields < 0) { throw new CorruptIndexException("invalid numFields: " + numFields, termsIn); @@ -235,11 +200,10 @@ public final class BlockTreeTermsReader extends FieldsProducer { if (sumTotalTermFreq < sumDocFreq) { // #positions must be >= #postings throw new CorruptIndexException("invalid sumTotalTermFreq: " + sumTotalTermFreq + " sumDocFreq: " + sumDocFreq, termsIn); } - final FSTLoadMode perFieldLoadMode = getLoadMode(state.readerAttributes, FST_MODE_KEY + "." 
+ fieldInfo.name, fstLoadMode); final long indexStartFP = indexIn.readVLong(); FieldReader previous = fieldMap.put(fieldInfo.name, new FieldReader(this, fieldInfo, numTerms, rootCode, sumTotalTermFreq, sumDocFreq, docCount, - indexStartFP, indexIn, minTerm, maxTerm, state.openedFromWriter, perFieldLoadMode)); + indexStartFP, indexIn, minTerm, maxTerm, state.openedFromWriter)); if (previous != null) { throw new CorruptIndexException("duplicate field: " + fieldInfo.name, termsIn); } @@ -256,19 +220,6 @@ public final class BlockTreeTermsReader extends FieldsProducer { } } - private static FSTLoadMode getLoadMode(Map attributes, String key, FSTLoadMode defaultValue) { - String value = attributes.get(key); - if (value == null) { - return defaultValue; - } - try { - return FSTLoadMode.valueOf(value); - } catch (IllegalArgumentException ex) { - throw new IllegalArgumentException("Invalid value for " + key + " expected one of: " - + Arrays.toString(FSTLoadMode.values()) + " but was: " + value, ex); - } - } - private static BytesRef readBytesRef(IndexInput in) throws IOException { int numBytes = in.readVInt(); if (numBytes < 0) { diff --git a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/FieldReader.java b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/FieldReader.java index c185cbcb733..79816f2862d 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/FieldReader.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/FieldReader.java @@ -25,7 +25,6 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.store.ByteArrayDataInput; -import org.apache.lucene.store.ByteBufferIndexInput; import org.apache.lucene.store.IndexInput; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; @@ -61,11 +60,10 @@ public final class FieldReader extends Terms implements Accountable { final BlockTreeTermsReader parent; final FST index; - final boolean isFSTOffHeap; //private boolean DEBUG; FieldReader(BlockTreeTermsReader parent, FieldInfo fieldInfo, long numTerms, BytesRef rootCode, long sumTotalTermFreq, long sumDocFreq, int docCount, - long indexStartFP, IndexInput indexIn, BytesRef minTerm, BytesRef maxTerm, boolean openedFromWriter, BlockTreeTermsReader.FSTLoadMode fstLoadMode) throws IOException { + long indexStartFP, IndexInput indexIn, BytesRef minTerm, BytesRef maxTerm, boolean openedFromWriter) throws IOException { assert numTerms > 0; this.fieldInfo = fieldInfo; //DEBUG = BlockTreeTermsReader.DEBUG && fieldInfo.name.equals("id"); @@ -82,32 +80,11 @@ public final class FieldReader extends Terms implements Accountable { // System.out.println("BTTR: seg=" + segment + " field=" + fieldInfo.name + " rootBlockCode=" + rootCode + " divisor=" + indexDivisor); // } rootBlockFP = (new ByteArrayDataInput(rootCode.bytes, rootCode.offset, rootCode.length)).readVLong() >>> BlockTreeTermsReader.OUTPUT_FLAGS_NUM_BITS; - // Initialize FST offheap if index is MMapDirectory and - // docCount != sumDocFreq implying field is not primary key + // Initialize FST always off-heap. 
if (indexIn != null) { - switch (fstLoadMode) { - case ON_HEAP: - isFSTOffHeap = false; - break; - case OFF_HEAP: - isFSTOffHeap = true; - break; - case OPTIMIZE_UPDATES_OFF_HEAP: - isFSTOffHeap = ((this.docCount != this.sumDocFreq) || openedFromWriter == false); - break; - case AUTO: - isFSTOffHeap = ((this.docCount != this.sumDocFreq) || openedFromWriter == false) && indexIn instanceof ByteBufferIndexInput; - break; - default: - throw new IllegalStateException("unknown enum constant: " + fstLoadMode); - } final IndexInput clone = indexIn.clone(); clone.seek(indexStartFP); - if (isFSTOffHeap) { - index = new FST<>(clone, ByteSequenceOutputs.getSingleton(), new OffHeapFSTStore()); - } else { - index = new FST<>(clone, ByteSequenceOutputs.getSingleton()); - } + index = new FST<>(clone, ByteSequenceOutputs.getSingleton(), new OffHeapFSTStore()); /* if (false) { final String dotFileName = segment + "_" + fieldInfo.name + ".dot"; @@ -118,7 +95,6 @@ public final class FieldReader extends Terms implements Accountable { } */ } else { - isFSTOffHeap = false; index = null; } } @@ -224,12 +200,4 @@ public final class FieldReader extends Terms implements Accountable { public String toString() { return "BlockTreeTerms(seg=" + parent.segment +" terms=" + numTerms + ",postings=" + sumDocFreq + ",positions=" + sumTotalTermFreq + ",docs=" + docCount + ")"; } - - /** - * Returns true iff the FST is read off-heap. - */ - public boolean isFstOffHeap() { - return isFSTOffHeap; - } - } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene84/Lucene84PostingsFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene84/Lucene84PostingsFormat.java index 23dbfec2f10..f26b1a047f0 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene84/Lucene84PostingsFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene84/Lucene84PostingsFormat.java @@ -393,24 +393,21 @@ public final class Lucene84PostingsFormat extends PostingsFormat { private final int minTermBlockSize; private final int maxTermBlockSize; - private final BlockTreeTermsReader.FSTLoadMode fstLoadMode; - /** Creates {@code Lucene84PostingsFormat} with default * settings. */ public Lucene84PostingsFormat() { - this(BlockTreeTermsWriter.DEFAULT_MIN_BLOCK_SIZE, BlockTreeTermsWriter.DEFAULT_MAX_BLOCK_SIZE, BlockTreeTermsReader.FSTLoadMode.AUTO); + this(BlockTreeTermsWriter.DEFAULT_MIN_BLOCK_SIZE, BlockTreeTermsWriter.DEFAULT_MAX_BLOCK_SIZE); } /** Creates {@code Lucene84PostingsFormat} with custom * values for {@code minBlockSize} and {@code * maxBlockSize} passed to block terms dictionary. 
* @see BlockTreeTermsWriter#BlockTreeTermsWriter(SegmentWriteState,PostingsWriterBase,int,int) */ - public Lucene84PostingsFormat(int minTermBlockSize, int maxTermBlockSize, BlockTreeTermsReader.FSTLoadMode loadMode) { + public Lucene84PostingsFormat(int minTermBlockSize, int maxTermBlockSize) { super("Lucene84"); BlockTreeTermsWriter.validateSettings(minTermBlockSize, maxTermBlockSize); this.minTermBlockSize = minTermBlockSize; this.maxTermBlockSize = maxTermBlockSize; - this.fstLoadMode = loadMode; } @Override @@ -441,7 +438,7 @@ public final class Lucene84PostingsFormat extends PostingsFormat { PostingsReaderBase postingsReader = new Lucene84PostingsReader(state); boolean success = false; try { - FieldsProducer ret = new BlockTreeTermsReader(postingsReader, state, fstLoadMode); + FieldsProducer ret = new BlockTreeTermsReader(postingsReader, state); success = true; return ret; } finally { diff --git a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java index 45552d0ed0b..347f2ff2c38 100644 --- a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java +++ b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java @@ -25,7 +25,6 @@ import java.nio.file.Paths; import java.text.NumberFormat; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -39,7 +38,6 @@ import org.apache.lucene.codecs.PointsReader; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.codecs.TermVectorsReader; -import org.apache.lucene.codecs.blocktree.BlockTreeTermsReader; import org.apache.lucene.document.Document; import org.apache.lucene.document.DocumentStoredFieldVisitor; import org.apache.lucene.index.CheckIndex.Status.DocValuesStatus; @@ -674,8 +672,7 @@ public final class CheckIndex implements Closeable { long startOpenReaderNS = System.nanoTime(); if (infoStream != null) infoStream.print(" test: open reader........."); - reader = new SegmentReader(info, sis.getIndexCreatedVersionMajor(), false, IOContext.DEFAULT, - Collections.singletonMap(BlockTreeTermsReader.FST_MODE_KEY, BlockTreeTermsReader.FSTLoadMode.OFF_HEAP.name())); // lets keep stuff on disk for check-index + reader = new SegmentReader(info, sis.getIndexCreatedVersionMajor(), false, IOContext.DEFAULT); msg(infoStream, String.format(Locale.ROOT, "OK [took %.3f sec]", nsToSec(System.nanoTime()-startOpenReaderNS))); segInfoStat.openReaderPassed = true; diff --git a/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java b/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java index 0d455b559b2..c2dae6063b3 100644 --- a/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java +++ b/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java @@ -21,7 +21,6 @@ import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -132,7 +131,7 @@ final class DefaultIndexingChain extends DocConsumer { if (docState.infoStream.isEnabled("IW")) { docState.infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to write norms"); } - SegmentReadState readState = new SegmentReadState(state.directory, state.segmentInfo, state.fieldInfos, true, IOContext.READ, state.segmentSuffix, 
Collections.emptyMap()); + SegmentReadState readState = new SegmentReadState(state.directory, state.segmentInfo, state.fieldInfos, true, IOContext.READ, state.segmentSuffix); t0 = System.nanoTime(); writeDocValues(state, sortMap); diff --git a/lucene/core/src/java/org/apache/lucene/index/DirectoryReader.java b/lucene/core/src/java/org/apache/lucene/index/DirectoryReader.java index 1b2a81f0f87..f7a86684c57 100644 --- a/lucene/core/src/java/org/apache/lucene/index/DirectoryReader.java +++ b/lucene/core/src/java/org/apache/lucene/index/DirectoryReader.java @@ -23,7 +23,6 @@ import java.nio.file.NoSuchFileException; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.Map; import org.apache.lucene.search.SearcherManager; // javadocs import org.apache.lucene.store.Directory; @@ -61,19 +60,7 @@ public abstract class DirectoryReader extends BaseCompositeReader { * @throws IOException if there is a low-level IO error */ public static DirectoryReader open(final Directory directory) throws IOException { - return open(directory, Collections.emptyMap()); - } - - /** Returns a IndexReader reading the index in the given - * Directory - * @param directory the index directory - * @param readerAttributes the reader attributes passed to the {@link org.apache.lucene.codecs.Codec} layer of the - * directory reader. This attribute map is forwarded to all leaf readers as well as to the readers - * that are opened subsequently via the different flavors of {@link DirectoryReader#openIfChanged(DirectoryReader)} - * @throws IOException if there is a low-level IO error - */ - public static DirectoryReader open(final Directory directory, final Map readerAttributes) throws IOException { - return StandardDirectoryReader.open(directory, null, readerAttributes); + return StandardDirectoryReader.open(directory, null); } /** @@ -122,19 +109,7 @@ public abstract class DirectoryReader extends BaseCompositeReader { * @throws IOException if there is a low-level IO error */ public static DirectoryReader open(final IndexCommit commit) throws IOException { - return open(commit, Collections.emptyMap()); - } - - /** Expert: returns an IndexReader reading the index in the given - * {@link IndexCommit}. - * @param commit the commit point to open - * @param readerAttributes the reader attributes passed to the {@link org.apache.lucene.codecs.Codec} layer of the - * directory reader. 
This attribute map is forwarded to all leaf readers as well as to the readers - * that are opened subsequently via the different flavors of {@link DirectoryReader#openIfChanged(DirectoryReader)} - * @throws IOException if there is a low-level IO error - */ - public static DirectoryReader open(final IndexCommit commit, Map readerAttributes) throws IOException { - return StandardDirectoryReader.open(commit.getDirectory(), commit, readerAttributes); + return StandardDirectoryReader.open(commit.getDirectory(), commit); } /** diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java index bfa04feb1bb..c470ac74a98 100644 --- a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java @@ -525,7 +525,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable, // reader; in theory we could instead do similar retry logic, // just like we do when loading segments_N - r = StandardDirectoryReader.open(this, segmentInfos, applyAllDeletes, writeAllDeletes, config.getReaderAttributes()); + r = StandardDirectoryReader.open(this, segmentInfos, applyAllDeletes, writeAllDeletes); if (infoStream.isEnabled("IW")) { infoStream.message("IW", "return reader version=" + r.getVersion() + " reader=" + r); } @@ -887,7 +887,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable, enableTestPoints, this::newSegmentName, config, directoryOrig, directory, globalFieldNumberMap); readerPool = new ReaderPool(directory, directoryOrig, segmentInfos, globalFieldNumberMap, - bufferedUpdatesStream::getCompletedDelGen, infoStream, conf.getSoftDeletesField(), reader, config.getReaderAttributes()); + bufferedUpdatesStream::getCompletedDelGen, infoStream, conf.getSoftDeletesField(), reader); if (config.getReaderPooling()) { readerPool.enableReaderPooling(); } diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java index 638ec0a39bd..6a4bd619842 100644 --- a/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java +++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java @@ -20,8 +20,6 @@ package org.apache.lucene.index; import java.io.PrintStream; import java.util.Arrays; import java.util.EnumSet; -import java.util.Map; -import java.util.Objects; import java.util.stream.Collectors; import org.apache.lucene.analysis.Analyzer; @@ -566,14 +564,4 @@ public final class IndexWriterConfig extends LiveIndexWriterConfig { this.softDeletesField = softDeletesField; return this; } - - /** - * Sets the reader attributes used for all readers pulled from the IndexWriter. Reader attributes allow configuration - * of low-level aspects like ram utilization on a per-reader basis. - * Note: This method make a shallow copy of the provided map. 
- */ - public IndexWriterConfig setReaderAttributes(Map readerAttributes) { - this.readerAttributes = Map.copyOf(Objects.requireNonNull(readerAttributes)); - return this; - } } diff --git a/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java b/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java index 7236b33504a..c29944f763e 100644 --- a/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java +++ b/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java @@ -18,7 +18,6 @@ package org.apache.lucene.index; import java.util.Collections; -import java.util.Map; import java.util.Set; import org.apache.lucene.analysis.Analyzer; @@ -114,9 +113,6 @@ public class LiveIndexWriterConfig { /** soft deletes field */ protected String softDeletesField = null; - /** the attributes for the NRT readers */ - protected Map readerAttributes = Collections.emptyMap(); - /** Amount of time to wait for merges returned by MergePolicy.findFullFlushMerges(...) */ protected volatile double maxCommitMergeWaitSeconds; @@ -528,16 +524,8 @@ public class LiveIndexWriterConfig { sb.append("indexSort=").append(getIndexSort()).append("\n"); sb.append("checkPendingFlushOnUpdate=").append(isCheckPendingFlushOnUpdate()).append("\n"); sb.append("softDeletesField=").append(getSoftDeletesField()).append("\n"); - sb.append("readerAttributes=").append(getReaderAttributes()).append("\n"); sb.append("maxCommitMergeWaitSeconds=").append(getMaxCommitMergeWaitSeconds()).append("\n"); sb.append("indexWriterEvents=").append(getIndexWriterEvents().getClass().getName()).append("\n"); return sb.toString(); } - - /** - * Returns the reader attributes passed to all published readers opened on or within the IndexWriter - */ - public Map getReaderAttributes() { - return this.readerAttributes; - } } diff --git a/lucene/core/src/java/org/apache/lucene/index/ReaderPool.java b/lucene/core/src/java/org/apache/lucene/index/ReaderPool.java index 859f8990302..b792be26873 100644 --- a/lucene/core/src/java/org/apache/lucene/index/ReaderPool.java +++ b/lucene/core/src/java/org/apache/lucene/index/ReaderPool.java @@ -54,7 +54,6 @@ final class ReaderPool implements Closeable { private final InfoStream infoStream; private final SegmentInfos segmentInfos; private final String softDeletesField; - private final Map readerAttributes; // This is a "write once" variable (like the organic dye // on a DVD-R that may or may not be heated by a laser and // then cooled to permanently record the event): it's @@ -72,7 +71,7 @@ final class ReaderPool implements Closeable { ReaderPool(Directory directory, Directory originalDirectory, SegmentInfos segmentInfos, FieldInfos.FieldNumbers fieldNumbers, LongSupplier completedDelGenSupplier, InfoStream infoStream, - String softDeletesField, StandardDirectoryReader reader, Map readerAttributes) throws IOException { + String softDeletesField, StandardDirectoryReader reader) throws IOException { this.directory = directory; this.originalDirectory = originalDirectory; this.segmentInfos = segmentInfos; @@ -80,7 +79,6 @@ final class ReaderPool implements Closeable { this.completedDelGenSupplier = completedDelGenSupplier; this.infoStream = infoStream; this.softDeletesField = softDeletesField; - this.readerAttributes = readerAttributes; if (reader != null) { // Pre-enroll all segment readers into the reader pool; this is necessary so // any in-memory NRT live docs are correctly carried over, and so NRT readers @@ -93,7 +91,7 @@ final class ReaderPool implements Closeable { 
SegmentReader newReader = new SegmentReader(segmentInfos.info(i), segReader, segReader.getLiveDocs(), segReader.getHardLiveDocs(), segReader.numDocs(), true); readerMap.put(newReader.getOriginalSegmentInfo(), new ReadersAndUpdates(segmentInfos.getIndexCreatedVersionMajor(), - newReader, newPendingDeletes(newReader, newReader.getOriginalSegmentInfo()), readerAttributes)); + newReader, newPendingDeletes(newReader, newReader.getOriginalSegmentInfo()))); } } } @@ -374,7 +372,7 @@ final class ReaderPool implements Closeable { if (create == false) { return null; } - rld = new ReadersAndUpdates(segmentInfos.getIndexCreatedVersionMajor(), info, newPendingDeletes(info), readerAttributes); + rld = new ReadersAndUpdates(segmentInfos.getIndexCreatedVersionMajor(), info, newPendingDeletes(info)); // Steal initial reference: readerMap.put(info, rld); } else { diff --git a/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java b/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java index 73f7d664abf..2baa0d0d7cd 100644 --- a/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java +++ b/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java @@ -36,7 +36,6 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.DocValuesConsumer; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.FieldInfosFormat; -import org.apache.lucene.codecs.blocktree.BlockTreeTermsReader; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FlushInfo; import org.apache.lucene.store.IOContext; @@ -89,22 +88,17 @@ final class ReadersAndUpdates { final AtomicLong ramBytesUsed = new AtomicLong(); - private final Map readerAttributes; - - ReadersAndUpdates(int indexCreatedVersionMajor, SegmentCommitInfo info, - PendingDeletes pendingDeletes, Map readerAttributes) { + ReadersAndUpdates(int indexCreatedVersionMajor, SegmentCommitInfo info, PendingDeletes pendingDeletes) { this.info = info; this.pendingDeletes = pendingDeletes; this.indexCreatedVersionMajor = indexCreatedVersionMajor; - this.readerAttributes = readerAttributes; } /** Init from a previously opened SegmentReader. * *
NOTE: steals incoming ref from reader. */ - ReadersAndUpdates(int indexCreatedVersionMajor, SegmentReader reader, PendingDeletes pendingDeletes, - Map readerAttributes) throws IOException { - this(indexCreatedVersionMajor, reader.getOriginalSegmentInfo(), pendingDeletes, readerAttributes); + ReadersAndUpdates(int indexCreatedVersionMajor, SegmentReader reader, PendingDeletes pendingDeletes) throws IOException { + this(indexCreatedVersionMajor, reader.getOriginalSegmentInfo(), pendingDeletes); this.reader = reader; pendingDeletes.onNewReader(reader, info); } @@ -174,7 +168,7 @@ final class ReadersAndUpdates { public synchronized SegmentReader getReader(IOContext context) throws IOException { if (reader == null) { // We steal returned ref: - reader = new SegmentReader(info, indexCreatedVersionMajor, true, context, readerAttributes); + reader = new SegmentReader(info, indexCreatedVersionMajor, true, context); pendingDeletes.onNewReader(reader, info); } @@ -541,9 +535,7 @@ final class ReadersAndUpdates { // IndexWriter.commitMergedDeletes). final SegmentReader reader; if (this.reader == null) { - reader = new SegmentReader(info, indexCreatedVersionMajor, true, IOContext.READONCE, - // we don't need terms - lets leave them off-heap if possible - Collections.singletonMap(BlockTreeTermsReader.FST_MODE_KEY, BlockTreeTermsReader.FSTLoadMode.OFF_HEAP.name())); + reader = new SegmentReader(info, indexCreatedVersionMajor, true, IOContext.READONCE); pendingDeletes.onNewReader(reader, info); } else { reader = this.reader; diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java b/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java index 371abc647bd..21e6c82cb60 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java +++ b/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java @@ -24,7 +24,6 @@ import java.io.IOException; import java.nio.file.NoSuchFileException; import java.util.Collections; import java.util.LinkedHashSet; -import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; @@ -90,7 +89,7 @@ final class SegmentCoreReaders { private final Set coreClosedListeners = Collections.synchronizedSet(new LinkedHashSet()); - SegmentCoreReaders(Directory dir, SegmentCommitInfo si, boolean openedFromWriter, IOContext context, Map readerAttributes) throws IOException { + SegmentCoreReaders(Directory dir, SegmentCommitInfo si, boolean openedFromWriter, IOContext context) throws IOException { final Codec codec = si.info.getCodec(); final Directory cfsDir; // confusing name: if (cfs) it's the cfsdir, otherwise it's the segment's directory. 
@@ -108,7 +107,7 @@ final class SegmentCoreReaders { coreFieldInfos = codec.fieldInfosFormat().read(cfsDir, si.info, "", context); - final SegmentReadState segmentReadState = new SegmentReadState(cfsDir, si.info, coreFieldInfos, openedFromWriter, context, readerAttributes); + final SegmentReadState segmentReadState = new SegmentReadState(cfsDir, si.info, coreFieldInfos, openedFromWriter, context); final PostingsFormat format = codec.postingsFormat(); // Ask codec for its Fields fields = format.fieldsProducer(segmentReadState); diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java b/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java index 6cb1a8a98d1..fee0050886d 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java +++ b/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java @@ -18,7 +18,6 @@ package org.apache.lucene.index; import java.io.IOException; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -47,7 +46,7 @@ final class SegmentDocValues { } // set SegmentReadState to list only the fields that are relevant to that gen - SegmentReadState srs = new SegmentReadState(dvDir, si.info, infos, false, IOContext.READ, segmentSuffix, Collections.emptyMap()); + SegmentReadState srs = new SegmentReadState(dvDir, si.info, infos, false, IOContext.READ, segmentSuffix); DocValuesFormat dvFormat = si.info.getCodec().docValuesFormat(); return new RefCount(dvFormat.fieldsProducer(srs)) { @SuppressWarnings("synthetic-access") diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java b/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java index c2677c95a5a..a91cea2a66c 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java +++ b/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java @@ -18,7 +18,6 @@ package org.apache.lucene.index; import java.io.IOException; -import java.util.Collections; import java.util.List; import org.apache.lucene.codecs.Codec; @@ -113,7 +112,7 @@ final class SegmentMerger { final SegmentWriteState segmentWriteState = new SegmentWriteState(mergeState.infoStream, directory, mergeState.segmentInfo, mergeState.mergeFieldInfos, null, context); final SegmentReadState segmentReadState = new SegmentReadState(directory, mergeState.segmentInfo, mergeState.mergeFieldInfos, - true, IOContext.READ, segmentWriteState.segmentSuffix, Collections.emptyMap()); + true, IOContext.READ, segmentWriteState.segmentSuffix); if (mergeState.mergeFieldInfos.hasNorms()) { if (mergeState.infoStream.isEnabled("SM")) { diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentReadState.java b/lucene/core/src/java/org/apache/lucene/index/SegmentReadState.java index c8fd3d148c4..107d754ed4f 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SegmentReadState.java +++ b/lucene/core/src/java/org/apache/lucene/index/SegmentReadState.java @@ -16,9 +16,6 @@ */ package org.apache.lucene.index; - -import java.util.Map; - import org.apache.lucene.codecs.PostingsFormat; // javadocs import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; // javadocs import org.apache.lucene.store.Directory; @@ -56,16 +53,10 @@ public class SegmentReadState { */ public final boolean openedFromWriter; - /** - * The reader attributes for this reader. This is used to configure low level options on the codec layer. - * This attribute map is user supplied at reader creation time. 
- */ - public final Map readerAttributes; - /** Create a {@code SegmentReadState}. */ public SegmentReadState(Directory dir, SegmentInfo info, - FieldInfos fieldInfos, boolean openedFromWriter, IOContext context, Map readerAttributes) { - this(dir, info, fieldInfos, openedFromWriter, context, "", readerAttributes); + FieldInfos fieldInfos, boolean openedFromWriter, IOContext context) { + this(dir, info, fieldInfos, openedFromWriter, context, ""); } /** Create a {@code SegmentReadState}. */ @@ -73,14 +64,13 @@ public class SegmentReadState { SegmentInfo info, FieldInfos fieldInfos, boolean openedFromWriter, IOContext context, - String segmentSuffix, Map readerAttributes) { + String segmentSuffix) { this.directory = dir; this.segmentInfo = info; this.fieldInfos = fieldInfos; this.context = context; this.segmentSuffix = segmentSuffix; this.openedFromWriter = openedFromWriter; - this.readerAttributes = Map.copyOf(readerAttributes); } /** Create a {@code SegmentReadState}. */ @@ -92,6 +82,5 @@ public class SegmentReadState { this.context = other.context; this.openedFromWriter = other.openedFromWriter; this.segmentSuffix = newSegmentSuffix; - this.readerAttributes = other.readerAttributes; } } diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java b/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java index d71d3846335..c0ae337e5fc 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java +++ b/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java @@ -19,7 +19,6 @@ package org.apache.lucene.index; import java.io.IOException; import java.util.Collections; -import java.util.Map; import java.util.Set; import java.util.concurrent.CopyOnWriteArraySet; @@ -73,7 +72,7 @@ public final class SegmentReader extends CodecReader { * @throws CorruptIndexException if the index is corrupt * @throws IOException if there is a low-level IO error */ - SegmentReader(SegmentCommitInfo si, int createdVersionMajor, boolean openedFromWriter, IOContext context, Map readerAttributes) throws IOException { + SegmentReader(SegmentCommitInfo si, int createdVersionMajor, boolean openedFromWriter, IOContext context) throws IOException { this.si = si.clone(); this.originalSi = si; this.metaData = new LeafMetaData(createdVersionMajor, si.info.getMinVersion(), si.info.getIndexSort()); @@ -81,7 +80,7 @@ public final class SegmentReader extends CodecReader { // We pull liveDocs/DV updates from disk: this.isNRT = false; - core = new SegmentCoreReaders(si.info.dir, si, openedFromWriter, context, readerAttributes); + core = new SegmentCoreReaders(si.info.dir, si, openedFromWriter, context); segDocValues = new SegmentDocValues(); boolean success = false; diff --git a/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java b/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java index 571b7b1c5f1..82b7bad6049 100644 --- a/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java +++ b/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java @@ -42,22 +42,19 @@ public final class StandardDirectoryReader extends DirectoryReader { final SegmentInfos segmentInfos; private final boolean applyAllDeletes; private final boolean writeAllDeletes; - private final Map readerAttributes; - + /** called only from static open() methods */ StandardDirectoryReader(Directory directory, LeafReader[] readers, IndexWriter writer, - SegmentInfos sis, boolean applyAllDeletes, boolean writeAllDeletes, - Map readerAttributes) throws 
IOException { + SegmentInfos sis, boolean applyAllDeletes, boolean writeAllDeletes) throws IOException { super(directory, readers); this.writer = writer; this.segmentInfos = sis; this.applyAllDeletes = applyAllDeletes; this.writeAllDeletes = writeAllDeletes; - this.readerAttributes = Map.copyOf(readerAttributes); } /** called from DirectoryReader.open(...) methods */ - static DirectoryReader open(final Directory directory, final IndexCommit commit, Map readerAttributes) throws IOException { + static DirectoryReader open(final Directory directory, final IndexCommit commit) throws IOException { return new SegmentInfos.FindSegmentsFile(directory) { @Override protected DirectoryReader doBody(String segmentFileName) throws IOException { @@ -66,12 +63,12 @@ public final class StandardDirectoryReader extends DirectoryReader { boolean success = false; try { for (int i = sis.size()-1; i >= 0; i--) { - readers[i] = new SegmentReader(sis.info(i), sis.getIndexCreatedVersionMajor(), false, IOContext.READ, readerAttributes); + readers[i] = new SegmentReader(sis.info(i), sis.getIndexCreatedVersionMajor(), false, IOContext.READ); } // This may throw CorruptIndexException if there are too many docs, so // it must be inside try clause so we close readers in that case: - DirectoryReader reader = new StandardDirectoryReader(directory, readers, null, sis, false, false, readerAttributes); + DirectoryReader reader = new StandardDirectoryReader(directory, readers, null, sis, false, false); success = true; return reader; @@ -85,7 +82,7 @@ public final class StandardDirectoryReader extends DirectoryReader { } /** Used by near real-time search */ - static DirectoryReader open(IndexWriter writer, SegmentInfos infos, boolean applyAllDeletes, boolean writeAllDeletes, Map readerAttributes) throws IOException { + static DirectoryReader open(IndexWriter writer, SegmentInfos infos, boolean applyAllDeletes, boolean writeAllDeletes) throws IOException { // IndexWriter synchronizes externally before calling // us, which ensures infos will not change; so there's // no need to process segments in reverse order @@ -124,7 +121,7 @@ public final class StandardDirectoryReader extends DirectoryReader { StandardDirectoryReader result = new StandardDirectoryReader(dir, readers.toArray(new SegmentReader[readers.size()]), writer, - segmentInfos, applyAllDeletes, writeAllDeletes, readerAttributes); + segmentInfos, applyAllDeletes, writeAllDeletes); return result; } catch (Throwable t) { try { @@ -139,7 +136,7 @@ public final class StandardDirectoryReader extends DirectoryReader { /** This constructor is only used for {@link #doOpenIfChanged(SegmentInfos)}, as well as NRT replication. 
* * @lucene.internal */ - public static DirectoryReader open(Directory directory, SegmentInfos infos, List oldReaders, Map readerAttributes) throws IOException { + public static DirectoryReader open(Directory directory, SegmentInfos infos, List oldReaders) throws IOException { // we put the old SegmentReaders in a map, that allows us // to lookup a reader using its segment name @@ -179,7 +176,7 @@ public final class StandardDirectoryReader extends DirectoryReader { SegmentReader newReader; if (oldReader == null || commitInfo.info.getUseCompoundFile() != oldReader.getSegmentInfo().info.getUseCompoundFile()) { // this is a new reader; in case we hit an exception we can decRef it safely - newReader = new SegmentReader(commitInfo, infos.getIndexCreatedVersionMajor(), false, IOContext.READ, readerAttributes); + newReader = new SegmentReader(commitInfo, infos.getIndexCreatedVersionMajor(), false, IOContext.READ); newReaders[i] = newReader; } else { if (oldReader.isNRT) { @@ -221,7 +218,7 @@ public final class StandardDirectoryReader extends DirectoryReader { } } } - return new StandardDirectoryReader(directory, newReaders, null, infos, false, false, readerAttributes); + return new StandardDirectoryReader(directory, newReaders, null, infos, false, false); } // TODO: move somewhere shared if it's useful elsewhere @@ -334,7 +331,7 @@ public final class StandardDirectoryReader extends DirectoryReader { } DirectoryReader doOpenIfChanged(SegmentInfos infos) throws IOException { - return StandardDirectoryReader.open(directory, infos, getSequentialSubReaders(), readerAttributes); + return StandardDirectoryReader.open(directory, infos, getSequentialSubReaders()); } @Override diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene84/TestLucene84PostingsFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene84/TestLucene84PostingsFormat.java index 0be30e33612..0c2d99b540b 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/lucene84/TestLucene84PostingsFormat.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene84/TestLucene84PostingsFormat.java @@ -17,22 +17,15 @@ package org.apache.lucene.codecs.lucene84; import java.io.IOException; -import java.nio.file.Path; import java.util.Arrays; import java.util.Collections; -import java.util.HashMap; import java.util.List; -import java.util.Map; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.CompetitiveImpactAccumulator; -import org.apache.lucene.codecs.blocktree.BlockTreeTermsReader; import org.apache.lucene.codecs.blocktree.FieldReader; import org.apache.lucene.codecs.blocktree.Stats; -import org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat; -import org.apache.lucene.codecs.lucene84.Lucene84ScoreSkipReader; -import org.apache.lucene.codecs.lucene84.Lucene84SkipWriter; import org.apache.lucene.codecs.lucene84.Lucene84ScoreSkipReader.MutableImpactList; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; @@ -41,16 +34,11 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.Impact; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.FSDirectory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import 
org.apache.lucene.store.IndexOutput; -import org.apache.lucene.store.MMapDirectory; -import org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.util.TestUtil; public class TestLucene84PostingsFormat extends BasePostingsFormatTestCase { @@ -61,202 +49,6 @@ public class TestLucene84PostingsFormat extends BasePostingsFormatTestCase { return codec; } - public void testFstOffHeap() throws IOException { - Path tempDir = createTempDir(); - try (Directory d = FSDirectory.open(tempDir)) { - assumeTrue("only works with mmap directory", d instanceof MMapDirectory); - try (IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random())))) { - DirectoryReader readerFromWriter = DirectoryReader.open(w); - for (int i = 0; i < 50; i++) { - Document doc = new Document(); - doc.add(newStringField("id", "" + i, Field.Store.NO)); - doc.add(newStringField("field", Character.toString((char) (97 + i)), Field.Store.NO)); - doc.add(newStringField("field", Character.toString((char) (98 + i)), Field.Store.NO)); - if (rarely()) { - w.addDocument(doc); - } else { - w.updateDocument(new Term("id", "" + i), doc); - } - if (random().nextBoolean()) { - w.commit(); - } - - if (random().nextBoolean()) { - DirectoryReader newReader = DirectoryReader.openIfChanged(readerFromWriter); - if (newReader != null) { - readerFromWriter.close(); - readerFromWriter = newReader; - } - for (LeafReaderContext leaf : readerFromWriter.leaves()) { - FieldReader field = (FieldReader) leaf.reader().terms("field"); - FieldReader id = (FieldReader) leaf.reader().terms("id"); - assertFalse(id.isFstOffHeap()); - assertTrue(field.isFstOffHeap()); - } - } - } - readerFromWriter.close(); - - w.forceMerge(1); - try (DirectoryReader r = DirectoryReader.open(w)) { - assertEquals(1, r.leaves().size()); - FieldReader field = (FieldReader) r.leaves().get(0).reader().terms("field"); - FieldReader id = (FieldReader) r.leaves().get(0).reader().terms("id"); - assertFalse(id.isFstOffHeap()); - assertTrue(field.isFstOffHeap()); - } - w.commit(); - try (DirectoryReader r = DirectoryReader.open(d)) { - assertEquals(1, r.leaves().size()); - FieldReader field = (FieldReader) r.leaves().get(0).reader().terms("field"); - FieldReader id = (FieldReader) r.leaves().get(0).reader().terms("id"); - assertTrue(id.isFstOffHeap()); - assertTrue(field.isFstOffHeap()); - } - } - } - - try (Directory d = new SimpleFSDirectory(tempDir)) { - // test auto - try (DirectoryReader r = DirectoryReader.open(d)) { - assertEquals(1, r.leaves().size()); - FieldReader field = (FieldReader) r.leaves().get(0).reader().terms("field"); - FieldReader id = (FieldReader) r.leaves().get(0).reader().terms("id"); - assertFalse(id.isFstOffHeap()); - assertFalse(field.isFstOffHeap()); - } - } - - try (Directory d = new SimpleFSDirectory(tempDir)) { - // test per field - Map readerAttributes = new HashMap<>(); - readerAttributes.put(BlockTreeTermsReader.FST_MODE_KEY, BlockTreeTermsReader.FSTLoadMode.OFF_HEAP.name()); - readerAttributes.put(BlockTreeTermsReader.FST_MODE_KEY + ".field", BlockTreeTermsReader.FSTLoadMode.ON_HEAP.name()); - try (DirectoryReader r = DirectoryReader.open(d, readerAttributes)) { - assertEquals(1, r.leaves().size()); - FieldReader field = (FieldReader) r.leaves().get(0).reader().terms("field"); - FieldReader id = (FieldReader) r.leaves().get(0).reader().terms("id"); - assertTrue(id.isFstOffHeap()); - assertFalse(field.isFstOffHeap()); - } - } - - IllegalArgumentException invalid = expectThrows(IllegalArgumentException.class, () -> { - try 
-  public void testDisableFSTOffHeap() throws IOException {
-    Path tempDir = createTempDir();
-    try (Directory d = MMapDirectory.open(tempDir)) {
-      try (IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random()))
-          .setReaderAttributes(Collections.singletonMap(BlockTreeTermsReader.FST_MODE_KEY, BlockTreeTermsReader.FSTLoadMode.ON_HEAP.name())))) {
-        assumeTrue("only works with mmap directory", d instanceof MMapDirectory);
-        DirectoryReader readerFromWriter = DirectoryReader.open(w);
-        for (int i = 0; i < 50; i++) {
-          Document doc = new Document();
-          doc.add(newStringField("id", "" + i, Field.Store.NO));
-          doc.add(newStringField("field", Character.toString((char) (97 + i)), Field.Store.NO));
-          doc.add(newStringField("field", Character.toString((char) (98 + i)), Field.Store.NO));
-          if (rarely()) {
-            w.addDocument(doc);
-          } else {
-            w.updateDocument(new Term("id", "" + i), doc);
-          }
-          if (random().nextBoolean()) {
-            w.commit();
-          }
-          if (random().nextBoolean()) {
-            DirectoryReader newReader = DirectoryReader.openIfChanged(readerFromWriter);
-            if (newReader != null) {
-              readerFromWriter.close();
-              readerFromWriter = newReader;
-            }
-            for (LeafReaderContext leaf : readerFromWriter.leaves()) {
-              FieldReader field = (FieldReader) leaf.reader().terms("field");
-              FieldReader id = (FieldReader) leaf.reader().terms("id");
-              assertFalse(id.isFstOffHeap());
-              assertFalse(field.isFstOffHeap());
-            }
-          }
-        }
-        readerFromWriter.close();
-        w.forceMerge(1);
-        w.commit();
-      }
-      try (DirectoryReader r = DirectoryReader.open(d, Collections.singletonMap(BlockTreeTermsReader.FST_MODE_KEY, BlockTreeTermsReader.FSTLoadMode.ON_HEAP.name()))) {
-        assertEquals(1, r.leaves().size());
-        FieldReader field = (FieldReader) r.leaves().get(0).reader().terms("field");
-        FieldReader id = (FieldReader) r.leaves().get(0).reader().terms("id");
-        assertFalse(id.isFstOffHeap());
-        assertFalse(field.isFstOffHeap());
-      }
-    }
-  }
-
-  public void testAlwaysFSTOffHeap() throws IOException {
-    boolean alsoLoadIdOffHeap = random().nextBoolean();
-    BlockTreeTermsReader.FSTLoadMode loadMode;
-    if (alsoLoadIdOffHeap) {
-      loadMode = BlockTreeTermsReader.FSTLoadMode.OFF_HEAP;
-    } else {
-      loadMode = BlockTreeTermsReader.FSTLoadMode.OPTIMIZE_UPDATES_OFF_HEAP;
-    }
-    try (Directory d = newDirectory()) { // any directory should work now
-      try (IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random()))
-          .setReaderAttributes(Collections.singletonMap(BlockTreeTermsReader.FST_MODE_KEY, loadMode.name())))) {
-        DirectoryReader readerFromWriter = DirectoryReader.open(w);
-        for (int i = 0; i < 50; i++) {
-          Document doc = new Document();
-          doc.add(newStringField("id", "" + i, Field.Store.NO));
-          doc.add(newStringField("field", Character.toString((char) (97 + i)), Field.Store.NO));
-          doc.add(newStringField("field", Character.toString((char) (98 + i)), Field.Store.NO));
-          if (rarely()) {
-            w.addDocument(doc);
-          } else {
-            w.updateDocument(new Term("id", "" + i), doc);
-          }
-          if (random().nextBoolean()) {
-            w.commit();
-          }
-          if (random().nextBoolean()) {
-            DirectoryReader newReader = DirectoryReader.openIfChanged(readerFromWriter);
-            if (newReader != null) {
-              readerFromWriter.close();
-              readerFromWriter = newReader;
-            }
-            for (LeafReaderContext leaf : readerFromWriter.leaves()) {
-              FieldReader field = (FieldReader) leaf.reader().terms("field");
-              FieldReader id = (FieldReader) leaf.reader().terms("id");
-              if (alsoLoadIdOffHeap) {
-                assertTrue(id.isFstOffHeap());
-              } else {
-                assertFalse(id.isFstOffHeap());
-              }
-              assertTrue(field.isFstOffHeap());
-            }
-          }
-        }
-        readerFromWriter.close();
-        w.forceMerge(1);
-        w.commit();
-      }
-      try (DirectoryReader r = DirectoryReader.open(d, Collections.singletonMap(BlockTreeTermsReader.FST_MODE_KEY, loadMode.name()))) {
-        assertEquals(1, r.leaves().size());
-        FieldReader field = (FieldReader) r.leaves().get(0).reader().terms("field");
-        FieldReader id = (FieldReader) r.leaves().get(0).reader().terms("id");
-        assertTrue(id.isFstOffHeap());
-        assertTrue(field.isFstOffHeap());
-      }
-    }
-  }
-
   /** Make sure the final sub-block(s) are not skipped. */
   public void testFinalBlock() throws Exception {
     Directory d = newDirectory();
@@ -283,7 +75,7 @@ public class TestLucene84PostingsFormat extends BasePostingsFormatTestCase {
 
   private void shouldFail(int minItemsInBlock, int maxItemsInBlock) {
     expectThrows(IllegalArgumentException.class, () -> {
-      new Lucene84PostingsFormat(minItemsInBlock, maxItemsInBlock, BlockTreeTermsReader.FSTLoadMode.AUTO);
+      new Lucene84PostingsFormat(minItemsInBlock, maxItemsInBlock);
    });
   }
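
After this hunk the only arguments left to validate are the block-size pair; the FSTLoadMode parameter is gone from the constructor entirely. A hedged sketch of the two-argument form (25/48 are the BlockTree defaults; any pair with minItemsInBlock > 1 and maxItemsInBlock >= 2 * (minItemsInBlock - 1) is accepted):

    import org.apache.lucene.codecs.PostingsFormat;
    import org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat;

    public class TwoArgPostingsFormat {
      public static void main(String[] args) {
        // 25/48 are the default BlockTree block sizes; an invalid pair such as
        // (16, 16) would trip the IllegalArgumentException exercised above.
        PostingsFormat pf = new Lucene84PostingsFormat(25, 48);
        System.out.println(pf.getName()); // "Lucene84"
      }
    }
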
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java b/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java
index b3033d256a0..f48ae6e43de 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java
@@ -222,7 +222,7 @@ public class TestCodecs extends LuceneTestCase {
     final SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, SEGMENT, 10000, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
 
     this.write(si, fieldInfos, dir, fields);
-    final FieldsProducer reader = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, false, newIOContext(random()), Collections.emptyMap()));
+    final FieldsProducer reader = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, false, newIOContext(random())));
 
     final Iterator<String> fieldsEnum = reader.iterator();
     String fieldName = fieldsEnum.next();
@@ -282,7 +282,7 @@ public class TestCodecs extends LuceneTestCase {
     if (VERBOSE) {
       System.out.println("TEST: now read postings");
     }
-    final FieldsProducer terms = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, false, newIOContext(random()), Collections.emptyMap()));
+    final FieldsProducer terms = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, false, newIOContext(random())));
 
     final Verify[] threads = new Verify[NUM_TEST_THREADS-1];
     for(int i=0;i<
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestReaderPool.java b/lucene/core/src/test/org/apache/lucene/index/TestReaderPool.java
-    ReaderPool pool = new ReaderPool(directory, directory, segmentInfos, fieldNumbers, () -> 0l, null, null, null, Collections.emptyMap());
+    ReaderPool pool = new ReaderPool(directory, directory, segmentInfos, fieldNumbers, () -> 0l, null, null, null);
     SegmentCommitInfo commitInfo = RandomPicks.randomFrom(random(), segmentInfos.asList());
     ReadersAndUpdates readersAndUpdates = pool.get(commitInfo, true);
     assertSame(readersAndUpdates, pool.get(commitInfo, false));
@@ -64,7 +64,7 @@ public class TestReaderPool extends LuceneTestCase {
     StandardDirectoryReader reader = (StandardDirectoryReader) DirectoryReader.open(directory);
     SegmentInfos segmentInfos = reader.segmentInfos.clone();
-    ReaderPool pool = new ReaderPool(directory, directory, segmentInfos, fieldNumbers, () -> 0l, null, null, null, Collections.emptyMap());
+    ReaderPool pool = new ReaderPool(directory, directory, segmentInfos, fieldNumbers, () -> 0l, null, null, null);
     SegmentCommitInfo commitInfo = RandomPicks.randomFrom(random(), segmentInfos.asList());
     assertFalse(pool.isReaderPoolingEnabled());
     pool.release(pool.get(commitInfo, true), random().nextBoolean());
@@ -100,7 +100,7 @@ public class TestReaderPool extends LuceneTestCase {
     StandardDirectoryReader reader = (StandardDirectoryReader) DirectoryReader.open(directory);
     SegmentInfos segmentInfos = reader.segmentInfos.clone();
     ReaderPool pool = new ReaderPool(directory, directory, segmentInfos, fieldNumbers, () -> 0l,
-        new NullInfoStream(), null, null, Collections.emptyMap());
+        new NullInfoStream(), null, null);
     int id = random().nextInt(10);
     if (random().nextBoolean()) {
       pool.enableReaderPooling();
@@ -168,7 +168,7 @@ public class TestReaderPool extends LuceneTestCase {
     StandardDirectoryReader reader = (StandardDirectoryReader) DirectoryReader.open(directory);
     SegmentInfos segmentInfos = reader.segmentInfos.clone();
     ReaderPool pool = new ReaderPool(directory, directory, segmentInfos, fieldNumbers, () -> 0l,
-        new NullInfoStream(), null, null, Collections.emptyMap());
+        new NullInfoStream(), null, null);
     int id = random().nextInt(10);
     if (random().nextBoolean()) {
       pool.enableReaderPooling();
@@ -213,7 +213,7 @@ public class TestReaderPool extends LuceneTestCase {
     StandardDirectoryReader reader = (StandardDirectoryReader) DirectoryReader.open(directory);
     SegmentInfos segmentInfos = reader.segmentInfos.clone();
     ReaderPool pool = new ReaderPool(directory, directory, segmentInfos, fieldNumbers, () -> 0L,
-        new NullInfoStream(), null, null, Collections.emptyMap());
+        new NullInfoStream(), null, null);
     if (random().nextBoolean()) {
       pool.enableReaderPooling();
     }
@@ -287,7 +287,7 @@ public class TestReaderPool extends LuceneTestCase {
     StandardDirectoryReader reader = (StandardDirectoryReader) DirectoryReader.open(directory);
     SegmentInfos segmentInfos = reader.segmentInfos.clone();
     ReaderPool pool = new ReaderPool(directory, directory, segmentInfos, fieldNumbers, () -> 0l,
-        new NullInfoStream(), null, null, Collections.emptyMap());
+        new NullInfoStream(), null, null);
     assertEquals(0, pool.getReadersByRam().size());
     int ord = 0;
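
ReaderPool is the internal cache behind IndexWriter's NRT readers, so dropping its attributes argument changes nothing on the public path. A minimal sketch of that path, using an in-memory directory for illustration:

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.StringField;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.store.ByteBuffersDirectory;
    import org.apache.lucene.store.Directory;

    public class NrtReaderExample {
      public static void main(String[] args) throws Exception {
        try (Directory dir = new ByteBuffersDirectory();
             IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
          Document doc = new Document();
          doc.add(new StringField("id", "1", Field.Store.NO));
          w.addDocument(doc);
          // NRT open: readers come from the writer's ReaderPool; there is no
          // setReaderAttributes(...) step on the config anymore.
          try (DirectoryReader r = DirectoryReader.open(w)) {
            System.out.println("numDocs=" + r.numDocs());
          }
        }
      }
    }
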
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
index d2cd7114c34..da2a7f17234 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
@@ -60,8 +60,8 @@ public class TestSegmentMerger extends LuceneTestCase {
     SegmentCommitInfo info1 = DocHelper.writeDoc(random(), merge1Dir, doc1);
     DocHelper.setupDoc(doc2);
     SegmentCommitInfo info2 = DocHelper.writeDoc(random(), merge2Dir, doc2);
-    reader1 = new SegmentReader(info1, Version.LATEST.major, false, newIOContext(random()), Collections.emptyMap());
-    reader2 = new SegmentReader(info2, Version.LATEST.major, false, newIOContext(random()), Collections.emptyMap());
+    reader1 = new SegmentReader(info1, Version.LATEST.major, false, newIOContext(random()));
+    reader2 = new SegmentReader(info2, Version.LATEST.major, false, newIOContext(random()));
   }
 
   @Override
@@ -95,10 +95,10 @@ public class TestSegmentMerger extends LuceneTestCase {
     assertTrue(docsMerged == 2);
     //Should be able to open a new SegmentReader against the new directory
     SegmentReader mergedReader = new SegmentReader(new SegmentCommitInfo(
-                                                     mergeState.segmentInfo,
-                                                     0, 0, -1L, -1L, -1L),
-                                                   Version.LATEST.major,
-                                                   false, newIOContext(random()), Collections.emptyMap());
+        mergeState.segmentInfo,
+        0, 0, -1L, -1L, -1L),
+        Version.LATEST.major,
+        false, newIOContext(random()));
     assertTrue(mergedReader != null);
     assertTrue(mergedReader.numDocs() == 2);
     Document newDoc1 = mergedReader.document(0);
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentReader.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentReader.java
index 76bf1a6a118..2f45488f833 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentReader.java
@@ -19,7 +19,6 @@ package org.apache.lucene.index;
 
 import java.io.IOException;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 
@@ -44,7 +43,7 @@ public class TestSegmentReader extends LuceneTestCase {
     dir = newDirectory();
     DocHelper.setupDoc(testDoc);
     SegmentCommitInfo info = DocHelper.writeDoc(random(), dir, testDoc);
-    reader = new SegmentReader(info, Version.LATEST.major, false, IOContext.READ, Collections.emptyMap());
+    reader = new SegmentReader(info, Version.LATEST.major, false, IOContext.READ);
   }
 
   @Override
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermDocs.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
index 9fa39c0dfff..99853f7ea35 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
@@ -18,7 +18,6 @@ package org.apache.lucene.index;
 
 import java.io.IOException;
-import java.util.Collections;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
@@ -55,7 +54,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
 
   public void testTermDocs() throws IOException {
     //After adding the document, we should be able to read it back in
-    SegmentReader reader = new SegmentReader(info, Version.LATEST.major, false, newIOContext(random()), Collections.emptyMap());
+    SegmentReader reader = new SegmentReader(info, Version.LATEST.major, false, newIOContext(random()));
     assertTrue(reader != null);
 
     TermsEnum terms = reader.terms(DocHelper.TEXT_FIELD_2_KEY).iterator();
@@ -73,7 +72,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
   public void testBadSeek() throws IOException {
     {
       //After adding the document, we should be able to read it back in
-      SegmentReader reader = new SegmentReader(info, Version.LATEST.major, false, newIOContext(random()), Collections.emptyMap());
+      SegmentReader reader = new SegmentReader(info, Version.LATEST.major, false, newIOContext(random()));
       assertTrue(reader != null);
       PostingsEnum termDocs = TestUtil.docs(random(), reader,
          "textField2",
@@ -86,7 +85,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
     }
     {
       //After adding the document, we should be able to read it back in
-      SegmentReader reader = new SegmentReader(info, Version.LATEST.major, false, newIOContext(random()), Collections.emptyMap());
+      SegmentReader reader = new SegmentReader(info, Version.LATEST.major, false, newIOContext(random()));
       assertTrue(reader != null);
       PostingsEnum termDocs = TestUtil.docs(random(), reader,
          "junk",
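
The SegmentReader constructor exercised by these tests is package-private; outside org.apache.lucene.index the same readers are reached through the leaves of a composite reader. A sketch under that assumption (the path is illustrative):

    import java.nio.file.Paths;

    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.index.SegmentReader;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;

    public class LeafReaderExample {
      public static void main(String[] args) throws Exception {
        try (Directory dir = FSDirectory.open(Paths.get("/tmp/index"));
             DirectoryReader r = DirectoryReader.open(dir)) {
          for (LeafReaderContext ctx : r.leaves()) {
            // Leaves of a DirectoryReader opened on a Directory are SegmentReaders.
            SegmentReader sr = (SegmentReader) ctx.reader();
            System.out.println(sr.getSegmentName() + " maxDoc=" + sr.maxDoc());
          }
        }
      }
    }
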
diff --git a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/SegmentInfosSearcherManager.java b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/SegmentInfosSearcherManager.java
index 3bb8e91b84a..d18ee1029d9 100644
--- a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/SegmentInfosSearcherManager.java
+++ b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/SegmentInfosSearcherManager.java
@@ -19,7 +19,6 @@ package org.apache.lucene.replicator.nrt;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -56,7 +55,7 @@ class SegmentInfosSearcherManager extends ReferenceManager<IndexSearcher> {
     this.searcherFactory = searcherFactory;
     currentInfos = infosIn;
     node.message("SegmentInfosSearcherManager.init: use incoming infos=" + infosIn.toString());
-    current = SearcherManager.getSearcher(searcherFactory, StandardDirectoryReader.open(dir, currentInfos, null, Collections.emptyMap()), null);
+    current = SearcherManager.getSearcher(searcherFactory, StandardDirectoryReader.open(dir, currentInfos, null), null);
     addReaderClosedListener(current.getIndexReader());
   }
 
@@ -105,7 +104,7 @@ class SegmentInfosSearcherManager extends ReferenceManager<IndexSearcher> {
     }
 
     // Open a new reader, sharing any common segment readers with the old one:
-    DirectoryReader r = StandardDirectoryReader.open(dir, currentInfos, subs, Collections.emptyMap());
+    DirectoryReader r = StandardDirectoryReader.open(dir, currentInfos, subs);
     addReaderClosedListener(r);
     node.message("refreshed to version=" + currentInfos.getVersion() + " r=" + r);
     return SearcherManager.getSearcher(searcherFactory, r, old.getIndexReader());
diff --git a/lucene/test-framework/src/java/org/apache/lucene/codecs/cheapbastard/CheapBastardCodec.java b/lucene/test-framework/src/java/org/apache/lucene/codecs/cheapbastard/CheapBastardCodec.java
index 325d3123c90..065b0a0c3d2 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/codecs/cheapbastard/CheapBastardCodec.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/codecs/cheapbastard/CheapBastardCodec.java
@@ -18,7 +18,6 @@ package org.apache.lucene.codecs.cheapbastard;
 
 import org.apache.lucene.codecs.FilterCodec;
 import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.codecs.blocktree.BlockTreeTermsReader;
 import org.apache.lucene.util.TestUtil;
 
 /** Codec that tries to use as little ram as possible because he spent all his money on beer */
@@ -26,7 +25,7 @@ import org.apache.lucene.util.TestUtil;
 // but if we named it "LowMemory" in codecs/ package, it would be irresistible like optimize()!
 public class CheapBastardCodec extends FilterCodec {
 
-  private final PostingsFormat postings = TestUtil.getDefaultPostingsFormat(100, 200, BlockTreeTermsReader.FSTLoadMode.OFF_HEAP);
+  private final PostingsFormat postings = TestUtil.getDefaultPostingsFormat(100, 200);
 
   public CheapBastardCodec() {
     super("CheapBastard", TestUtil.getDefaultCodec());
diff --git a/lucene/test-framework/src/java/org/apache/lucene/codecs/mockrandom/MockRandomPostingsFormat.java b/lucene/test-framework/src/java/org/apache/lucene/codecs/mockrandom/MockRandomPostingsFormat.java
index 690839feefd..939e8e7c1d1 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/codecs/mockrandom/MockRandomPostingsFormat.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/codecs/mockrandom/MockRandomPostingsFormat.java
@@ -19,7 +19,6 @@ package org.apache.lucene.codecs.mockrandom;
 
 import java.io.IOException;
 import java.util.Random;
 
-import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.codecs.FieldsConsumer;
 import org.apache.lucene.codecs.FieldsProducer;
@@ -294,7 +293,7 @@ public final class MockRandomPostingsFormat extends PostingsFormat {
 
       boolean success = false;
       try {
-        fields = new BlockTreeTermsReader(postingsReader, state, RandomPicks.randomFrom(random, BlockTreeTermsReader.FSTLoadMode.values()));
+        fields = new BlockTreeTermsReader(postingsReader, state);
         success = true;
       } finally {
         if (!success) {
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
index 818d82bfe21..e5f7cd49701 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
@@ -362,7 +362,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
         segmentInfo, fieldInfos,
         null, new IOContext(new FlushInfo(1, 20)));
 
-    SegmentReadState readState = new SegmentReadState(dir, segmentInfo, fieldInfos, false, IOContext.READ, Collections.emptyMap());
+    SegmentReadState readState = new SegmentReadState(dir, segmentInfo, fieldInfos, false, IOContext.READ);
 
     // PostingsFormat
     NormsProducer fakeNorms = new NormsProducer() {
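
The pattern changed in MockRandomPostingsFormat above is the one every BlockTree-based postings format follows on its read side; with the FSTLoadMode argument gone, a producer reduces to the two-argument call. A hedged sketch (the format name "Example" and the write-side stub are placeholders, not code from this patch):

    import java.io.IOException;

    import org.apache.lucene.codecs.FieldsConsumer;
    import org.apache.lucene.codecs.FieldsProducer;
    import org.apache.lucene.codecs.PostingsFormat;
    import org.apache.lucene.codecs.PostingsReaderBase;
    import org.apache.lucene.codecs.blocktree.BlockTreeTermsReader;
    import org.apache.lucene.codecs.lucene84.Lucene84PostingsReader;
    import org.apache.lucene.index.SegmentReadState;
    import org.apache.lucene.index.SegmentWriteState;
    import org.apache.lucene.util.IOUtils;

    public class ExamplePostingsFormat extends PostingsFormat {
      public ExamplePostingsFormat() {
        super("Example");
      }

      @Override
      public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
        throw new UnsupportedOperationException(); // read side only in this sketch
      }

      @Override
      public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
        PostingsReaderBase postingsReader = new Lucene84PostingsReader(state);
        boolean success = false;
        try {
          // Two-argument form: no FSTLoadMode; the terms-index FST stays off-heap.
          FieldsProducer ret = new BlockTreeTermsReader(postingsReader, state);
          success = true;
          return ret;
        } finally {
          if (!success) {
            IOUtils.closeWhileHandlingException(postingsReader);
          }
        }
      }
    }
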
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java b/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java
index de1e64edf6a..7c158a2f28d 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java
@@ -26,7 +26,6 @@ import java.util.Map;
 import java.util.Random;
 import java.util.Set;
 
-import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import org.apache.lucene.codecs.DocValuesFormat;
 import org.apache.lucene.codecs.PointsFormat;
 import org.apache.lucene.codecs.PointsReader;
@@ -39,7 +38,6 @@ import org.apache.lucene.codecs.asserting.AssertingPostingsFormat;
 import org.apache.lucene.codecs.blockterms.LuceneFixedGap;
 import org.apache.lucene.codecs.blockterms.LuceneVarGapDocFreqInterval;
 import org.apache.lucene.codecs.blockterms.LuceneVarGapFixedInterval;
-import org.apache.lucene.codecs.blocktree.BlockTreeTermsReader;
 import org.apache.lucene.codecs.blocktreeords.BlockTreeOrdsPostingsFormat;
 import org.apache.lucene.codecs.bloom.TestBloomFilteredLucenePostings;
 import org.apache.lucene.codecs.lucene60.Lucene60PointsReader;
@@ -187,7 +185,7 @@ public class RandomCodec extends AssertingCodec {
     bkdSplitRandomSeed = random.nextInt();
 
     add(avoidCodecs,
-        TestUtil.getDefaultPostingsFormat(minItemsPerBlock, maxItemsPerBlock, RandomPicks.randomFrom(random, BlockTreeTermsReader.FSTLoadMode.values())),
+        TestUtil.getDefaultPostingsFormat(minItemsPerBlock, maxItemsPerBlock),
         new FSTPostingsFormat(),
         new DirectPostingsFormat(LuceneTestCase.rarely(random) ? 1 : (LuceneTestCase.rarely(random) ? Integer.MAX_VALUE : maxItemsPerBlock),
                                  LuceneTestCase.rarely(random) ? 1 : (LuceneTestCase.rarely(random) ? Integer.MAX_VALUE : lowFreqCutoff)),
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/RandomPostingsTester.java b/lucene/test-framework/src/java/org/apache/lucene/index/RandomPostingsTester.java
index 9d70fa1e42b..1254842fa2e 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/RandomPostingsTester.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/RandomPostingsTester.java
@@ -748,7 +748,7 @@ public class RandomPostingsTester {
 
     currentFieldInfos = newFieldInfos;
 
-    SegmentReadState readState = new SegmentReadState(dir, segmentInfo, newFieldInfos, false, IOContext.READ, Collections.emptyMap());
+    SegmentReadState readState = new SegmentReadState(dir, segmentInfo, newFieldInfos, false, IOContext.READ);
 
     return codec.postingsFormat().fieldsProducer(readState);
   }
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
index 9fef84ea9ad..f0697f95d49 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
@@ -51,7 +51,6 @@ import org.apache.lucene.codecs.DocValuesFormat;
 import org.apache.lucene.codecs.PostingsFormat;
 import org.apache.lucene.codecs.asserting.AssertingCodec;
 import org.apache.lucene.codecs.blockterms.LuceneFixedGap;
-import org.apache.lucene.codecs.blocktree.BlockTreeTermsReader;
 import org.apache.lucene.codecs.blocktreeords.BlockTreeOrdsPostingsFormat;
 import org.apache.lucene.codecs.lucene80.Lucene80DocValuesFormat;
 import org.apache.lucene.codecs.lucene84.Lucene84Codec;
@@ -934,8 +933,8 @@ public final class TestUtil {
    * Returns the actual default postings format (e.g. LuceneMNPostingsFormat for this version of Lucene.
    * @lucene.internal this may disappear at any time */
-  public static PostingsFormat getDefaultPostingsFormat(int minItemsPerBlock, int maxItemsPerBlock, BlockTreeTermsReader.FSTLoadMode fstLoadMode) {
-    return new Lucene84PostingsFormat(minItemsPerBlock, maxItemsPerBlock, fstLoadMode);
+  public static PostingsFormat getDefaultPostingsFormat(int minItemsPerBlock, int maxItemsPerBlock) {
+    return new Lucene84PostingsFormat(minItemsPerBlock, maxItemsPerBlock);
   }
 
   /** Returns a random postings format that supports term ordinals */
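
For test code, the helper now mirrors the production constructor one-to-one. A small sketch of the two-argument helper (block sizes arbitrary, matching the CheapBastardCodec values above):

    import org.apache.lucene.codecs.PostingsFormat;
    import org.apache.lucene.util.TestUtil;

    public class DefaultPostingsFormatExample {
      public static void main(String[] args) {
        // Returns new Lucene84PostingsFormat(100, 200) for this version of Lucene.
        PostingsFormat pf = TestUtil.getDefaultPostingsFormat(100, 200);
        System.out.println(pf.getName()); // "Lucene84"
      }
    }
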