mirror of https://github.com/apache/lucene.git
LUCENE-9307: Remove the ability to set the buffer size dynamically on BufferedIndexInput (#1415)
parent 1fc4a546df
commit aa605b3c70
@@ -54,6 +54,8 @@ API Changes
  must now also implement the default constructor (see MIGRATE.txt).
  (Uwe Schindler, Dawid Weiss)

* LUCENE-9307: BufferedIndexInput#setBufferSize has been removed. (Adrien Grand)

Improvements

* LUCENE-8757: When provided with an ExecutorService to run queries across
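With setBufferSize removed, the buffer size of a BufferedIndexInput is fixed when the input is constructed, typically derived from the IOContext it is opened with. A minimal sketch of that contract, assuming only the public constants referenced in the removed code and the static bufferSize(IOContext) helper present in this version of the store package (the class name below is illustrative, not part of the commit):

import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.store.IOContext;

public class BufferSizeAtConstruction {
  public static void main(String[] args) {
    // The size a buffered input would use for ordinary reads in this context;
    // after this change it can only be chosen up front, never adjusted later.
    int defaultSize = BufferedIndexInput.bufferSize(IOContext.DEFAULT);
    System.out.println("default buffer size = " + defaultSize
        + ", minimum = " + BufferedIndexInput.MIN_BUFFER_SIZE);
  }
}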
@@ -21,7 +21,6 @@ import java.io.Closeable;
import java.io.IOException;
import java.util.Arrays;

import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.MathUtil;
@@ -80,8 +79,7 @@ public abstract class MultiLevelSkipListReader implements Closeable {
  /** childPointer of last read skip entry with docId <=
   * target. */
  private long lastChildPointer;

  private boolean inputIsBuffered;

  private final int skipMultiplier;

  /** Creates a {@code MultiLevelSkipListReader}. */
@@ -94,7 +92,6 @@ public abstract class MultiLevelSkipListReader implements Closeable {
    this.skipInterval = new int[maxSkipLevels];
    this.skipMultiplier = skipMultiplier;
    this.skipStream[0] = skipStream;
    this.inputIsBuffered = (skipStream instanceof BufferedIndexInput);
    this.skipInterval[0] = skipInterval;
    for (int i = 1; i < maxSkipLevels; i++) {
      // cache skip intervals
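For context on the constructor shown above: each higher level's skip interval grows by a factor of skipMultiplier, which is what the "cache skip intervals" loop fills in. A standalone sketch with illustrative numbers, mirroring that loop rather than copying the class:

public class SkipIntervals {
  public static void main(String[] args) {
    int maxSkipLevels = 4;
    int skipInterval0 = 8;   // interval on the lowest level (illustrative)
    int skipMultiplier = 8;  // growth factor between levels (illustrative)
    int[] skipInterval = new int[maxSkipLevels];
    skipInterval[0] = skipInterval0;
    for (int i = 1; i < maxSkipLevels; i++) {
      // cache skip intervals, one per level
      skipInterval[i] = skipInterval[i - 1] * skipMultiplier;
    }
    System.out.println(java.util.Arrays.toString(skipInterval)); // [8, 64, 512, 4096]
  }
}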
@@ -237,9 +234,6 @@ public abstract class MultiLevelSkipListReader implements Closeable {
      } else {
        // clone this stream, it is already at the start of the current level
        skipStream[i] = skipStream[0].clone();
        if (inputIsBuffered && length < BufferedIndexInput.BUFFER_SIZE) {
          ((BufferedIndexInput) skipStream[i]).setBufferSize(Math.max(BufferedIndexInput.MIN_BUFFER_SIZE, (int) length));
        }

        // move base stream beyond the current level
        skipStream[0].seek(skipStream[0].getFilePointer() + length);
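The removed branch only shrank the buffer of each per-level clone; the clone-then-seek positioning itself stays. A self-contained sketch of that pattern, using ByteBuffersDirectory purely for illustration (the file name, data, and level length are made up, and this is not the skip-list code itself):

import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;

public class CloneAndSeekExample {
  public static void main(String[] args) throws Exception {
    try (Directory dir = new ByteBuffersDirectory()) {
      try (IndexOutput out = dir.createOutput("skip", IOContext.DEFAULT)) {
        for (int i = 0; i < 16; i++) {
          out.writeByte((byte) i);
        }
      }
      try (IndexInput base = dir.openInput("skip", IOContext.DEFAULT)) {
        long levelLength = 8;
        IndexInput level = base.clone();                 // positioned at the start of the level
        base.seek(base.getFilePointer() + levelLength);  // base stream moves past the level
        System.out.println(level.readByte() + " " + base.readByte()); // prints: 0 8
      }
    }
  }
}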
@@ -73,29 +73,6 @@ public abstract class BufferedIndexInput extends IndexInput implements RandomAcc
    this.bufferSize = bufferSize;
  }

  /** Change the buffer size used by this IndexInput */
  public final void setBufferSize(int newSize) {
    assert buffer == EMPTY_BYTEBUFFER || bufferSize == buffer.capacity() : "buffer=" + buffer + " bufferSize=" + bufferSize + " buffer.length=" + (buffer != null ? buffer.capacity() : 0);
    if (newSize != bufferSize) {
      checkBufferSize(newSize);
      bufferSize = newSize;
      if (buffer != EMPTY_BYTEBUFFER) {
        // Resize the existing buffer and carefully save as
        // many bytes as possible starting from the current
        // bufferPosition
        ByteBuffer newBuffer = ByteBuffer.allocate(newSize);
        assert newBuffer.order() == ByteOrder.BIG_ENDIAN;
        if (buffer.remaining() > newBuffer.capacity()) {
          buffer.limit(buffer.position() + newBuffer.capacity());
        }
        assert buffer.remaining() <= newBuffer.capacity();
        newBuffer.put(buffer);
        newBuffer.flip();
        buffer = newBuffer;
      }
    }
  }

  /** Returns buffer size. @see #setBufferSize */
  public final int getBufferSize() {
    return bufferSize;
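The core of the removed method is the carry-over of unread bytes into a freshly allocated buffer, truncating to the new capacity when the old buffer holds more. A standalone illustration with plain java.nio, no Lucene types involved:

import java.nio.ByteBuffer;

public class ResizeCarryOver {
  public static void main(String[] args) {
    ByteBuffer buffer = ByteBuffer.allocate(16);
    for (int i = 0; i < 16; i++) {
      buffer.put((byte) i);
    }
    buffer.flip();
    buffer.position(4); // pretend 4 bytes were already consumed

    int newSize = 8;
    ByteBuffer newBuffer = ByteBuffer.allocate(newSize);
    if (buffer.remaining() > newBuffer.capacity()) {
      // keep only as many unread bytes as fit in the smaller buffer
      buffer.limit(buffer.position() + newBuffer.capacity());
    }
    newBuffer.put(buffer);  // copy the surviving bytes
    newBuffer.flip();       // make them readable from the start

    System.out.println("carried over " + newBuffer.remaining()
        + " bytes, first = " + newBuffer.get(0));
    // prints: carried over 8 bytes, first = 4
  }
}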
@@ -19,23 +19,8 @@ package org.apache.lucene.store;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.LuceneTestCase;
@@ -199,87 +184,4 @@ public class TestBufferedIndexInput extends LuceneTestCase {
    }
  }

  public void testSetBufferSize() throws IOException {
    Path indexDir = createTempDir("testSetBufferSize");
    MockFSDirectory dir = new MockFSDirectory(indexDir, random());
    IndexWriter writer = new IndexWriter(
        dir,
        new IndexWriterConfig(new MockAnalyzer(random())).
            setOpenMode(OpenMode.CREATE).
            setMergePolicy(newLogMergePolicy(false))
    );
    for(int i=0;i<37;i++) {
      Document doc = new Document();
      doc.add(newTextField("content", "aaa bbb ccc ddd" + i, Field.Store.YES));
      doc.add(newTextField("id", "" + i, Field.Store.YES));
      writer.addDocument(doc);
    }

    dir.allIndexInputs.clear();

    IndexReader reader = DirectoryReader.open(writer);
    Term aaa = new Term("content", "aaa");
    Term bbb = new Term("content", "bbb");

    reader.close();

    dir.tweakBufferSizes();
    writer.deleteDocuments(new Term("id", "0"));
    reader = DirectoryReader.open(writer);
    IndexSearcher searcher = newSearcher(reader);
    ScoreDoc[] hits = searcher.search(new TermQuery(bbb), 1000).scoreDocs;
    dir.tweakBufferSizes();
    assertEquals(36, hits.length);

    reader.close();

    dir.tweakBufferSizes();
    writer.deleteDocuments(new Term("id", "4"));
    reader = DirectoryReader.open(writer);
    searcher = newSearcher(reader);

    hits = searcher.search(new TermQuery(bbb), 1000).scoreDocs;
    dir.tweakBufferSizes();
    assertEquals(35, hits.length);
    dir.tweakBufferSizes();
    hits = searcher.search(new TermQuery(new Term("id", "33")), 1000).scoreDocs;
    dir.tweakBufferSizes();
    assertEquals(1, hits.length);
    hits = searcher.search(new TermQuery(aaa), 1000).scoreDocs;
    dir.tweakBufferSizes();
    assertEquals(35, hits.length);
    writer.close();
    reader.close();
  }

  private static class MockFSDirectory extends FilterDirectory {

    final List<IndexInput> allIndexInputs = new ArrayList<>();
    final Random rand;

    public MockFSDirectory(Path path, Random rand) throws IOException {
      super(new NIOFSDirectory(path));
      this.rand = rand;
    }

    public void tweakBufferSizes() {
      //int count = 0;
      for (final IndexInput ip : allIndexInputs) {
        BufferedIndexInput bii = (BufferedIndexInput) ip;
        int bufferSize = 1024 + rand.nextInt(32768);
        bii.setBufferSize(bufferSize);
        //count++;
      }
      //System.out.println("tweak'd " + count + " buffer sizes");
    }

    @Override
    public IndexInput openInput(String name, IOContext context) throws IOException {
      // Make random changes to buffer size
      //bufferSize = 1+Math.abs(rand.nextInt() % 10);
      IndexInput f = super.openInput(name, context);
      allIndexInputs.add(f);
      return f;
    }
  }
}
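The removed test relied on a FilterDirectory wrapper to capture every IndexInput it opened and then resize their buffers on the fly. With setBufferSize gone, the interception pattern itself still works, it just can no longer touch buffer sizes after the fact. A minimal sketch of that pattern (class and field names are illustrative, not part of the commit):

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;

final class TrackingDirectory extends FilterDirectory {
  final List<IndexInput> openedInputs = new ArrayList<>();

  TrackingDirectory(Directory in) {
    super(in);
  }

  @Override
  public IndexInput openInput(String name, IOContext context) throws IOException {
    IndexInput input = super.openInput(name, context);
    openedInputs.add(input); // record the input; its buffer size is fixed by the implementation
    return input;
  }
}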