Remove IOContext from Directory#openChecksumInput (#12027)

This commit is contained in:
Zach Chen 2022-12-26 11:45:42 -08:00 committed by GitHub
parent c9401bf064
commit 008a0d4206
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
49 changed files with 62 additions and 78 deletions

View File

@ -37,6 +37,8 @@ API Changes
* GITHUB#11840: Query rewrite now takes an IndexSearcher instead of IndexReader to enable concurrent
rewriting. (Patrick Zhai)
* GITHUB#11933: Remove IOContext from Directory#openChecksumInput. (Zach Chen)
* GITHUB#11814: Support deletions in IndexRearranger. (Stefan Vodita)
New Features

View File

@ -74,6 +74,12 @@ determine the number of valid ordinals for the currently-positioned document up-
illegal to call `SortedSetDocValues#nextOrd()` more than `SortedSetDocValues#docValueCount()` times
for the currently-positioned document (doing so will result in undefined behavior).
### IOContext removed from Directory#openChecksumInput (GITHUB-12027)
`Directory#openChecksumInput` no longer takes an `IOContext` parameter; it now always opens the input with
`IOContext.READONCE` internally, as that is the only valid usage pattern for checksum input.
Callers should simply remove the `IOContext` argument when calling this method.
## Migration from Lucene 9.0 to Lucene 9.1
### Test framework package migration and module (LUCENE-10301)

View File

@ -1136,7 +1136,7 @@ public class Dictionary {
new WordStorage.Builder(wordCount, hasCustomMorphData, flags, allNonSuggestibleFlags());
try (ByteSequencesReader reader =
new ByteSequencesReader(tempDir.openChecksumInput(sorted, IOContext.READONCE), sorted)) {
new ByteSequencesReader(tempDir.openChecksumInput(sorted), sorted)) {
// TODO: the flags themselves can be double-chars (long) or also numeric
// either way the trick is to encode them as char... but they must be parsed differently

View File

@ -122,7 +122,7 @@ public final class Lucene90FieldInfosFormat extends FieldInfosFormat {
throws IOException {
final String fileName =
IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, EXTENSION);
try (ChecksumIndexInput input = directory.openChecksumInput(fileName, context)) {
try (ChecksumIndexInput input = directory.openChecksumInput(fileName)) {
Throwable priorE = null;
FieldInfo[] infos = null;
try {

View File

@ -93,7 +93,7 @@ public final class Lucene90HnswVectorsReader extends KnnVectorsReader {
IndexFileNames.segmentFileName(
state.segmentInfo.name, state.segmentSuffix, Lucene90HnswVectorsFormat.META_EXTENSION);
int versionMeta = -1;
try (ChecksumIndexInput meta = state.directory.openChecksumInput(metaFileName, state.context)) {
try (ChecksumIndexInput meta = state.directory.openChecksumInput(metaFileName)) {
Throwable priorE = null;
try {
versionMeta =

View File

@ -90,7 +90,7 @@ public final class Lucene91HnswVectorsReader extends KnnVectorsReader {
IndexFileNames.segmentFileName(
state.segmentInfo.name, state.segmentSuffix, Lucene91HnswVectorsFormat.META_EXTENSION);
int versionMeta = -1;
try (ChecksumIndexInput meta = state.directory.openChecksumInput(metaFileName, state.context)) {
try (ChecksumIndexInput meta = state.directory.openChecksumInput(metaFileName)) {
Throwable priorE = null;
try {
versionMeta =

View File

@ -88,7 +88,7 @@ public final class Lucene92HnswVectorsReader extends KnnVectorsReader {
IndexFileNames.segmentFileName(
state.segmentInfo.name, state.segmentSuffix, Lucene92HnswVectorsFormat.META_EXTENSION);
int versionMeta = -1;
try (ChecksumIndexInput meta = state.directory.openChecksumInput(metaFileName, state.context)) {
try (ChecksumIndexInput meta = state.directory.openChecksumInput(metaFileName)) {
Throwable priorE = null;
try {
versionMeta =

View File

@ -38,7 +38,6 @@ import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TotalHits;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
@ -89,8 +88,7 @@ public final class Lucene94HnswVectorsReader extends KnnVectorsReader {
IndexFileNames.segmentFileName(
state.segmentInfo.name, state.segmentSuffix, Lucene94HnswVectorsFormat.META_EXTENSION);
int versionMeta = -1;
try (ChecksumIndexInput meta =
state.directory.openChecksumInput(metaFileName, IOContext.READONCE)) {
try (ChecksumIndexInput meta = state.directory.openChecksumInput(metaFileName)) {
Throwable priorE = null;
try {
versionMeta =

View File

@ -45,7 +45,7 @@ public final class EndiannessReverserUtil {
/** Open a checksum index input */
public static ChecksumIndexInput openChecksumInput(
Directory directory, String name, IOContext context) throws IOException {
return new EndiannessReverserChecksumIndexInput(directory.openChecksumInput(name, context));
return new EndiannessReverserChecksumIndexInput(directory.openChecksumInput(name));
}
/** Open an index output */

View File

@ -32,7 +32,6 @@ import org.apache.lucene.store.ByteBuffersDataOutput;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.TrackingDirectoryWrapper;
import org.apache.lucene.util.ArrayUtil;
@ -1577,7 +1576,7 @@ public class BKDWriter60 implements Closeable {
// We are reading from a temp file; go verify the checksum:
String tempFileName = ((OfflinePointWriter) writer).name;
if (tempDir.getCreatedFiles().contains(tempFileName)) {
try (ChecksumIndexInput in = tempDir.openChecksumInput(tempFileName, IOContext.READONCE)) {
try (ChecksumIndexInput in = tempDir.openChecksumInput(tempFileName)) {
CodecUtil.checkFooter(in, priorException);
}
}

View File

@ -29,7 +29,7 @@ public class TestEndiannessReverserCheckSumIndexInput extends EndiannessReverser
@Override
protected IndexInput getEndiannessReverserInput(Directory dir, String name, IOContext context)
throws IOException {
return new EndiannessReverserChecksumIndexInput(dir.openChecksumInput(name, context));
return new EndiannessReverserChecksumIndexInput(dir.openChecksumInput(name));
}
@Override

View File

@ -152,7 +152,7 @@ public final class BloomFilteringPostingsFormat extends PostingsFormat {
ChecksumIndexInput bloomIn = null;
boolean success = false;
try {
bloomIn = state.directory.openChecksumInput(bloomFileName, state.context);
bloomIn = state.directory.openChecksumInput(bloomFileName);
CodecUtil.checkIndexHeader(
bloomIn,
BLOOM_CODEC_NAME,

View File

@ -45,7 +45,6 @@ import org.apache.lucene.index.PointValues.IntersectVisitor;
import org.apache.lucene.index.PointValues.Relation;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.TrackingDirectoryWrapper;
import org.apache.lucene.util.ArrayUtil;
@ -841,7 +840,7 @@ final class SimpleTextBKDWriter implements Closeable {
if (writer instanceof OfflinePointWriter) {
// We are reading from a temp file; go verify the checksum:
String tempFileName = ((OfflinePointWriter) writer).name;
try (ChecksumIndexInput in = tempDir.openChecksumInput(tempFileName, IOContext.READONCE)) {
try (ChecksumIndexInput in = tempDir.openChecksumInput(tempFileName)) {
CodecUtil.checkFooter(in, priorException);
}
}

View File

@ -79,7 +79,7 @@ public class SimpleTextFieldInfosFormat extends FieldInfosFormat {
throws IOException {
final String fileName =
IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, FIELD_INFOS_EXTENSION);
ChecksumIndexInput input = directory.openChecksumInput(fileName, iocontext);
ChecksumIndexInput input = directory.openChecksumInput(fileName);
BytesRefBuilder scratch = new BytesRefBuilder();
boolean success = false;

View File

@ -82,8 +82,7 @@ public class SimpleTextKnnVectorsReader extends KnnVectorsReader {
SimpleTextKnnVectorsFormat.VECTOR_EXTENSION);
boolean success = false;
try (ChecksumIndexInput in =
readState.directory.openChecksumInput(metaFileName, IOContext.DEFAULT)) {
try (ChecksumIndexInput in = readState.directory.openChecksumInput(metaFileName)) {
int fieldNumber = readInt(in, FIELD_NUMBER);
while (fieldNumber != -1) {
String fieldName = readString(in, FIELD_NAME);

View File

@ -61,7 +61,7 @@ public class SimpleTextLiveDocsFormat extends LiveDocsFormat {
ChecksumIndexInput in = null;
boolean success = false;
try {
in = dir.openChecksumInput(fileName, context);
in = dir.openChecksumInput(fileName);
SimpleTextUtil.readLine(in, scratch);
assert StringHelper.startsWith(scratch.get(), SIZE);

View File

@ -70,8 +70,7 @@ class SimpleTextPointsReader extends PointsReader {
readState.segmentInfo.name,
readState.segmentSuffix,
SimpleTextPointsFormat.POINT_INDEX_EXTENSION);
try (ChecksumIndexInput in =
readState.directory.openChecksumInput(indexFileName, IOContext.DEFAULT)) {
try (ChecksumIndexInput in = readState.directory.openChecksumInput(indexFileName)) {
readLine(in);
int count = parseInt(FIELD_COUNT);
for (int i = 0; i < count; i++) {

View File

@ -78,7 +78,7 @@ public class SimpleTextSegmentInfoFormat extends SegmentInfoFormat {
BytesRefBuilder scratch = new BytesRefBuilder();
String segFileName =
IndexFileNames.segmentFileName(segmentName, "", SimpleTextSegmentInfoFormat.SI_EXTENSION);
try (ChecksumIndexInput input = directory.openChecksumInput(segFileName, context)) {
try (ChecksumIndexInput input = directory.openChecksumInput(segFileName)) {
SimpleTextUtil.readLine(input, scratch);
assert StringHelper.startsWith(scratch.get(), SI_VERSION);
final Version version;

View File

@ -110,7 +110,7 @@ public final class Lucene90CompoundFormat extends CompoundFormat {
// align file start offset
long startOffset = data.alignFilePointer(Long.BYTES);
// write bytes for file
try (ChecksumIndexInput in = dir.openChecksumInput(file, IOContext.READONCE)) {
try (ChecksumIndexInput in = dir.openChecksumInput(file)) {
// just copies the index header, verifying that its id matches what we expect
CodecUtil.verifyAndCopyIndexHeader(in, data, si.getId());

View File

@ -106,8 +106,7 @@ final class Lucene90CompoundReader extends CompoundDirectory {
private Map<String, FileEntry> readEntries(
byte[] segmentID, Directory dir, String entriesFileName) throws IOException {
Map<String, FileEntry> mapping = null;
try (ChecksumIndexInput entriesStream =
dir.openChecksumInput(entriesFileName, IOContext.READONCE)) {
try (ChecksumIndexInput entriesStream = dir.openChecksumInput(entriesFileName)) {
Throwable priorE = null;
try {
version =

View File

@ -42,7 +42,6 @@ import org.apache.lucene.index.TermsEnum.SeekStatus;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.RandomAccessInput;
import org.apache.lucene.util.BytesRef;
@ -83,7 +82,7 @@ final class Lucene90DocValuesProducer extends DocValuesProducer {
merging = false;
// read in the entries from the metadata file.
try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName, IOContext.READONCE)) {
try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName)) {
Throwable priorE = null;
try {

View File

@ -68,7 +68,7 @@ public final class Lucene90LiveDocsFormat extends LiveDocsFormat {
long gen = info.getDelGen();
String name = IndexFileNames.fileNameFromGeneration(info.info.name, EXTENSION, gen);
final int length = info.info.maxDoc();
try (ChecksumIndexInput input = dir.openChecksumInput(name, context)) {
try (ChecksumIndexInput input = dir.openChecksumInput(name)) {
Throwable priorE = null;
try {
CodecUtil.checkIndexHeader(

View File

@ -32,7 +32,6 @@ import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.RandomAccessInput;
import org.apache.lucene.util.IOUtils;
@ -61,7 +60,7 @@ final class Lucene90NormsProducer extends NormsProducer implements Cloneable {
int version = -1;
// read in the entries from the metadata file.
try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName, IOContext.READONCE)) {
try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName)) {
Throwable priorE = null;
try {
version =

View File

@ -81,8 +81,7 @@ public class Lucene90PointsReader extends PointsReader {
CodecUtil.retrieveChecksum(dataIn);
long indexLength = -1, dataLength = -1;
try (ChecksumIndexInput metaIn =
readState.directory.openChecksumInput(metaFileName, IOContext.READONCE)) {
try (ChecksumIndexInput metaIn = readState.directory.openChecksumInput(metaFileName)) {
Throwable priorE = null;
try {
CodecUtil.checkIndexHeader(

View File

@ -99,7 +99,7 @@ public class Lucene90SegmentInfoFormat extends SegmentInfoFormat {
public SegmentInfo read(Directory dir, String segment, byte[] segmentID, IOContext context)
throws IOException {
final String fileName = IndexFileNames.segmentFileName(segment, "", SI_EXTENSION);
try (ChecksumIndexInput input = dir.openChecksumInput(fileName, context)) {
try (ChecksumIndexInput input = dir.openChecksumInput(fileName)) {
Throwable priorE = null;
SegmentInfo si = null;
try {

View File

@ -150,8 +150,7 @@ public final class Lucene90BlockTreeTermsReader extends FieldsProducer {
Map<String, FieldReader> fieldMap = null;
Throwable priorE = null;
long indexLength = -1, termsLength = -1;
try (ChecksumIndexInput metaIn =
state.directory.openChecksumInput(metaName, IOContext.READONCE)) {
try (ChecksumIndexInput metaIn = state.directory.openChecksumInput(metaName)) {
try {
CodecUtil.checkIndexHeader(
metaIn,

View File

@ -120,8 +120,7 @@ public final class FieldsIndexWriter implements Closeable {
metaOut.writeInt(totalChunks + 1);
metaOut.writeLong(dataOut.getFilePointer());
try (ChecksumIndexInput docsIn =
dir.openChecksumInput(docsOut.getName(), IOContext.READONCE)) {
try (ChecksumIndexInput docsIn = dir.openChecksumInput(docsOut.getName())) {
CodecUtil.checkHeader(docsIn, codecName + "Docs", VERSION_CURRENT, VERSION_CURRENT);
Throwable priorE = null;
try {
@ -147,8 +146,7 @@ public final class FieldsIndexWriter implements Closeable {
docsOut = null;
metaOut.writeLong(dataOut.getFilePointer());
try (ChecksumIndexInput filePointersIn =
dir.openChecksumInput(filePointersOut.getName(), IOContext.READONCE)) {
try (ChecksumIndexInput filePointersIn = dir.openChecksumInput(filePointersOut.getName())) {
CodecUtil.checkHeader(
filePointersIn, codecName + "FilePointers", VERSION_CURRENT, VERSION_CURRENT);
Throwable priorE = null;

View File

@ -137,7 +137,7 @@ public final class Lucene90CompressingStoredFieldsReader extends StoredFieldsRea
final String metaStreamFN =
IndexFileNames.segmentFileName(segment, segmentSuffix, META_EXTENSION);
metaIn = d.openChecksumInput(metaStreamFN, IOContext.READONCE);
metaIn = d.openChecksumInput(metaStreamFN);
CodecUtil.checkIndexHeader(
metaIn,
INDEX_CODEC_NAME + "Meta",

View File

@ -143,7 +143,7 @@ public final class Lucene90CompressingTermVectorsReader extends TermVectorsReade
final String metaStreamFN =
IndexFileNames.segmentFileName(segment, segmentSuffix, VECTORS_META_EXTENSION);
metaIn = d.openChecksumInput(metaStreamFN, IOContext.READONCE);
metaIn = d.openChecksumInput(metaStreamFN);
CodecUtil.checkIndexHeader(
metaIn,
VECTORS_INDEX_CODEC_NAME + "Meta",

View File

@ -127,7 +127,7 @@ public final class Lucene94FieldInfosFormat extends FieldInfosFormat {
throws IOException {
final String fileName =
IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, EXTENSION);
try (ChecksumIndexInput input = directory.openChecksumInput(fileName, context)) {
try (ChecksumIndexInput input = directory.openChecksumInput(fileName)) {
Throwable priorE = null;
FieldInfo[] infos = null;
try {

View File

@ -38,7 +38,6 @@ import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TotalHits;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.RandomAccessInput;
import org.apache.lucene.util.Accountable;
@ -96,8 +95,7 @@ public final class Lucene95HnswVectorsReader extends KnnVectorsReader {
IndexFileNames.segmentFileName(
state.segmentInfo.name, state.segmentSuffix, Lucene95HnswVectorsFormat.META_EXTENSION);
int versionMeta = -1;
try (ChecksumIndexInput meta =
state.directory.openChecksumInput(metaFileName, IOContext.READONCE)) {
try (ChecksumIndexInput meta = state.directory.openChecksumInput(metaFileName)) {
Throwable priorE = null;
try {
versionMeta =

View File

@ -288,7 +288,7 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
long generation = generationFromSegmentsFileName(segmentFileName);
// System.out.println(Thread.currentThread() + ": SegmentInfos.readCommit " + segmentFileName);
try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName, IOContext.READ)) {
try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName)) {
try {
return readCommit(directory, input, generation, minSupportedMajorVersion);
} catch (EOFException | NoSuchFileException | FileNotFoundException e) {

View File

@ -152,8 +152,8 @@ public abstract class Directory implements Closeable {
* @param name the name of an existing file.
* @throws IOException in case of I/O error
*/
public ChecksumIndexInput openChecksumInput(String name, IOContext context) throws IOException {
return new BufferedChecksumIndexInput(openInput(name, context));
public ChecksumIndexInput openChecksumInput(String name) throws IOException {
return new BufferedChecksumIndexInput(openInput(name, IOContext.READONCE));
}
/**

View File

@ -283,8 +283,7 @@ public class OfflineSorter {
TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(dir);
boolean success = false;
try (ByteSequencesReader is =
getReader(dir.openChecksumInput(inputFileName, IOContext.READONCE), inputFileName)) {
try (ByteSequencesReader is = getReader(dir.openChecksumInput(inputFileName), inputFileName)) {
while (true) {
Partition part = readPartition(is);
if (part.count == 0) {
@ -367,7 +366,7 @@ public class OfflineSorter {
*/
private void verifyChecksum(Throwable priorException, ByteSequencesReader reader)
throws IOException {
try (ChecksumIndexInput in = dir.openChecksumInput(reader.name, IOContext.READONCE)) {
try (ChecksumIndexInput in = dir.openChecksumInput(reader.name)) {
CodecUtil.checkFooter(in, priorException);
}
}
@ -701,9 +700,7 @@ public class OfflineSorter {
// Open streams and read the top for each file
for (int i = 0; i < segmentsToMerge.size(); i++) {
Partition segment = getPartition(segmentsToMerge.get(i));
streams[i] =
getReader(
dir.openChecksumInput(segment.fileName, IOContext.READONCE), segment.fileName);
streams[i] = getReader(dir.openChecksumInput(segment.fileName), segment.fileName);
BytesRef item = null;
try {

View File

@ -33,7 +33,6 @@ import org.apache.lucene.store.ByteBuffersDataOutput;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.TrackingDirectoryWrapper;
import org.apache.lucene.util.ArrayUtil;
@ -1532,7 +1531,7 @@ public class BKDWriter implements Closeable {
// We are reading from a temp file; go verify the checksum:
String tempFileName = ((OfflinePointWriter) writer).name;
if (tempDir.getCreatedFiles().contains(tempFileName)) {
try (ChecksumIndexInput in = tempDir.openChecksumInput(tempFileName, IOContext.READONCE)) {
try (ChecksumIndexInput in = tempDir.openChecksumInput(tempFileName)) {
CodecUtil.checkFooter(in, priorException);
}
}

View File

@ -85,7 +85,7 @@ public final class OfflinePointReader implements PointReader {
== tempDir.fileLength(tempFileName) - CodecUtil.footerLength()) {
// If we are going to read the entire file, e.g. because BKDWriter is now
// partitioning it, we open with checksums:
in = tempDir.openChecksumInput(tempFileName, IOContext.READONCE);
in = tempDir.openChecksumInput(tempFileName);
} else {
// Since we are going to seek somewhere in the middle of a possibly huge
// file, and not read all bytes from there, don't use ChecksumIndexInput here.

View File

@ -138,7 +138,7 @@ public class TestAllFilesDetectTruncation extends LuceneTestCase {
if (name.equals(victim) == false) {
dirCopy.copyFrom(dir, name, name, IOContext.DEFAULT);
} else {
try (ChecksumIndexInput in = dir.openChecksumInput(name, IOContext.DEFAULT)) {
try (ChecksumIndexInput in = dir.openChecksumInput(name)) {
try {
CodecUtil.checkFooter(in);
// In some rare cases, the codec footer would still appear as correct even though the

View File

@ -39,7 +39,7 @@ public class TestFilterDirectory extends BaseDirectoryTestCase {
exclude.add(
Directory.class.getMethod(
"copyFrom", Directory.class, String.class, String.class, IOContext.class));
exclude.add(Directory.class.getMethod("openChecksumInput", String.class, IOContext.class));
exclude.add(Directory.class.getMethod("openChecksumInput", String.class));
for (Method m : FilterDirectory.class.getMethods()) {
if (m.getDeclaringClass() == Directory.class) {
assertTrue("method " + m.getName() + " not overridden!", exclude.contains(m));

View File

@ -92,8 +92,7 @@ public class TestHardLinkCopyDirectoryWrapper extends BaseDirectoryTestCase {
BasicFileAttributes sourceAttr =
Files.readAttributes(dir_1.resolve("foo.bar"), BasicFileAttributes.class);
assertEquals(destAttr.fileKey(), sourceAttr.fileKey());
try (ChecksumIndexInput indexInput =
wrapper.openChecksumInput("bar.foo", IOContext.DEFAULT)) {
try (ChecksumIndexInput indexInput = wrapper.openChecksumInput("bar.foo")) {
CodecUtil.checkHeader(indexInput, "foo", 0, 0);
assertEquals("hey man, nice shot!", indexInput.readString());
CodecUtil.checkFooter(indexInput);

View File

@ -194,7 +194,7 @@ public class SortedInputIterator implements InputIterator {
tempSortedFileName = sorter.sort(tempInput.getName());
return new OfflineSorter.ByteSequencesReader(
tempDir.openChecksumInput(tempSortedFileName, IOContext.READONCE), tempSortedFileName);
tempDir.openChecksumInput(tempSortedFileName), tempSortedFileName);
}
private void close() throws IOException {

View File

@ -494,8 +494,7 @@ public class AnalyzingSuggester extends Lookup {
reader =
new OfflineSorter.ByteSequencesReader(
tempDir.openChecksumInput(tempSortedFileName, IOContext.READONCE),
tempSortedFileName);
tempDir.openChecksumInput(tempSortedFileName), tempSortedFileName);
PairOutputs<Long, BytesRef> outputs =
new PairOutputs<>(PositiveIntOutputs.getSingleton(), ByteSequenceOutputs.getSingleton());

View File

@ -74,7 +74,7 @@ final class CompletionFieldsProducer extends FieldsProducer implements Accountab
delegateFieldsProducer = null;
boolean success = false;
try (ChecksumIndexInput index = state.directory.openChecksumInput(indexFile, state.context)) {
try (ChecksumIndexInput index = state.directory.openChecksumInput(indexFile)) {
// open up dict file containing all fsts
String dictFile =
IndexFileNames.segmentFileName(

View File

@ -91,8 +91,7 @@ public class ExternalRefSorter implements BytesRefSorter, Closeable {
return new ByteSequenceIterator(
new OfflineSorter.ByteSequencesReader(
sorter.getDirectory().openChecksumInput(sortedOutput, IOContext.READONCE),
sortedOutput));
sorter.getDirectory().openChecksumInput(sortedOutput), sortedOutput));
}
/** Close the writer but leave any sorted output for iteration. */

View File

@ -202,8 +202,7 @@ public class FSTCompletionLookup extends Lookup {
reader =
new OfflineSorter.ByteSequencesReader(
tempDir.openChecksumInput(tempSortedFileName, IOContext.READONCE),
tempSortedFileName);
tempDir.openChecksumInput(tempSortedFileName), tempSortedFileName);
long line = 0;
int previousBucket = 0;
int previousScore = 0;

View File

@ -266,7 +266,7 @@ public final class MockRandomPostingsFormat extends PostingsFormat {
final String seedFileName =
IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, SEED_EXT);
final ChecksumIndexInput in = state.directory.openChecksumInput(seedFileName, state.context);
final ChecksumIndexInput in = state.directory.openChecksumInput(seedFileName);
CodecUtil.checkIndexHeader(
in, "MockRandomSeed", 0, 0, state.segmentInfo.getId(), state.segmentSuffix);
final long seed = in.readLong();

View File

@ -866,8 +866,8 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
}
@Override
public ChecksumIndexInput openChecksumInput(String name, IOContext context) throws IOException {
ChecksumIndexInput in = super.openChecksumInput(name, context);
public ChecksumIndexInput openChecksumInput(String name) throws IOException {
ChecksumIndexInput in = super.openChecksumInput(name);
final FixedBitSet set =
readBytes.computeIfAbsent(name, n -> new FixedBitSet(Math.toIntExact(in.length())));
if (set.length() != in.length()) {

View File

@ -567,7 +567,7 @@ public abstract class BaseDirectoryTestCase extends LuceneTestCase {
output.writeBytes(bytes, 0, bytes.length);
output.close();
ChecksumIndexInput input = dir.openChecksumInput("checksum", newIOContext(random()));
ChecksumIndexInput input = dir.openChecksumInput("checksum");
input.skipBytes(numBytes);
assertEquals(expected.getValue(), input.getChecksum());

View File

@ -1115,9 +1115,8 @@ public class MockDirectoryWrapper extends BaseDirectoryWrapper {
// full checks. we randomly exercise "raw" directories anyway. We ensure default impls are used:
@Override
public final ChecksumIndexInput openChecksumInput(String name, IOContext context)
throws IOException {
return super.openChecksumInput(name, context);
public final ChecksumIndexInput openChecksumInput(String name) throws IOException {
return super.openChecksumInput(name);
}
@Override

View File

@ -40,7 +40,7 @@ public final class RawDirectoryWrapper extends BaseDirectoryWrapper {
}
@Override
public ChecksumIndexInput openChecksumInput(String name, IOContext context) throws IOException {
return in.openChecksumInput(name, context);
public ChecksumIndexInput openChecksumInput(String name) throws IOException {
return in.openChecksumInput(name);
}
}