LUCENE-10420: Move functional interfaces in IOUtils to top-level interfaces (#673)

Co-authored-by: Uwe Schindler <uschindler@apache.org>
Tomoko Uchida authored on 2022-02-15 00:12:28 +09:00; committed by GitHub
parent 8aa4763070
commit db8fcb84bb
15 changed files with 113 additions and 40 deletions


@@ -86,6 +86,9 @@ API Changes
DrillSideways#search(Query, CollectorManager). This reflects the change (LUCENE-10002) being made in
IndexSearcher#search that trends towards using CollectorManagers over Collectors. (Gautam Worah)
* LUCENE-10420: Move functional interfaces in IOUtils to top-level interfaces.
(David Smiley, Uwe Schindler, Dawid Weiss, Tomoko Uchida)
New Features
---------------------
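For callers, the entry above amounts to an import swap: the new top-level interfaces in org.apache.lucene.util extend the deprecated nested ones in IOUtils (see the new sources added below), so existing lambdas and method references keep working and only explicit type references change. A minimal sketch of a migrated call site, assuming lucene-core 9.1+ on the classpath; forEach is a hypothetical helper, not a Lucene API:

```java
import java.io.IOException;
import org.apache.lucene.util.IOConsumer; // previously referenced as IOUtils.IOConsumer

public class MigrationSketch {
  // Before LUCENE-10420 this parameter would have been declared as
  // IOUtils.IOConsumer<String>; only the type reference changes.
  static void forEach(IOConsumer<String> action, String... inputs) throws IOException {
    for (String input : inputs) {
      action.accept(input); // accept() is declared to throw IOException
    }
  }

  public static void main(String[] args) throws IOException {
    // Call sites that pass lambdas or method references are unaffected by the move.
    forEach(System.out::println, "hello", "world");
  }
}
```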


@@ -16,8 +16,6 @@
*/
package org.apache.lucene.analysis.ja.dict;
import static org.apache.lucene.util.IOUtils.IOSupplier;
import java.io.BufferedInputStream;
import java.io.EOFException;
import java.io.IOException;
@@ -28,6 +26,7 @@ import java.nio.channels.ReadableByteChannel;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.InputStreamDataInput;
import org.apache.lucene.util.IOSupplier;
import org.apache.lucene.util.IntsRef;
/** Base class for a binary-encoded in-memory dictionary. */


@@ -16,8 +16,6 @@
*/
package org.apache.lucene.analysis.ja.dict;
import static org.apache.lucene.util.IOUtils.IOSupplier;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
@@ -27,6 +25,7 @@ import java.nio.file.Path;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.InputStreamDataInput;
import org.apache.lucene.util.IOSupplier;
import org.apache.lucene.util.IOUtils;
/** n-gram connection cost data */


@@ -16,8 +16,6 @@
*/
package org.apache.lucene.analysis.ja.dict;
import static org.apache.lucene.util.IOUtils.IOSupplier;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
@@ -25,6 +23,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.InputStreamDataInput;
import org.apache.lucene.util.IOSupplier;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.PositiveIntOutputs;


@@ -29,6 +29,7 @@ import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOConsumer;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.InfoStream;
@@ -280,16 +281,14 @@ final class BufferedUpdatesStream implements Accountable {
final ReadersAndUpdates rld;
final SegmentReader reader;
final int startDelCount;
private final IOUtils.IOConsumer<ReadersAndUpdates> onClose;
private final IOConsumer<ReadersAndUpdates> onClose;
TermsEnum termsEnum;
PostingsEnum postingsEnum;
BytesRef term;
SegmentState(
ReadersAndUpdates rld,
IOUtils.IOConsumer<ReadersAndUpdates> onClose,
SegmentCommitInfo info)
ReadersAndUpdates rld, IOConsumer<ReadersAndUpdates> onClose, SegmentCommitInfo info)
throws IOException {
this.rld = rld;
reader = rld.getReader(IOContext.READ);


@@ -33,6 +33,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.IOConsumer;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.InfoStream;
@@ -180,8 +181,7 @@ final class DocumentsWriter implements Closeable, Accountable {
return false;
}
void purgeFlushTickets(
boolean forced, IOUtils.IOConsumer<DocumentsWriterFlushQueue.FlushTicket> consumer)
void purgeFlushTickets(boolean forced, IOConsumer<DocumentsWriterFlushQueue.FlushTicket> consumer)
throws IOException {
if (forced) {
ticketQueue.forcePurge(consumer);
@@ -608,7 +608,7 @@ final class DocumentsWriter implements Closeable, Accountable {
* is called. The caller must ensure that the purge happens without an index writer lock being
* held.
*
* @see DocumentsWriter#purgeFlushTickets(boolean, IOUtils.IOConsumer)
* @see DocumentsWriter#purgeFlushTickets(boolean, IOConsumer)
*/
void onTicketBacklog();
}


@@ -22,7 +22,7 @@ import java.util.Queue;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.lucene.index.DocumentsWriterPerThread.FlushedSegment;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.IOConsumer;
/** @lucene.internal */
final class DocumentsWriterFlushQueue {
@@ -97,7 +97,7 @@ final class DocumentsWriterFlushQueue {
return ticketCount.get() != 0;
}
private void innerPurge(IOUtils.IOConsumer<FlushTicket> consumer) throws IOException {
private void innerPurge(IOConsumer<FlushTicket> consumer) throws IOException {
assert purgeLock.isHeldByCurrentThread();
while (true) {
final FlushTicket head;
@@ -131,7 +131,7 @@
}
}
void forcePurge(IOUtils.IOConsumer<FlushTicket> consumer) throws IOException {
void forcePurge(IOConsumer<FlushTicket> consumer) throws IOException {
assert !Thread.holdsLock(this);
purgeLock.lock();
try {
@@ -141,7 +141,7 @@
}
}
void tryPurge(IOUtils.IOConsumer<FlushTicket> consumer) throws IOException {
void tryPurge(IOConsumer<FlushTicket> consumer) throws IOException {
assert !Thread.holdsLock(this);
if (purgeLock.tryLock()) {
try {


@@ -80,6 +80,8 @@ import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.Counter;
import org.apache.lucene.util.IOConsumer;
import org.apache.lucene.util.IOFunction;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.StringHelper;
@@ -525,7 +527,7 @@ public class IndexWriter
final Map<String, SegmentReader> openedReadOnlyClones = new HashMap<>();
// this function is used to control which SR are opened in order to keep track of them
// and to reuse them in the case we wait for merges in this getReader call.
IOUtils.IOFunction<SegmentCommitInfo, SegmentReader> readerFactory =
IOFunction<SegmentCommitInfo, SegmentReader> readerFactory =
sci -> {
final ReadersAndUpdates rld = getPooledInstance(sci, true);
try {
@@ -3569,7 +3571,7 @@ public class IndexWriter
SegmentInfos mergingSegmentInfos,
BooleanSupplier stopCollectingMergeResults,
MergeTrigger trigger,
IOUtils.IOConsumer<SegmentCommitInfo> mergeFinished)
IOConsumer<SegmentCommitInfo> mergeFinished)
throws IOException {
assert Thread.holdsLock(this);
assert trigger == MergeTrigger.GET_READER || trigger == MergeTrigger.COMMIT
@@ -3664,8 +3666,7 @@ public class IndexWriter
@Override
void initMergeReaders(
IOUtils.IOFunction<SegmentCommitInfo, MergePolicy.MergeReader>
readerFactory)
IOFunction<SegmentCommitInfo, MergePolicy.MergeReader> readerFactory)
throws IOException {
if (onlyOnce.compareAndSet(false, true)) {
// we do this only once below to pull readers as point in time readers
@@ -5545,7 +5546,7 @@ public class IndexWriter
TrackingDirectoryWrapper directory,
final SegmentInfo info,
IOContext context,
IOUtils.IOConsumer<Collection<String>> deleteFiles)
IOConsumer<Collection<String>> deleteFiles)
throws IOException {
// maybe this check is not needed, but why take the risk?


@@ -38,6 +38,8 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MergeInfo;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.IOConsumer;
import org.apache.lucene.util.IOFunction;
import org.apache.lucene.util.IOSupplier;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.InfoStream;
@@ -247,7 +249,7 @@ public abstract class MergePolicy {
/** Closes this merge and releases all merge readers */
final void close(
boolean success, boolean segmentDropped, IOUtils.IOConsumer<MergeReader> readerConsumer)
boolean success, boolean segmentDropped, IOConsumer<MergeReader> readerConsumer)
throws IOException {
// this method is final to ensure we never miss a super call to cleanup and finish the merge
if (mergeCompleted.complete(success) == false) {
@@ -406,7 +408,7 @@ public abstract class MergePolicy {
void onMergeComplete() throws IOException {}
/** Sets the merge readers for this merge. */
void initMergeReaders(IOUtils.IOFunction<SegmentCommitInfo, MergeReader> readerFactory)
void initMergeReaders(IOFunction<SegmentCommitInfo, MergeReader> readerFactory)
throws IOException {
assert mergeReaders.isEmpty() : "merge readers must be empty";
assert mergeCompleted.isDone() == false : "merge is already done";


@@ -32,6 +32,7 @@ import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.IOFunction;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.Version;
@@ -111,7 +112,7 @@ public final class StandardDirectoryReader extends DirectoryReader {
/** Used by near real-time search */
static StandardDirectoryReader open(
IndexWriter writer,
IOUtils.IOFunction<SegmentCommitInfo, SegmentReader> readerFunction,
IOFunction<SegmentCommitInfo, SegmentReader> readerFunction,
SegmentInfos infos,
boolean applyAllDeletes,
boolean writeAllDeletes)


@@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.util;
import java.io.IOException;
/**
* An IO operation with a single input that may throw an IOException.
*
* @see java.util.function.Consumer
* @param <T> the consumer's input type.
*/
@FunctionalInterface
@SuppressWarnings("removal")
public interface IOConsumer<T> extends IOUtils.IOConsumer<T> {
/**
* Performs this operation on the given argument.
*
* @param input the input argument
* @throws IOException if producing the result throws an {@link IOException}
*/
@Override
void accept(T input) throws IOException;
}
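A small self-contained usage sketch of the new interface; the file-deleting lambda below is illustrative only and is not the deleteFiles callback that IndexWriter passes around elsewhere in this commit:

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import org.apache.lucene.util.IOConsumer;

public class IOConsumerExample {
  public static void main(String[] args) throws IOException {
    // Unlike java.util.function.Consumer, the lambda body may throw IOException directly.
    IOConsumer<List<Path>> deleteFiles =
        paths -> {
          for (Path path : paths) {
            Files.deleteIfExists(path); // declares IOException
          }
        };
    deleteFiles.accept(List.of(Path.of("does-not-exist.tmp")));
  }
}
```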


@@ -0,0 +1,40 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.util;
import java.io.IOException;
/**
* A Function that may throw an IOException
*
* @see java.util.function.Function
* @param <T> the type of the input to the function
* @param <R> the type of the result of the function
*/
@FunctionalInterface
@SuppressWarnings("removal")
public interface IOFunction<T, R> extends IOUtils.IOFunction<T, R> {
/**
* Applies this function to the given argument.
*
* @param t the function argument
* @return the function result
* @throws IOException if producing the result throws an {@link IOException}
*/
@Override
R apply(T t) throws IOException;
}
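As with IOConsumer, the value of the interface is that apply() declares IOException, which is what lets IndexWriter hand the merge machinery a reader factory without wrapping checked exceptions. A minimal sketch; mapIO is a hypothetical helper, not a Lucene API:

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import org.apache.lucene.util.IOFunction;

public class IOFunctionExample {
  // A hypothetical helper that applies an IO-throwing function to a single input.
  static <T, R> R mapIO(T input, IOFunction<T, R> fn) throws IOException {
    return fn.apply(input);
  }

  public static void main(String[] args) throws IOException {
    Path tmp = Files.createTempFile("iofunction", ".bin");
    // Files::size declares IOException, so it cannot be a java.util.function.Function,
    // but it fits IOFunction<Path, Long> directly.
    long size = mapIO(tmp, Files::size);
    System.out.println(size); // 0 for a freshly created temp file
    Files.deleteIfExists(tmp);
  }
}
```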


@@ -510,8 +510,10 @@ public final class IOUtils {
* An IO operation with a single input.
*
* @see java.util.function.Consumer
* @deprecated was replaced by {@link org.apache.lucene.util.IOConsumer}.
*/
@FunctionalInterface
@Deprecated(forRemoval = true, since = "9.1")
public interface IOConsumer<T> {
/** Performs this operation on the given argument. */
void accept(T input) throws IOException;
@@ -521,22 +523,12 @@ public final class IOUtils {
* A Function that may throw an IOException
*
* @see java.util.function.Function
* @deprecated was replaced by {@link org.apache.lucene.util.IOFunction}.
*/
@FunctionalInterface
@Deprecated(forRemoval = true, since = "9.1")
public interface IOFunction<T, R> {
/** Applies this function to the given argument. */
R apply(T t) throws IOException;
}
/**
* A resource supplier function that may throw an IOException.
*
* <p>Note that this would open a resource such as a File. Consumers should make sure to close the
* resource (e.g., use try-with-resources)
*
* @see java.util.function.Supplier
*/
@FunctionalInterface
public interface IOSupplier<T> {
T get() throws IOException;
}
}
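The nested IOSupplier removed here already has a top-level counterpart in org.apache.lucene.util (it is what the dictionary classes earlier in this commit now import), with the same contract: get() typically opens a resource that the caller owns and must close, e.g. with try-with-resources. A short sketch under that assumption:

```java
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import org.apache.lucene.util.IOSupplier;

public class IOSupplierExample {
  public static void main(String[] args) throws IOException {
    Path tmp = Files.createTempFile("iosupplier", ".bin");
    // The supplier defers opening the stream; the caller owns the resource and closes it.
    IOSupplier<InputStream> open = () -> Files.newInputStream(tmp);
    try (InputStream in = open.get()) {
      System.out.println(in.read()); // prints -1: the temp file is empty
    }
    Files.deleteIfExists(tmp);
  }
}
```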


@@ -29,7 +29,7 @@ import java.util.List;
import java.util.Random;
import org.apache.lucene.tests.util.LuceneTestCase;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.IOUtils.IOConsumer;
import org.apache.lucene.util.IOConsumer;
import org.junit.Test;
public abstract class BaseDataOutputTestCase<T extends DataOutput> extends LuceneTestCase {


@@ -28,7 +28,7 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.tests.util.LuceneTestCase;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.IOUtils.IOConsumer;
import org.apache.lucene.util.IOConsumer;
import org.junit.Test;
public final class TestByteBuffersDataInput extends RandomizedTest {