[Rename] ElasticsearchDirectoryReader class in server module (#176)

This commit renames the ElasticsearchDirectoryReader class in the server
module to OpenSearchDirectoryReader. References, usages, and method names
throughout the rest of the codebase are updated accordingly.

Signed-off-by: Nicholas Knize <nknize@amazon.com>
Author: Nick Knize
Date: 2021-03-03 18:15:58 -06:00
parent bdc3158020
commit e60906fc11
32 changed files with 164 additions and 164 deletions
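For reference, a minimal sketch of what a typical call site looks like after this rename, modeled on the test usages in this diff. The class name OpenSearchDirectoryReaderExample and the "foo"/"_na_"/shard-1 values are illustrative only; note that the package org.elasticsearch.common.lucene.index is left unchanged by this commit, only the class and its accessor methods are renamed.

import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;

class OpenSearchDirectoryReaderExample {
    // Wrap a plain Lucene DirectoryReader so OpenSearch-internal consumers can
    // recover the shard ID; previously this was ElasticsearchDirectoryReader.wrap(...).
    static IndexReader openWrapped(Directory directory) throws IOException {
        return OpenSearchDirectoryReader.wrap(
            DirectoryReader.open(directory),
            new ShardId(new Index("foo", "_na_"), 1));
    }
}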

View File

@@ -35,7 +35,7 @@ import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.mapper.ContentPath;
@@ -106,7 +106,7 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase {
 final Map<String, Tuple<Integer, Integer>> expectedParentChildRelations = setupIndex(indexWriter);
 indexWriter.close();
-IndexReader indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory),
+IndexReader indexReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(directory),
 new ShardId(new Index("foo", "_na_"), 1));
 // TODO set "maybeWrap" to true for IndexSearcher once #23338 is resolved
 IndexSearcher indexSearcher = newSearcher(indexReader, false, true);
@@ -169,7 +169,7 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase {
 return o1.getKey().compareTo(o2.getKey());
 });
-IndexReader indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory),
+IndexReader indexReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(directory),
 new ShardId(new Index("foo", "_na_"), 1));
 // TODO set "maybeWrap" to true for IndexSearcher once #23338 is resolved
 IndexSearcher indexSearcher = newSearcher(indexReader, false, true);
@@ -211,7 +211,7 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase {
 sortedValues.put(value.v2(), l+1);
 }
-IndexReader indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory),
+IndexReader indexReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(directory),
 new ShardId(new Index("foo", "_na_"), 1));
 // TODO set "maybeWrap" to true for IndexSearcher once #23338 is resolved
 IndexSearcher indexSearcher = newSearcher(indexReader, false, true);

View File

@@ -36,7 +36,7 @@ import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.mapper.ContentPath;
@@ -101,7 +101,7 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase {
 final Map<String, Tuple<Integer, Integer>> expectedParentChildRelations = setupIndex(indexWriter);
 indexWriter.close();
-IndexReader indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory),
+IndexReader indexReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(directory),
 new ShardId(new Index("foo", "_na_"), 1));
 // TODO set "maybeWrap" to true for IndexSearcher once #23338 is resolved
 IndexSearcher indexSearcher = newSearcher(indexReader, false, true);
@@ -137,7 +137,7 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase {
 indexWriter.close();
 try (
-IndexReader indexReader = ElasticsearchDirectoryReader.wrap(
+IndexReader indexReader = OpenSearchDirectoryReader.wrap(
 DirectoryReader.open(directory),
 new ShardId(new Index("foo", "_na_"), 1)
 )

View File

@@ -31,13 +31,13 @@ import java.io.IOException;
 * A {@link org.apache.lucene.index.FilterDirectoryReader} that exposes
 * Elasticsearch internal per shard / index information like the shard ID.
 */
-public final class ElasticsearchDirectoryReader extends FilterDirectoryReader {
+public final class OpenSearchDirectoryReader extends FilterDirectoryReader {
 private final ShardId shardId;
 private final FilterDirectoryReader.SubReaderWrapper wrapper;
-private ElasticsearchDirectoryReader(DirectoryReader in, FilterDirectoryReader.SubReaderWrapper wrapper,
+private OpenSearchDirectoryReader(DirectoryReader in, FilterDirectoryReader.SubReaderWrapper wrapper,
 ShardId shardId) throws IOException {
 super(in, wrapper);
 this.wrapper = wrapper;
 this.shardId = shardId;
@@ -58,19 +58,19 @@ public final class ElasticsearchDirectoryReader extends FilterDirectoryReader {
 @Override
 protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
-return new ElasticsearchDirectoryReader(in, wrapper, shardId);
+return new OpenSearchDirectoryReader(in, wrapper, shardId);
 }
 /**
-* Wraps the given reader in a {@link ElasticsearchDirectoryReader} as
+* Wraps the given reader in a {@link OpenSearchDirectoryReader} as
 * well as all it's sub-readers in {@link ElasticsearchLeafReader} to
 * expose the given shard Id.
 *
 * @param reader the reader to wrap
 * @param shardId the shard ID to expose via the elasticsearch internal reader wrappers.
 */
-public static ElasticsearchDirectoryReader wrap(DirectoryReader reader, ShardId shardId) throws IOException {
-return new ElasticsearchDirectoryReader(reader, new SubReaderWrapper(shardId), shardId);
+public static OpenSearchDirectoryReader wrap(DirectoryReader reader, ShardId shardId) throws IOException {
+return new OpenSearchDirectoryReader(reader, new SubReaderWrapper(shardId), shardId);
 }
 private static final class SubReaderWrapper extends FilterDirectoryReader.SubReaderWrapper {
@@ -86,22 +86,22 @@ public final class ElasticsearchDirectoryReader extends FilterDirectoryReader {
 /**
 * Adds the given listener to the provided directory reader. The reader
-* must contain an {@link ElasticsearchDirectoryReader} in it's hierarchy
+* must contain an {@link OpenSearchDirectoryReader} in it's hierarchy
 * otherwise we can't safely install the listener.
 *
 * @throws IllegalArgumentException if the reader doesn't contain an
-* {@link ElasticsearchDirectoryReader} in it's hierarchy
+* {@link OpenSearchDirectoryReader} in it's hierarchy
 */
 @SuppressForbidden(reason = "This is the only sane way to add a ReaderClosedListener")
 public static void addReaderCloseListener(DirectoryReader reader, IndexReader.ClosedListener listener) {
-ElasticsearchDirectoryReader elasticsearchDirectoryReader = getElasticsearchDirectoryReader(reader);
-if (elasticsearchDirectoryReader == null) {
+OpenSearchDirectoryReader openSearchDirectoryReader = getOpenSearchDirectoryReader(reader);
+if (openSearchDirectoryReader == null) {
 throw new IllegalArgumentException(
-"Can't install close listener reader is not an ElasticsearchDirectoryReader/ElasticsearchLeafReader");
+"Can't install close listener reader is not an OpenSearchDirectoryReader/ElasticsearchLeafReader");
 }
-IndexReader.CacheHelper cacheHelper = elasticsearchDirectoryReader.getReaderCacheHelper();
+IndexReader.CacheHelper cacheHelper = openSearchDirectoryReader.getReaderCacheHelper();
 if (cacheHelper == null) {
-throw new IllegalArgumentException("Reader " + elasticsearchDirectoryReader + " does not support caching");
+throw new IllegalArgumentException("Reader " + openSearchDirectoryReader + " does not support caching");
 }
 assert cacheHelper.getKey() == reader.getReaderCacheHelper().getKey();
 cacheHelper.addClosedListener(listener);
@@ -109,19 +109,19 @@ public final class ElasticsearchDirectoryReader extends FilterDirectoryReader {
 /**
 * Tries to unwrap the given reader until the first
-* {@link ElasticsearchDirectoryReader} instance is found or {@code null}
+* {@link OpenSearchDirectoryReader} instance is found or {@code null}
 * if no instance is found.
 */
-public static ElasticsearchDirectoryReader getElasticsearchDirectoryReader(DirectoryReader reader) {
+public static OpenSearchDirectoryReader getOpenSearchDirectoryReader(DirectoryReader reader) {
 if (reader instanceof FilterDirectoryReader) {
-if (reader instanceof ElasticsearchDirectoryReader) {
-return (ElasticsearchDirectoryReader) reader;
+if (reader instanceof OpenSearchDirectoryReader) {
+return (OpenSearchDirectoryReader) reader;
 } else {
 // We need to use FilterDirectoryReader#getDelegate and not FilterDirectoryReader#unwrap, because
 // If there are multiple levels of filtered leaf readers then with the unwrap() method it immediately
 // returns the most inner leaf reader and thus skipping of over any other filtered leaf reader that
 // may be instance of ElasticsearchLeafReader. This can cause us to miss the shardId.
-return getElasticsearchDirectoryReader(((FilterDirectoryReader) reader).getDelegate());
+return getOpenSearchDirectoryReader(((FilterDirectoryReader) reader).getDelegate());
 }
 }
 return null;

View File

@@ -22,7 +22,7 @@ package org.elasticsearch.index;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldDataService;
@@ -57,7 +57,7 @@ public final class IndexWarmer {
 this.listeners = Collections.unmodifiableList(list);
 }
-void warm(ElasticsearchDirectoryReader reader, IndexShard shard, IndexSettings settings) {
+void warm(OpenSearchDirectoryReader reader, IndexShard shard, IndexSettings settings) {
 if (shard.state() == IndexShardState.CLOSED) {
 return;
 }
@@ -102,7 +102,7 @@ public final class IndexWarmer {
 public interface Listener {
 /** Queue tasks to warm-up the given segments and return handles that allow to wait for termination of the
 * execution of those tasks. */
-TerminationHandle warmReader(IndexShard indexShard, ElasticsearchDirectoryReader reader);
+TerminationHandle warmReader(IndexShard indexShard, OpenSearchDirectoryReader reader);
 }
 private static class FieldDataWarmer implements IndexWarmer.Listener {
@@ -116,7 +116,7 @@ public final class IndexWarmer {
 }
 @Override
-public TerminationHandle warmReader(final IndexShard indexShard, final ElasticsearchDirectoryReader reader) {
+public TerminationHandle warmReader(final IndexShard indexShard, final OpenSearchDirectoryReader reader) {
 final MapperService mapperService = indexShard.mapperService();
 final Map<String, MappedFieldType> warmUpGlobalOrdinals = new HashMap<>();
 for (MappedFieldType fieldType : mapperService.fieldTypes()) {

View File

@@ -38,7 +38,7 @@ import org.elasticsearch.common.cache.Cache;
 import org.elasticsearch.common.cache.CacheBuilder;
 import org.elasticsearch.common.cache.RemovalListener;
 import org.elasticsearch.common.cache.RemovalNotification;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
@@ -224,7 +224,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent
 }
 @Override
-public IndexWarmer.TerminationHandle warmReader(final IndexShard indexShard, final ElasticsearchDirectoryReader reader) {
+public IndexWarmer.TerminationHandle warmReader(final IndexShard indexShard, final OpenSearchDirectoryReader reader) {
 if (indexSettings.getIndex().equals(indexShard.indexSettings().getIndex()) == false) {
 // this is from a different index
 return TerminationHandle.NO_WAIT;

View File

@@ -27,10 +27,10 @@ import org.apache.lucene.search.ReferenceManager;
 import org.apache.lucene.search.SearcherManager;
 import org.elasticsearch.common.SuppressForbidden;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 /**
-* Utility class to safely share {@link ElasticsearchDirectoryReader} instances across
+* Utility class to safely share {@link OpenSearchDirectoryReader} instances across
 * multiple threads, while periodically reopening. This class ensures each
 * reader is closed only once all threads have finished using it.
 *
@@ -38,32 +38,32 @@ import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
 *
 */
 @SuppressForbidden(reason = "reference counting is required here")
-class ElasticsearchReaderManager extends ReferenceManager<ElasticsearchDirectoryReader> {
-private final BiConsumer<ElasticsearchDirectoryReader, ElasticsearchDirectoryReader> refreshListener;
+class ElasticsearchReaderManager extends ReferenceManager<OpenSearchDirectoryReader> {
+private final BiConsumer<OpenSearchDirectoryReader, OpenSearchDirectoryReader> refreshListener;
 /**
 * Creates and returns a new ElasticsearchReaderManager from the given
-* already-opened {@link ElasticsearchDirectoryReader}, stealing
+* already-opened {@link OpenSearchDirectoryReader}, stealing
 * the incoming reference.
 *
 * @param reader the directoryReader to use for future reopens
 * @param refreshListener A consumer that is called every time a new reader is opened
 */
-ElasticsearchReaderManager(ElasticsearchDirectoryReader reader,
-BiConsumer<ElasticsearchDirectoryReader, ElasticsearchDirectoryReader> refreshListener) {
+ElasticsearchReaderManager(OpenSearchDirectoryReader reader,
+BiConsumer<OpenSearchDirectoryReader, OpenSearchDirectoryReader> refreshListener) {
 this.current = reader;
 this.refreshListener = refreshListener;
 refreshListener.accept(current, null);
 }
 @Override
-protected void decRef(ElasticsearchDirectoryReader reference) throws IOException {
+protected void decRef(OpenSearchDirectoryReader reference) throws IOException {
 reference.decRef();
 }
 @Override
-protected ElasticsearchDirectoryReader refreshIfNeeded(ElasticsearchDirectoryReader referenceToRefresh) throws IOException {
-final ElasticsearchDirectoryReader reader = (ElasticsearchDirectoryReader) DirectoryReader.openIfChanged(referenceToRefresh);
+protected OpenSearchDirectoryReader refreshIfNeeded(OpenSearchDirectoryReader referenceToRefresh) throws IOException {
+final OpenSearchDirectoryReader reader = (OpenSearchDirectoryReader) DirectoryReader.openIfChanged(referenceToRefresh);
 if (reader != null) {
 refreshListener.accept(reader, referenceToRefresh);
 }
@@ -71,12 +71,12 @@ class ElasticsearchReaderManager extends ReferenceManager<ElasticsearchDirectory
 }
 @Override
-protected boolean tryIncRef(ElasticsearchDirectoryReader reference) {
+protected boolean tryIncRef(OpenSearchDirectoryReader reference) {
 return reference.tryIncRef();
 }
 @Override
-protected int getRefCount(ElasticsearchDirectoryReader reference) {
+protected int getRefCount(OpenSearchDirectoryReader reference) {
 return reference.getRefCount();
 }
 }

View File

@@ -54,7 +54,7 @@ import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.lucene.Lucene;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver;
 import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndVersion;
@@ -636,8 +636,8 @@ public abstract class Engine implements Closeable {
 }
 Releasable releasable = store::decRef;
 try {
-ReferenceManager<ElasticsearchDirectoryReader> referenceManager = getReferenceManager(scope);
-ElasticsearchDirectoryReader acquire = referenceManager.acquire();
+ReferenceManager<OpenSearchDirectoryReader> referenceManager = getReferenceManager(scope);
+OpenSearchDirectoryReader acquire = referenceManager.acquire();
 SearcherSupplier reader = new SearcherSupplier(wrapper) {
 @Override
 public Searcher acquireSearcherInternal(String source) {
@@ -695,7 +695,7 @@ public abstract class Engine implements Closeable {
 }
 }
-protected abstract ReferenceManager<ElasticsearchDirectoryReader> getReferenceManager(SearcherScope scope);
+protected abstract ReferenceManager<OpenSearchDirectoryReader> getReferenceManager(SearcherScope scope);
 boolean assertSearcherIsWarmedUp(String source, SearcherScope scope) {
 return true;
@@ -1654,7 +1654,7 @@ public abstract class Engine implements Closeable {
 this.ifPrimaryTerm = primaryTerm;
 return this;
 }
 public long getIfPrimaryTerm() {
 return ifPrimaryTerm;
 }
@@ -1860,7 +1860,7 @@ public abstract class Engine implements Closeable {
 /**
 * Called once a new top-level reader is opened.
 */
-void warm(ElasticsearchDirectoryReader reader);
+void warm(OpenSearchDirectoryReader reader);
 }
 /**

View File

@@ -61,7 +61,7 @@ import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lucene.LoggerInfoStream;
 import org.elasticsearch.common.lucene.Lucene;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver;
 import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndSeqNo;
@@ -346,25 +346,25 @@ public class InternalEngine extends Engine {
 * and old segments can be released in the same way previous version did this (as a side-effect of _refresh)
 */
 @SuppressForbidden(reason = "reference counting is required here")
-private static final class ExternalReaderManager extends ReferenceManager<ElasticsearchDirectoryReader> {
-private final BiConsumer<ElasticsearchDirectoryReader, ElasticsearchDirectoryReader> refreshListener;
+private static final class ExternalReaderManager extends ReferenceManager<OpenSearchDirectoryReader> {
+private final BiConsumer<OpenSearchDirectoryReader, OpenSearchDirectoryReader> refreshListener;
 private final ElasticsearchReaderManager internalReaderManager;
 private boolean isWarmedUp; //guarded by refreshLock
 ExternalReaderManager(ElasticsearchReaderManager internalReaderManager,
-BiConsumer<ElasticsearchDirectoryReader, ElasticsearchDirectoryReader> refreshListener) throws IOException {
+BiConsumer<OpenSearchDirectoryReader, OpenSearchDirectoryReader> refreshListener) throws IOException {
 this.refreshListener = refreshListener;
 this.internalReaderManager = internalReaderManager;
 this.current = internalReaderManager.acquire(); // steal the reference without warming up
 }
 @Override
-protected ElasticsearchDirectoryReader refreshIfNeeded(ElasticsearchDirectoryReader referenceToRefresh) throws IOException {
+protected OpenSearchDirectoryReader refreshIfNeeded(OpenSearchDirectoryReader referenceToRefresh) throws IOException {
 // we simply run a blocking refresh on the internal reference manager and then steal it's reader
 // it's a save operation since we acquire the reader which incs it's reference but then down the road
 // steal it by calling incRef on the "stolen" reader
 internalReaderManager.maybeRefreshBlocking();
-final ElasticsearchDirectoryReader newReader = internalReaderManager.acquire();
+final OpenSearchDirectoryReader newReader = internalReaderManager.acquire();
 if (isWarmedUp == false || newReader != referenceToRefresh) {
 boolean success = false;
 try {
@@ -387,17 +387,17 @@ public class InternalEngine extends Engine {
 }
 @Override
-protected boolean tryIncRef(ElasticsearchDirectoryReader reference) {
+protected boolean tryIncRef(OpenSearchDirectoryReader reference) {
 return reference.tryIncRef();
 }
 @Override
-protected int getRefCount(ElasticsearchDirectoryReader reference) {
+protected int getRefCount(OpenSearchDirectoryReader reference) {
 return reference.getRefCount();
 }
 @Override
-protected void decRef(ElasticsearchDirectoryReader reference) throws IOException {
+protected void decRef(OpenSearchDirectoryReader reference) throws IOException {
 reference.decRef();
 }
 }
@@ -638,8 +638,8 @@ public class InternalEngine extends Engine {
 ElasticsearchReaderManager internalReaderManager = null;
 try {
 try {
-final ElasticsearchDirectoryReader directoryReader =
-ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter), shardId);
+final OpenSearchDirectoryReader directoryReader =
+OpenSearchDirectoryReader.wrap(DirectoryReader.open(indexWriter), shardId);
 internalReaderManager = new ElasticsearchReaderManager(directoryReader,
 new RamAccountingRefreshListener(engineConfig.getCircuitBreakerService()));
 lastCommittedSegmentInfos = store.readLastCommittedSegmentsInfo();
@@ -1674,7 +1674,7 @@ public class InternalEngine extends Engine {
 try {
 // even though we maintain 2 managers we really do the heavy-lifting only once.
 // the second refresh will only do the extra work we have to do for warming caches etc.
-ReferenceManager<ElasticsearchDirectoryReader> referenceManager = getReferenceManager(scope);
+ReferenceManager<OpenSearchDirectoryReader> referenceManager = getReferenceManager(scope);
 // it is intentional that we never refresh both internal / external together
 if (block) {
 referenceManager.maybeRefreshBlocking();
@@ -2259,7 +2259,7 @@ public class InternalEngine extends Engine {
 }
 @Override
-protected final ReferenceManager<ElasticsearchDirectoryReader> getReferenceManager(SearcherScope scope) {
+protected final ReferenceManager<OpenSearchDirectoryReader> getReferenceManager(SearcherScope scope) {
 switch (scope) {
 case INTERNAL:
 return internalReaderManager;
@@ -2331,7 +2331,7 @@ public class InternalEngine extends Engine {
 }
 /** A listener that warms the segments if needed when acquiring a new reader */
-static final class RefreshWarmerListener implements BiConsumer<ElasticsearchDirectoryReader, ElasticsearchDirectoryReader> {
+static final class RefreshWarmerListener implements BiConsumer<OpenSearchDirectoryReader, OpenSearchDirectoryReader> {
 private final Engine.Warmer warmer;
 private final Logger logger;
 private final AtomicBoolean isEngineClosed;
@@ -2343,7 +2343,7 @@ public class InternalEngine extends Engine {
 }
 @Override
-public void accept(ElasticsearchDirectoryReader reader, ElasticsearchDirectoryReader previousReader) {
+public void accept(OpenSearchDirectoryReader reader, OpenSearchDirectoryReader previousReader) {
 if (warmer != null) {
 try {
 warmer.warm(reader);

View File

@@ -24,7 +24,7 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.SegmentReader;
 import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.lucene.Lucene;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.indices.breaker.CircuitBreakerService;
 import java.util.Collections;
@@ -36,7 +36,7 @@ import java.util.function.BiConsumer;
 /**
 * A refresh listener that tracks the amount of memory used by segments in the accounting circuit breaker.
 */
-final class RamAccountingRefreshListener implements BiConsumer<ElasticsearchDirectoryReader, ElasticsearchDirectoryReader> {
+final class RamAccountingRefreshListener implements BiConsumer<OpenSearchDirectoryReader, OpenSearchDirectoryReader> {
 private final CircuitBreakerService breakerService;
@@ -45,7 +45,7 @@ final class RamAccountingRefreshListener implements BiConsumer<ElasticsearchDire
 }
 @Override
-public void accept(ElasticsearchDirectoryReader reader, ElasticsearchDirectoryReader previousReader) {
+public void accept(OpenSearchDirectoryReader reader, OpenSearchDirectoryReader previousReader) {
 final CircuitBreaker breaker = breakerService.getBreaker(CircuitBreaker.ACCOUNTING);
 // Construct a list of the previous segment readers, we only want to track memory used

View File

@@ -28,7 +28,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.Lock;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.lucene.Lucene;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.util.concurrent.ReleasableLock;
 import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.index.mapper.MapperService;
@@ -98,7 +98,7 @@ public class ReadOnlyEngine extends Engine {
 try {
 Store store = config.getStore();
 store.incRef();
-ElasticsearchDirectoryReader reader = null;
+OpenSearchDirectoryReader reader = null;
 Directory directory = store.directory();
 Lock indexWriterLock = null;
 boolean success = false;
@@ -170,13 +170,13 @@ public class ReadOnlyEngine extends Engine {
 // reopened as an internal engine, which would be the path to fix the issue.
 }
-protected final ElasticsearchDirectoryReader wrapReader(DirectoryReader reader,
+protected final OpenSearchDirectoryReader wrapReader(DirectoryReader reader,
 Function<DirectoryReader, DirectoryReader> readerWrapperFunction) throws IOException {
 if (engineConfig.getIndexSettings().isSoftDeleteEnabled()) {
 reader = new SoftDeletesDirectoryReaderWrapper(reader, Lucene.SOFT_DELETES_FIELD);
 }
 reader = readerWrapperFunction.apply(reader);
-return ElasticsearchDirectoryReader.wrap(reader, engineConfig.getShardId());
+return OpenSearchDirectoryReader.wrap(reader, engineConfig.getShardId());
 }
 protected DirectoryReader open(IndexCommit commit) throws IOException {
@@ -231,7 +231,7 @@ public class ReadOnlyEngine extends Engine {
 }
 @Override
-protected ReferenceManager<ElasticsearchDirectoryReader> getReferenceManager(SearcherScope scope) {
+protected ReferenceManager<OpenSearchDirectoryReader> getReferenceManager(SearcherScope scope) {
 return readerManager;
 }
@@ -478,7 +478,7 @@ public class ReadOnlyEngine extends Engine {
 }
-protected void processReader(ElasticsearchDirectoryReader reader) {
+protected void processReader(OpenSearchDirectoryReader reader) {
 refreshListener.accept(reader, null);
 }

View File

@@ -64,7 +64,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.lucene.Lucene;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.metrics.CounterMetric;
 import org.elasticsearch.common.metrics.MeanMetric;
 import org.elasticsearch.common.settings.Settings;
@@ -1273,8 +1273,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
 }
 private Engine.Searcher wrapSearcher(Engine.Searcher searcher) {
-assert ElasticsearchDirectoryReader.unwrap(searcher.getDirectoryReader())
-!= null : "DirectoryReader must be an instance or ElasticsearchDirectoryReader";
+assert OpenSearchDirectoryReader.unwrap(searcher.getDirectoryReader())
+!= null : "DirectoryReader must be an instance or OpenSearchDirectoryReader";
 boolean success = false;
 try {
 final Engine.Searcher newSearcher = readerWrapper == null ? searcher : wrapSearcher(searcher, readerWrapper);
@@ -1293,22 +1293,22 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
 static Engine.Searcher wrapSearcher(Engine.Searcher engineSearcher,
 CheckedFunction<DirectoryReader, DirectoryReader, IOException> readerWrapper) throws IOException {
 assert readerWrapper != null;
-final ElasticsearchDirectoryReader elasticsearchDirectoryReader =
-ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(engineSearcher.getDirectoryReader());
-if (elasticsearchDirectoryReader == null) {
+final OpenSearchDirectoryReader openSearchDirectoryReader =
+OpenSearchDirectoryReader.getOpenSearchDirectoryReader(engineSearcher.getDirectoryReader());
+if (openSearchDirectoryReader == null) {
 throw new IllegalStateException("Can't wrap non elasticsearch directory reader");
 }
 NonClosingReaderWrapper nonClosingReaderWrapper = new NonClosingReaderWrapper(engineSearcher.getDirectoryReader());
 DirectoryReader reader = readerWrapper.apply(nonClosingReaderWrapper);
 if (reader != nonClosingReaderWrapper) {
-if (reader.getReaderCacheHelper() != elasticsearchDirectoryReader.getReaderCacheHelper()) {
+if (reader.getReaderCacheHelper() != openSearchDirectoryReader.getReaderCacheHelper()) {
 throw new IllegalStateException("wrapped directory reader doesn't delegate IndexReader#getCoreCacheKey," +
 " wrappers must override this method and delegate to the original readers core cache key. Wrapped readers can't be " +
 "used as cache keys since their are used only per request which would lead to subtle bugs");
 }
-if (ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(reader) != elasticsearchDirectoryReader) {
+if (OpenSearchDirectoryReader.getOpenSearchDirectoryReader(reader) != openSearchDirectoryReader) {
 // prevent that somebody wraps with a non-filter reader
-throw new IllegalStateException("wrapped directory reader hides actual ElasticsearchDirectoryReader but shouldn't");
+throw new IllegalStateException("wrapped directory reader hides actual OpenSearchDirectoryReader but shouldn't");
 }
 }

View File

@@ -22,7 +22,7 @@ package org.elasticsearch.index.shard;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.LeafReader;
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.lucene.index.ElasticsearchLeafReader;
 public final class ShardUtils {
@@ -49,11 +49,11 @@ public final class ShardUtils {
 */
 @Nullable
 public static ShardId extractShardId(DirectoryReader reader) {
-final ElasticsearchDirectoryReader esReader = ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(reader);
+final OpenSearchDirectoryReader esReader = OpenSearchDirectoryReader.getOpenSearchDirectoryReader(reader);
 if (esReader != null) {
 return esReader.shardId();
 }
-throw new IllegalArgumentException("can't extract shard ID, can't unwrap ElasticsearchDirectoryReader");
+throw new IllegalArgumentException("can't extract shard ID, can't unwrap OpenSearchDirectoryReader");
 }

View File

@@ -34,7 +34,7 @@ import org.elasticsearch.common.cache.CacheBuilder;
 import org.elasticsearch.common.cache.CacheLoader;
 import org.elasticsearch.common.cache.RemovalListener;
 import org.elasticsearch.common.cache.RemovalNotification;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.settings.Settings;
@@ -125,7 +125,7 @@ public final class IndicesRequestCache implements RemovalListener<IndicesRequest
 if (!registeredClosedListeners.containsKey(cleanupKey)) {
 Boolean previous = registeredClosedListeners.putIfAbsent(cleanupKey, Boolean.TRUE);
 if (previous == null) {
-ElasticsearchDirectoryReader.addReaderCloseListener(reader, cleanupKey);
+OpenSearchDirectoryReader.addReaderCloseListener(reader, cleanupKey);
 }
 }
 } else {

View File

@@ -34,7 +34,7 @@ import org.elasticsearch.common.cache.CacheBuilder;
 import org.elasticsearch.common.cache.RemovalListener;
 import org.elasticsearch.common.cache.RemovalNotification;
 import org.elasticsearch.common.lease.Releasable;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.settings.Settings;
@@ -169,7 +169,7 @@ public class IndicesFieldDataCache implements RemovalListener<IndicesFieldDataCa
 final Key key = new Key(this, cacheHelper.getKey(), shardId);
 //noinspection unchecked
 final Accountable accountable = cache.computeIfAbsent(key, k -> {
-ElasticsearchDirectoryReader.addReaderCloseListener(indexReader, IndexFieldCache.this);
+OpenSearchDirectoryReader.addReaderCloseListener(indexReader, IndexFieldCache.this);
 Collections.addAll(k.listeners, this.listeners);
 final Accountable ifd = (Accountable) indexFieldData.loadGlobalDirect(indexReader);
 for (Listener listener : k.listeners) {

View File

@@ -27,7 +27,7 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.test.ESTestCase;
@@ -62,7 +62,7 @@ public class ShardCoreKeyMapTests extends ESTestCase {
 try (Directory dir = newDirectory();
 RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) {
 writer.addDocument(new Document());
-try (DirectoryReader dirReader = ElasticsearchDirectoryReader.wrap(writer.getReader(), new ShardId("index1", "_na_", 1))) {
+try (DirectoryReader dirReader = OpenSearchDirectoryReader.wrap(writer.getReader(), new ShardId("index1", "_na_", 1))) {
 reader = dirReader.leaves().get(0).reader();
 }
 }
@@ -93,9 +93,9 @@ public class ShardCoreKeyMapTests extends ESTestCase {
 ShardId shardId2 = new ShardId("index1", "_na_", 3);
 ShardId shardId3 = new ShardId("index2", "_na_", 2);
-ElasticsearchDirectoryReader reader1 = ElasticsearchDirectoryReader.wrap(w1.getReader(), shardId1);
-ElasticsearchDirectoryReader reader2 = ElasticsearchDirectoryReader.wrap(w2.getReader(), shardId2);
-ElasticsearchDirectoryReader reader3 = ElasticsearchDirectoryReader.wrap(w3.getReader(), shardId3);
+OpenSearchDirectoryReader reader1 = OpenSearchDirectoryReader.wrap(w1.getReader(), shardId1);
+OpenSearchDirectoryReader reader2 = OpenSearchDirectoryReader.wrap(w2.getReader(), shardId2);
+OpenSearchDirectoryReader reader3 = OpenSearchDirectoryReader.wrap(w3.getReader(), shardId3);
 ShardCoreKeyMap map = new ShardCoreKeyMap();
 for (DirectoryReader reader : Arrays.asList(reader1, reader2, reader3)) {
@@ -116,14 +116,14 @@ public class ShardCoreKeyMapTests extends ESTestCase {
 }
 w1.addDocument(new Document());
-ElasticsearchDirectoryReader newReader1 = ElasticsearchDirectoryReader.wrap(w1.getReader(), shardId1);
+OpenSearchDirectoryReader newReader1 = OpenSearchDirectoryReader.wrap(w1.getReader(), shardId1);
 reader1.close();
 reader1 = newReader1;
 // same for reader2, but with a force merge to trigger evictions
 w2.addDocument(new Document());
 w2.forceMerge(1);
-ElasticsearchDirectoryReader newReader2 = ElasticsearchDirectoryReader.wrap(w2.getReader(), shardId2);
+OpenSearchDirectoryReader newReader2 = OpenSearchDirectoryReader.wrap(w2.getReader(), shardId2);
 reader2.close();
 reader2 = newReader2;

View File

@@ -54,7 +54,7 @@ public class ESDirectoryReaderTests extends ESTestCase {
 // open reader
 ShardId shardId = new ShardId("fake", "_na_", 1);
-DirectoryReader ir = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(iw), shardId);
+DirectoryReader ir = OpenSearchDirectoryReader.wrap(DirectoryReader.open(iw), shardId);
 assertEquals(2, ir.numDocs());
 assertEquals(1, ir.leaves().size());

View File

@@ -28,7 +28,7 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.store.Directory;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.lucene.Lucene;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.index.mapper.IdFieldMapper;
 import org.elasticsearch.index.mapper.SeqNoFieldMapper;
 import org.elasticsearch.index.mapper.VersionFieldMapper;
@@ -64,7 +64,7 @@ public class VersionsTests extends ESTestCase {
 public void testVersions() throws Exception {
 Directory dir = newDirectory();
 IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
-DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
+DirectoryReader directoryReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
 assertThat(loadDocIdAndVersion(directoryReader, new Term(IdFieldMapper.NAME, "1"), randomBoolean()), nullValue());
 Document doc = new Document();
@@ -128,7 +128,7 @@ public class VersionsTests extends ESTestCase {
 docs.add(doc);
 writer.updateDocuments(new Term(IdFieldMapper.NAME, "1"), docs);
-DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
+DirectoryReader directoryReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
 assertThat(loadDocIdAndVersion(directoryReader, new Term(IdFieldMapper.NAME, "1"), randomBoolean()).version, equalTo(5L));
 version.setLongValue(6L);
@@ -189,7 +189,7 @@ public class VersionsTests extends ESTestCase {
 assertEquals(87, loadDocIdAndVersion(reader, new Term(IdFieldMapper.NAME, "6"), randomBoolean()).version);
 assertEquals(size+1, VersionsAndSeqNoResolver.lookupStates.size());
 // now wrap the reader
-DirectoryReader wrapped = ElasticsearchDirectoryReader.wrap(reader, new ShardId("bogus", "_na_", 5));
+DirectoryReader wrapped = OpenSearchDirectoryReader.wrap(reader, new ShardId("bogus", "_na_", 5));
 assertEquals(87, loadDocIdAndVersion(wrapped, new Term(IdFieldMapper.NAME, "6"), randomBoolean()).version);
 // same size map: core cache key is shared
 assertEquals(size+1, VersionsAndSeqNoResolver.lookupStates.size());

View File

@@ -37,7 +37,7 @@ import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.BitSet;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.index.IndexSettings;
@@ -87,7 +87,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
 writer.commit();
 DirectoryReader reader = DirectoryReader.open(writer);
-reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0));
+reader = OpenSearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0));
 BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS, new BitsetFilterCache.Listener() {
 @Override
@@ -111,7 +111,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
 writer.forceMerge(1);
 reader.close();
 reader = DirectoryReader.open(writer);
-reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0));
+reader = OpenSearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0));
 assertThat(matchCount(filter, reader), equalTo(3));
@@ -136,7 +136,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
 writer.addDocument(document);
 writer.commit();
 final DirectoryReader writerReader = DirectoryReader.open(writer);
-final IndexReader reader = ElasticsearchDirectoryReader.wrap(writerReader, new ShardId("test", "_na_", 0));
+final IndexReader reader = OpenSearchDirectoryReader.wrap(writerReader, new ShardId("test", "_na_", 0));
 final AtomicLong stats = new AtomicLong();
 final AtomicInteger onCacheCalls = new AtomicInteger();
@@ -210,7 +210,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
 writer.addDocument(new Document());
 DirectoryReader reader = DirectoryReader.open(writer);
 writer.close();
-reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test2", "_na_", 0));
+reader = OpenSearchDirectoryReader.wrap(reader, new ShardId("test2", "_na_", 0));
 BitSetProducer producer = cache.getBitSetProducer(new MatchAllDocsQuery());

View File

@@ -91,7 +91,7 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.lucene.Lucene;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver;
@@ -4730,8 +4730,8 @@ public class InternalEngineTests extends EngineTestCase {
 public void assertSameReader(Engine.Searcher left, Engine.Searcher right) {
-List<LeafReaderContext> leftLeaves = ElasticsearchDirectoryReader.unwrap(left.getDirectoryReader()).leaves();
-List<LeafReaderContext> rightLeaves = ElasticsearchDirectoryReader.unwrap(right.getDirectoryReader()).leaves();
+List<LeafReaderContext> leftLeaves = OpenSearchDirectoryReader.unwrap(left.getDirectoryReader()).leaves();
+List<LeafReaderContext> rightLeaves = OpenSearchDirectoryReader.unwrap(right.getDirectoryReader()).leaves();
 assertEquals(rightLeaves.size(), leftLeaves.size());
 for (int i = 0; i < leftLeaves.size(); i++) {
 assertSame(leftLeaves.get(i).reader(), rightLeaves.get(i).reader());
@@ -4739,8 +4739,8 @@ public class InternalEngineTests extends EngineTestCase {
 }
 public void assertNotSameReader(Engine.Searcher left, Engine.Searcher right) {
-List<LeafReaderContext> leftLeaves = ElasticsearchDirectoryReader.unwrap(left.getDirectoryReader()).leaves();
-List<LeafReaderContext> rightLeaves = ElasticsearchDirectoryReader.unwrap(right.getDirectoryReader()).leaves();
+List<LeafReaderContext> leftLeaves = OpenSearchDirectoryReader.unwrap(left.getDirectoryReader()).leaves();
+List<LeafReaderContext> rightLeaves = OpenSearchDirectoryReader.unwrap(right.getDirectoryReader()).leaves();
 if (rightLeaves.size() == leftLeaves.size()) {
 for (int i = 0; i < leftLeaves.size(); i++) {
 if (leftLeaves.get(i).reader() != rightLeaves.get(i).reader()) {
@@ -6285,7 +6285,7 @@ public class InternalEngineTests extends EngineTestCase {
 public void testNotWarmUpSearcherInEngineCtor() throws Exception {
 try (Store store = createStore()) {
-List<ElasticsearchDirectoryReader> warmedUpReaders = new ArrayList<>();
+List<OpenSearchDirectoryReader> warmedUpReaders = new ArrayList<>();
 Engine.Warmer warmer = reader -> {
 assertNotNull(reader);
 assertThat(reader, not(in(warmedUpReaders)));

View File

@@ -23,7 +23,7 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.seqno.SeqNoStats;
@@ -36,7 +36,7 @@ import java.util.List;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.function.Function;
-import static org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader.getElasticsearchDirectoryReader;
+import static org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader.getOpenSearchDirectoryReader;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.instanceOf;
@@ -90,7 +90,7 @@ public class ReadOnlyEngineTests extends EngineTestCase {
 assertSame(external.getIndexReader(), internal.getIndexReader());
 assertThat(external.getIndexReader(), instanceOf(DirectoryReader.class));
 DirectoryReader dirReader = external.getDirectoryReader();
-ElasticsearchDirectoryReader esReader = getElasticsearchDirectoryReader(dirReader);
+OpenSearchDirectoryReader esReader = getOpenSearchDirectoryReader(dirReader);
 IndexReader.CacheHelper helper = esReader.getReaderCacheHelper();
 assertNotNull(helper);
 assertEquals(helper.getKey(), dirReader.getReaderCacheHelper().getKey());

View File

@@ -31,7 +31,7 @@ import org.apache.lucene.index.LogByteSizeMergePolicy;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.store.ByteBuffersDirectory;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
@@ -146,7 +146,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase {
 if (readerContexts != null && topLevelReader != null) {
 topLevelReader.close();
 }
-topLevelReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
+topLevelReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
 readerContexts = topLevelReader.leaves();
 return readerContexts;
 }

View File

@@ -47,7 +47,7 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.UnicodeUtil;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
 import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
@@ -387,7 +387,7 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
 }
 }
 DirectoryReader directoryReader = DirectoryReader.open(writer);
-directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(indexService.index(), 0));
+directoryReader = OpenSearchDirectoryReader.wrap(directoryReader, new ShardId(indexService.index(), 0));
 IndexSearcher searcher = new IndexSearcher(directoryReader);
 IndexFieldData<?> fieldData = getForField("text");
 final Object missingValue;

View File

@@ -30,7 +30,7 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.index.fielddata.plain.AbstractLeafOrdinalsFieldData;
 import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData;
 import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData;
@@ -63,7 +63,7 @@ public class FieldDataCacheTests extends ESTestCase {
 }
 iw.close();
 DirectoryReader ir =
-ElasticsearchDirectoryReader.wrap(DirectoryReader.open(dir), new ShardId("_index", "_na_", 0));
+OpenSearchDirectoryReader.wrap(DirectoryReader.open(dir), new ShardId("_index", "_na_", 0));
 DummyAccountingFieldDataCache fieldDataCache = new DummyAccountingFieldDataCache();
 // Testing SortedSetOrdinalsIndexFieldData:

View File

@@ -31,7 +31,7 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.SetOnce;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.fielddata.plain.SortedNumericIndexFieldData;
@@ -215,7 +215,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
 writer.addDocument(doc);
 DirectoryReader open = DirectoryReader.open(writer);
 final boolean wrap = randomBoolean();
-final IndexReader reader = wrap ? ElasticsearchDirectoryReader.wrap(open, new ShardId("test", "_na_", 1)) : open;
+final IndexReader reader = wrap ? OpenSearchDirectoryReader.wrap(open, new ShardId("test", "_na_", 1)) : open;
 final AtomicInteger onCacheCalled = new AtomicInteger();
 final AtomicInteger onRemovalCalled = new AtomicInteger();
 ifdService.setListener(new IndexFieldDataCache.Listener() {

View File

@@ -36,7 +36,7 @@ import org.apache.lucene.search.TopFieldDocs;
 import org.apache.lucene.search.join.QueryBitSetProducer;
 import org.apache.lucene.search.join.ScoreMode;
 import org.apache.lucene.search.join.ToParentBlockJoinQuery;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.index.fielddata.AbstractFieldDataTestCase;
 import org.elasticsearch.index.fielddata.IndexFieldData;
@@ -217,7 +217,7 @@ public abstract class AbstractNumberNestedSortingTestCase extends AbstractFieldD
 MultiValueMode sortMode = MultiValueMode.SUM;
 DirectoryReader directoryReader = DirectoryReader.open(writer);
-directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(indexService.index(), 0));
+directoryReader = OpenSearchDirectoryReader.wrap(directoryReader, new ShardId(indexService.index(), 0));
 IndexSearcher searcher = new IndexSearcher(directoryReader);
 Query parentFilter = new TermQuery(new Term("__type", "parent"));
 Query childFilter = Queries.not(parentFilter);

View File

@@ -45,7 +45,7 @@ import org.apache.lucene.search.join.ToParentBlockJoinQuery;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.TestUtil;
 import org.elasticsearch.Version;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -109,7 +109,7 @@ public class NestedSortingTests extends AbstractFieldDataTestCase {
 MultiValueMode sortMode = randomFrom(Arrays.asList(MultiValueMode.MIN, MultiValueMode.MAX));
 DirectoryReader reader = DirectoryReader.open(writer);
-reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0));
+reader = OpenSearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0));
 IndexSearcher searcher = new IndexSearcher(reader);
 PagedBytesIndexFieldData indexFieldData1 = getForField("f");
 IndexFieldData<?> indexFieldData2 = NoOrdinalsStringFieldDataTests.hideOrdinals(indexFieldData1);
@@ -295,7 +295,7 @@ public class NestedSortingTests extends AbstractFieldDataTestCase {
 MultiValueMode sortMode = MultiValueMode.MIN;
 DirectoryReader reader = DirectoryReader.open(writer);
-reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0));
+reader = OpenSearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0));
 IndexSearcher searcher = new IndexSearcher(reader);
 PagedBytesIndexFieldData indexFieldData = getForField("field2");
 Query parentFilter = new TermQuery(new Term("__type", "parent"));
@@ -611,7 +611,7 @@ public class NestedSortingTests extends AbstractFieldDataTestCase {
 }
 }
 DirectoryReader reader = DirectoryReader.open(writer);
-reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0));
+reader = OpenSearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0));
 IndexSearcher searcher = new IndexSearcher(reader);
 QueryShardContext queryShardContext = indexService.newQueryShardContext(0, searcher, () -> 0L, null);

View File

@@ -34,8 +34,8 @@ import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.Directory;
 import org.elasticsearch.common.CheckedFunction;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.core.internal.io.IOUtils;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.test.ESTestCase;
@@ -55,7 +55,7 @@ public class IndexReaderWrapperTests extends ESTestCase {
 doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
 doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
 writer.addDocument(doc);
-DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
+DirectoryReader open = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
 IndexSearcher searcher = new IndexSearcher(open);
 assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value);
 final AtomicInteger closeCalls = new AtomicInteger(0);
@@ -69,7 +69,7 @@ public class IndexReaderWrapperTests extends ESTestCase {
 IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(),
 () -> closeCalled.set(true)), wrapper);
 assertEquals(1, wrap.getIndexReader().getRefCount());
-ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), key -> {
+OpenSearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), key -> {
 if (key == open.getReaderCacheHelper().getKey()) {
 count.incrementAndGet();
 }
@@ -97,7 +97,7 @@ public class IndexReaderWrapperTests extends ESTestCase {
 doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
 doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
 writer.addDocument(doc);
-DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
+DirectoryReader open = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
 IndexSearcher searcher = new IndexSearcher(open);
 assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value);
 searcher.setSimilarity(iwc.getSimilarity());
@@ -109,7 +109,7 @@ public class IndexReaderWrapperTests extends ESTestCase {
 try (Engine.Searcher wrap = IndexShard.wrapSearcher(new Engine.Searcher("foo", open,
 IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(),
 () -> closeCalled.set(true)), wrapper)) {
-ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), key -> {
+OpenSearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), key -> {
 cache.remove(key);
 });
 TopDocs search = wrap.search(new TermQuery(new Term("field", "doc")), 1);
@@ -132,7 +132,7 @@ public class IndexReaderWrapperTests extends ESTestCase {
 doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
 doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
 writer.addDocument(doc);
-DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
+DirectoryReader open = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
 IndexSearcher searcher = new IndexSearcher(open);
 assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value);
 searcher.setSimilarity(iwc.getSimilarity());

View File

@@ -26,8 +26,8 @@ import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.store.BaseDirectoryWrapper;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.core.internal.io.IOUtils;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
 import org.elasticsearch.test.ESTestCase;
 import java.io.IOException;
@@ -40,7 +40,7 @@ public class ShardUtilsTests extends ESTestCase {
 writer.commit();
 ShardId id = new ShardId("foo", "_na_", random().nextInt());
 try (DirectoryReader reader = DirectoryReader.open(writer)) {
-ElasticsearchDirectoryReader wrap = ElasticsearchDirectoryReader.wrap(reader, id);
+OpenSearchDirectoryReader wrap = OpenSearchDirectoryReader.wrap(reader, id);
 assertEquals(id, ShardUtils.extractShardId(wrap));
 }
 final int numDocs = 1 + random().nextInt(5);
@@ -54,7 +54,7 @@ public class ShardUtilsTests extends ESTestCase {
 }
 try (DirectoryReader reader = DirectoryReader.open(writer)) {
-ElasticsearchDirectoryReader wrap = ElasticsearchDirectoryReader.wrap(reader, id);
+OpenSearchDirectoryReader wrap = OpenSearchDirectoryReader.wrap(reader, id);
 assertEquals(id, ShardUtils.extractShardId(wrap));
 CompositeReaderContext context = wrap.getContext();
 for (LeafReaderContext leaf : context.leaves()) {

View File

@@ -37,8 +37,8 @@ import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.ScorerSupplier;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.store.Directory;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.core.internal.io.IOUtils;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.cache.query.QueryCacheStats;
 import org.elasticsearch.index.shard.ShardId;
@@ -110,7 +110,7 @@ public class IndicesQueryCacheTests extends ESTestCase {
 DirectoryReader r = DirectoryReader.open(w);
 w.close();
 ShardId shard = new ShardId("index", "_na_", 0);
-r = ElasticsearchDirectoryReader.wrap(r, shard);
+r = OpenSearchDirectoryReader.wrap(r, shard);
 IndexSearcher s = new IndexSearcher(r);
 s.setQueryCachingPolicy(alwaysCachePolicy());
@@ -181,7 +181,7 @@ public class IndicesQueryCacheTests extends ESTestCase {
 DirectoryReader r1 = DirectoryReader.open(w1);
 w1.close();
 ShardId shard1 = new ShardId("index", "_na_", 0);
-r1 = ElasticsearchDirectoryReader.wrap(r1, shard1);
+r1 = OpenSearchDirectoryReader.wrap(r1, shard1);
 IndexSearcher s1 = new IndexSearcher(r1);
 s1.setQueryCachingPolicy(alwaysCachePolicy());
@@ -191,7 +191,7 @@ public class IndicesQueryCacheTests extends ESTestCase {
 DirectoryReader r2 = DirectoryReader.open(w2);
 w2.close();
 ShardId shard2 = new ShardId("index", "_na_", 1);
-r2 = ElasticsearchDirectoryReader.wrap(r2, shard2);
+r2 = OpenSearchDirectoryReader.wrap(r2, shard2);
 IndexSearcher s2 = new IndexSearcher(r2);
 s2.setQueryCachingPolicy(alwaysCachePolicy());
@@ -307,7 +307,7 @@ public class IndicesQueryCacheTests extends ESTestCase {
 DirectoryReader r1 = DirectoryReader.open(w1);
 w1.close();
 ShardId shard1 = new ShardId("index", "_na_", 0);
-r1 = ElasticsearchDirectoryReader.wrap(r1, shard1);
+r1 = OpenSearchDirectoryReader.wrap(r1, shard1);
 IndexSearcher s1 = new IndexSearcher(r1);
 s1.setQueryCachingPolicy(alwaysCachePolicy());
@@ -317,7 +317,7 @@ public class IndicesQueryCacheTests extends ESTestCase {
 DirectoryReader r2 = DirectoryReader.open(w2);
 w2.close();
 ShardId shard2 = new ShardId("index", "_na_", 1);
-r2 = ElasticsearchDirectoryReader.wrap(r2, shard2);
+r2 = OpenSearchDirectoryReader.wrap(r2, shard2);
 IndexSearcher s2 = new IndexSearcher(r2);
 s2.setQueryCachingPolicy(alwaysCachePolicy());
@@ -397,7 +397,7 @@ public class IndicesQueryCacheTests extends ESTestCase {
 DirectoryReader r = DirectoryReader.open(w);
 w.close();
 ShardId shard = new ShardId("index", "_na_", 0);
-r = ElasticsearchDirectoryReader.wrap(r, shard);
+r = OpenSearchDirectoryReader.wrap(r, shard);
 IndexSearcher s = new IndexSearcher(r);
 s.setQueryCachingPolicy(new QueryCachingPolicy() {
 @Override

View File

@@ -36,7 +36,7 @@ import org.elasticsearch.common.CheckedSupplier;
 import org.elasticsearch.common.bytes.AbstractBytesReference;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.xcontent.XContentHelper;
@@ -60,7 +60,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
 writer.addDocument(newDoc(0, "foo"));
-DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
+DirectoryReader reader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer),
 new ShardId("foo", "bar", 1));
 TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
 BytesReference termBytes = XContentHelper.toXContent(termQuery, XContentType.JSON, false);
@@ -117,7 +117,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
 writer.addDocument(newDoc(0, "foo"));
-DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
+DirectoryReader reader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
 TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
 BytesReference termBytes = XContentHelper.toXContent(termQuery, XContentType.JSON, false);
 if (randomBoolean()) {
@@ -126,7 +126,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
 writer = new IndexWriter(dir, newIndexWriterConfig());
 }
 writer.updateDocument(new Term("id", "0"), newDoc(0, "bar"));
-DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
+DirectoryReader secondReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
 // initial cache
 TestEntity entity = new TestEntity(requestCacheStats, indexShard);
@@ -214,7 +214,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
 writer.addDocument(newDoc(0, "foo"));
-DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
+DirectoryReader reader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer),
 new ShardId("foo", "bar", 1));
 TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
 BytesReference termBytes = XContentHelper.toXContent(termQuery, XContentType.JSON, false);
@@ -222,7 +222,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
 Loader loader = new Loader(reader, 0);
 writer.updateDocument(new Term("id", "0"), newDoc(0, "bar"));
-DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
+DirectoryReader secondReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer),
 new ShardId("foo", "bar", 1));
 TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard);
 Loader secondLoader = new Loader(secondReader, 0);
@@ -243,7 +243,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
 writer.addDocument(newDoc(0, "foo"));
-DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
+DirectoryReader reader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer),
 new ShardId("foo", "bar", 1));
 TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
 BytesReference termBytes = XContentHelper.toXContent(termQuery, XContentType.JSON, false);
@@ -251,13 +251,13 @@ public class IndicesRequestCacheTests extends ESTestCase {
 Loader loader = new Loader(reader, 0);
 writer.updateDocument(new Term("id", "0"), newDoc(0, "bar"));
-DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
+DirectoryReader secondReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer),
 new ShardId("foo", "bar", 1));
 TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard);
 Loader secondLoader = new Loader(secondReader, 0);
 writer.updateDocument(new Term("id", "0"), newDoc(0, "baz"));
-DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
+DirectoryReader thirdReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer),
 new ShardId("foo", "bar", 1));
 TestEntity thirddEntity = new TestEntity(requestCacheStats, indexShard);
 Loader thirdLoader = new Loader(thirdReader, 0);
@@ -283,7 +283,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
 writer.addDocument(newDoc(0, "foo"));
-DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
+DirectoryReader reader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer),
 new ShardId("foo", "bar", 1));
 TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
 BytesReference termBytes = XContentHelper.toXContent(termQuery, XContentType.JSON, false);
@@ -291,13 +291,13 @@ public class IndicesRequestCacheTests extends ESTestCase {
 Loader loader = new Loader(reader, 0);
 writer.updateDocument(new Term("id", "0"), newDoc(0, "bar"));
-DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
+DirectoryReader secondReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer),
 new ShardId("foo", "bar", 1));
 TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard);
 Loader secondLoader = new Loader(secondReader, 0);
 writer.updateDocument(new Term("id", "0"), newDoc(0, "baz"));
-DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
+DirectoryReader thirdReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer),
 new ShardId("foo", "bar", 1));
 AtomicBoolean differentIdentity = new AtomicBoolean(true);
 TestEntity thirddEntity = new TestEntity(requestCacheStats, differentIdentity);
@@ -367,7 +367,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
 writer.addDocument(newDoc(0, "foo"));
-DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
+DirectoryReader reader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer),
 new ShardId("foo", "bar", 1));
 TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
 BytesReference termBytes = XContentHelper.toXContent(termQuery, XContentType.JSON, false);

View File

@@ -60,7 +60,7 @@ import org.apache.lucene.util.CombinedBitSet;
 import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.SparseFixedBitSet;
 import org.elasticsearch.ExceptionsHelper;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.internal.io.IOUtils;
@@ -227,7 +227,7 @@ public class ContextIndexSearcherTests extends ESTestCase {
 }
 };
-DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(w),
+DirectoryReader reader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(w),
 new ShardId(settings.getIndex(), 0));
 BitsetFilterCache cache = new BitsetFilterCache(settings, listener);
 Query roleQuery = new TermQuery(new Term("allowed", "yes"));

View File

@@ -54,7 +54,7 @@ import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
-import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.lucene.index.OpenSearchDirectoryReader;
 import org.elasticsearch.common.network.NetworkAddress;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.BigArrays;
@@ -554,7 +554,7 @@ public abstract class AggregatorTestCase extends ESTestCase {
 }
 protected static DirectoryReader wrapInMockESDirectoryReader(DirectoryReader directoryReader) throws IOException {
-return ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(new Index("_index", "_na_"), 0));
+return OpenSearchDirectoryReader.wrap(directoryReader, new ShardId(new Index("_index", "_na_"), 0));
 }
 /**