LUCENE-6158: IW.addIndexes(IndexReader...) -> IW.addIndexes(LeafReader...)

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1649989 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Robert Muir 2015-01-07 02:52:57 +00:00
parent bba930a5b3
commit b10d20a122
28 changed files with 120 additions and 95 deletions

View File

@ -347,6 +347,9 @@ API Changes
be set at the constructor for non-contextual lookup.
(Boon Low, Tomás Fernández Löbbe)
* LUCENE-6158: IndexWriter.addIndexes(IndexReader...) changed to
addIndexes(LeafReader...) (Robert Muir)
Bug Fixes
* LUCENE-5650: Enforce read-only access to any path outside the temporary

View File

@ -558,11 +558,11 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
public void testAddOldIndexesReader() throws IOException {
for (String name : oldNames) {
IndexReader reader = DirectoryReader.open(oldIndexDirs.get(name));
DirectoryReader reader = DirectoryReader.open(oldIndexDirs.get(name));
Directory targetDir = newDirectory();
IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(new MockAnalyzer(random())));
w.addIndexes(reader);
TestUtil.addIndexesSlowly(w, reader);
w.close();
reader.close();

View File

@ -18,18 +18,21 @@ package org.apache.lucene.benchmark.byTask.tasks;
*/
import java.nio.file.Paths;
import java.util.List;
import org.apache.lucene.benchmark.byTask.PerfRunData;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
/**
* Adds an input index to an existing index, using
* {@link IndexWriter#addIndexes(Directory...)} or
* {@link IndexWriter#addIndexes(IndexReader...)}. The location of the input
* {@link IndexWriter#addIndexes(LeafReader...)}. The location of the input
* index is specified by the parameter {@link #ADDINDEXES_INPUT_DIR} and is
* assumed to be a directory on the file system.
* <p>
@ -63,11 +66,13 @@ public class AddIndexesTask extends PerfTask {
if (useAddIndexesDir) {
writer.addIndexes(inputDir);
} else {
IndexReader r = DirectoryReader.open(inputDir);
try {
writer.addIndexes(r);
} finally {
r.close();
try (IndexReader r = DirectoryReader.open(inputDir)) {
LeafReader leaves[] = new LeafReader[r.leaves().size()];
int i = 0;
for (LeafReaderContext leaf : r.leaves()) {
leaves[i++] = leaf.reader();
}
writer.addIndexes(leaves);
}
}
return 1;
@ -79,7 +84,7 @@ public class AddIndexesTask extends PerfTask {
* @param params
* {@code useAddIndexesDir=true} for using
* {@link IndexWriter#addIndexes(Directory...)} or {@code false} for
* using {@link IndexWriter#addIndexes(IndexReader...)}. Defaults to
* using {@link IndexWriter#addIndexes(LeafReader...)}. Defaults to
* {@code true}.
*/
@Override

View File

@ -235,8 +235,8 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
public static final String SOURCE_MERGE = "merge";
/** Source of a segment which results from a flush. */
public static final String SOURCE_FLUSH = "flush";
/** Source of a segment which results from a call to {@link #addIndexes(IndexReader...)}. */
public static final String SOURCE_ADDINDEXES_READERS = "addIndexes(IndexReader...)";
/** Source of a segment which results from a call to {@link #addIndexes(LeafReader...)}. */
public static final String SOURCE_ADDINDEXES_READERS = "addIndexes(LeafReader...)";
/**
* Absolute hard maximum length for a term, in bytes once
@ -2099,7 +2099,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
*
* <p>
* NOTE: this method will forcefully abort all merges in progress. If other
* threads are running {@link #forceMerge}, {@link #addIndexes(IndexReader[])}
* threads are running {@link #forceMerge}, {@link #addIndexes(LeafReader[])}
* or {@link #forceMergeDeletes} methods, they may receive
* {@link MergePolicy.MergeAbortedException}s.
*/
@ -2497,7 +2497,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
* index.
*
* <p>
* <b>NOTE:</b> this method merges all given {@link IndexReader}s in one
* <b>NOTE:</b> this method merges all given {@link LeafReader}s in one
* merge. If you intend to merge a large number of readers, it may be better
* to call this method multiple times, each time with a small set of readers.
* In principle, if you use a merge policy with a {@code mergeFactor} or
@ -2509,23 +2509,19 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
* @throws IOException
* if there is a low-level IO error
*/
public void addIndexes(IndexReader... readers) throws IOException {
public void addIndexes(LeafReader... readers) throws IOException {
ensureOpen();
int numDocs = 0;
try {
if (infoStream.isEnabled("IW")) {
infoStream.message("IW", "flush at addIndexes(IndexReader...)");
infoStream.message("IW", "flush at addIndexes(LeafReader...)");
}
flush(false, true);
String mergedName = newSegmentName();
final List<LeafReader> mergeReaders = new ArrayList<>();
for (IndexReader indexReader : readers) {
numDocs += indexReader.numDocs();
for (LeafReaderContext ctx : indexReader.leaves()) {
mergeReaders.add(ctx.reader());
}
for (LeafReader leaf : readers) {
numDocs += leaf.numDocs();
}
// Make sure adding the new documents to this index won't
@ -2541,7 +2537,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
SegmentInfo info = new SegmentInfo(directory, Version.LATEST, mergedName, -1,
false, codec, null, StringHelper.randomId(), new HashMap<>());
SegmentMerger merger = new SegmentMerger(mergeReaders, info, infoStream, trackingDir,
SegmentMerger merger = new SegmentMerger(Arrays.asList(readers), info, infoStream, trackingDir,
globalFieldNumberMap,
context);

View File

@ -165,9 +165,9 @@ public class TrackingIndexWriter {
return indexingGen.get();
}
/** Calls {@link IndexWriter#addIndexes(IndexReader...)}
/** Calls {@link IndexWriter#addIndexes(LeafReader...)}
* and returns the generation that reflects this change. */
public long addIndexes(IndexReader... readers) throws IOException {
public long addIndexes(LeafReader... readers) throws IOException {
writer.addIndexes(readers);
// Return gen as of when indexing finished:
return indexingGen.get();

View File

@ -84,31 +84,29 @@ public class Test2BPostingsBytes extends LuceneTestCase {
w.close();
DirectoryReader oneThousand = DirectoryReader.open(dir);
IndexReader subReaders[] = new IndexReader[1000];
DirectoryReader subReaders[] = new DirectoryReader[1000];
Arrays.fill(subReaders, oneThousand);
MultiReader mr = new MultiReader(subReaders);
BaseDirectoryWrapper dir2 = newFSDirectory(createTempDir("2BPostingsBytes2"));
if (dir2 instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)dir2).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
}
IndexWriter w2 = new IndexWriter(dir2,
new IndexWriterConfig(null));
w2.addIndexes(mr);
TestUtil.addIndexesSlowly(w2, subReaders);
w2.forceMerge(1);
w2.close();
oneThousand.close();
DirectoryReader oneMillion = DirectoryReader.open(dir2);
subReaders = new IndexReader[2000];
subReaders = new DirectoryReader[2000];
Arrays.fill(subReaders, oneMillion);
mr = new MultiReader(subReaders);
BaseDirectoryWrapper dir3 = newFSDirectory(createTempDir("2BPostingsBytes3"));
if (dir3 instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)dir3).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
}
IndexWriter w3 = new IndexWriter(dir3,
new IndexWriterConfig(null));
w3.addIndexes(mr);
TestUtil.addIndexesSlowly(w3, subReaders);
w3.forceMerge(1);
w3.close();
oneMillion.close();

View File

@ -652,7 +652,7 @@ public class TestAddIndexes extends LuceneTestCase {
IndexWriter writer2;
final List<Throwable> failures = new ArrayList<>();
volatile boolean didClose;
final IndexReader[] readers;
final DirectoryReader[] readers;
final int NUM_COPY;
final static int NUM_THREADS = 5;
final Thread[] threads = new Thread[NUM_THREADS];
@ -671,7 +671,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer2.commit();
readers = new IndexReader[NUM_COPY];
readers = new DirectoryReader[NUM_COPY];
for(int i=0;i<NUM_COPY;i++)
readers[i] = DirectoryReader.open(dir);
}
@ -770,9 +770,9 @@ public class TestAddIndexes extends LuceneTestCase {
break;
case 2:
if (VERBOSE) {
System.out.println(Thread.currentThread().getName() + ": TEST: addIndexes(IndexReader[])");
System.out.println(Thread.currentThread().getName() + ": TEST: addIndexes(LeafReader[])");
}
writer2.addIndexes(readers);
TestUtil.addIndexesSlowly(writer2, readers);
break;
case 3:
if (VERBOSE) {
@ -875,9 +875,9 @@ public class TestAddIndexes extends LuceneTestCase {
break;
case 2:
if (VERBOSE) {
System.out.println("TEST: " + Thread.currentThread().getName() + ": addIndexes(IR[])");
System.out.println("TEST: " + Thread.currentThread().getName() + ": addIndexes(LR[])");
}
writer2.addIndexes(readers);
TestUtil.addIndexesSlowly(writer2, readers);
break;
case 3:
if (VERBOSE) {
@ -982,11 +982,8 @@ public class TestAddIndexes extends LuceneTestCase {
// Now delete the document
writer.deleteDocuments(new Term("id", "myid"));
IndexReader r = DirectoryReader.open(dirs[1]);
try {
writer.addIndexes(r);
} finally {
r.close();
try (DirectoryReader r = DirectoryReader.open(dirs[1])) {
TestUtil.addIndexesSlowly(writer, r);
}
writer.commit();
assertEquals("Documents from the incoming index should not have been deleted", 1, writer.numDocs());
@ -1101,7 +1098,7 @@ public class TestAddIndexes extends LuceneTestCase {
w.close();
}
IndexReader[] readers = new IndexReader[] { DirectoryReader.open(dirs[0]), DirectoryReader.open(dirs[1]) };
DirectoryReader[] readers = new DirectoryReader[] { DirectoryReader.open(dirs[0]), DirectoryReader.open(dirs[1]) };
MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory());
dir.setEnableVirusScanner(false); // we check for specific list of files
@ -1111,7 +1108,7 @@ public class TestAddIndexes extends LuceneTestCase {
lmp.setNoCFSRatio(1.0);
lmp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY);
IndexWriter w3 = new IndexWriter(dir, conf);
w3.addIndexes(readers);
TestUtil.addIndexesSlowly(w3, readers);
w3.close();
// we should now see segments_X,
// _Y.cfs,_Y.cfe, _Z.si
@ -1182,7 +1179,7 @@ public class TestAddIndexes extends LuceneTestCase {
doc.add(newStringField("f1", "doc1 field1", Field.Store.YES));
doc.add(newStringField("id", "1", Field.Store.YES));
w.addDocument(doc);
IndexReader r1 = w.getReader();
DirectoryReader r1 = w.getReader();
w.close();
Directory d2 = newDirectory();
@ -1191,12 +1188,12 @@ public class TestAddIndexes extends LuceneTestCase {
doc.add(newStringField("f2", "doc2 field2", Field.Store.YES));
doc.add(newStringField("id", "2", Field.Store.YES));
w.addDocument(doc);
IndexReader r2 = w.getReader();
DirectoryReader r2 = w.getReader();
w.close();
Directory d3 = newDirectory();
w = new RandomIndexWriter(random(), d3);
w.addIndexes(r1, r2);
TestUtil.addIndexesSlowly(w.w, r1, r2);
r1.close();
d1.close();
r2.close();
@ -1220,8 +1217,7 @@ public class TestAddIndexes extends LuceneTestCase {
public void testAddEmpty() throws Exception {
Directory d1 = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random(), d1);
MultiReader empty = new MultiReader();
w.addIndexes(empty);
w.addIndexes(new LeafReader[0]);
w.close();
DirectoryReader dr = DirectoryReader.open(d1);
for (LeafReaderContext ctx : dr.leaves()) {
@ -1239,7 +1235,7 @@ public class TestAddIndexes extends LuceneTestCase {
Directory src = newDirectory(), dest = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random(), src);
w.addDocument(new Document());
IndexReader allDeletedReader = new AllDeletedFilterReader(w.getReader().leaves().get(0).reader());
LeafReader allDeletedReader = new AllDeletedFilterReader(w.getReader().leaves().get(0).reader());
w.close();
w = new RandomIndexWriter(random(), dest);

View File

@ -1106,7 +1106,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.addIndexes(dir1);
} else {
DirectoryReader reader = DirectoryReader.open(dir1);
writer.addIndexes(reader);
TestUtil.addIndexesSlowly(writer, reader);
reader.close();
}
writer.close();

View File

@ -38,6 +38,7 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
/**
*
@ -612,9 +613,9 @@ public class TestDocValuesIndexing extends LuceneTestCase {
// expected
}
IndexReader r = DirectoryReader.open(dir2);
DirectoryReader r = DirectoryReader.open(dir2);
try {
w.addIndexes(new IndexReader[] {r});
TestUtil.addIndexesSlowly(w, r);
fail("didn't hit expected exception");
} catch (IllegalArgumentException iae) {
// expected
@ -781,14 +782,14 @@ public class TestDocValuesIndexing extends LuceneTestCase {
doc = new Document();
doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
writer.addDocument(doc);
IndexReader[] readers = new IndexReader[] {DirectoryReader.open(dir)};
DirectoryReader reader = DirectoryReader.open(dir);
try {
writer.addIndexes(readers);
TestUtil.addIndexesSlowly(writer, reader);
fail("did not hit exception");
} catch (IllegalArgumentException iae) {
// expected
}
readers[0].close();
reader.close();
writer.close();
dir.close();
@ -833,9 +834,9 @@ public class TestDocValuesIndexing extends LuceneTestCase {
Directory dir2 = newDirectory();
conf = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir2, conf);
IndexReader[] readers = new IndexReader[] {DirectoryReader.open(dir)};
writer.addIndexes(readers);
readers[0].close();
DirectoryReader reader = DirectoryReader.open(dir);
TestUtil.addIndexesSlowly(writer, reader);
reader.close();
doc = new Document();
doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
try {

View File

@ -138,11 +138,11 @@ public class TestFilterLeafReader extends LuceneTestCase {
((BaseDirectoryWrapper) target).setCrossCheckTermVectorsOnClose(false);
writer = new IndexWriter(target, newIndexWriterConfig(new MockAnalyzer(random())));
IndexReader reader = new TestReader(DirectoryReader.open(directory));
try (LeafReader reader = new TestReader(DirectoryReader.open(directory))) {
writer.addIndexes(reader);
}
writer.close();
reader.close();
reader = DirectoryReader.open(target);
IndexReader reader = DirectoryReader.open(target);
TermsEnum terms = MultiFields.getTerms(reader, "default").iterator(null);
while (terms.next() != null) {

View File

@ -874,8 +874,8 @@ public class TestIndexWriter extends LuceneTestCase {
writer2.addDocument(doc);
writer2.close();
IndexReader r1 = DirectoryReader.open(dir2);
writer.addIndexes(r1, r1);
DirectoryReader r1 = DirectoryReader.open(dir2);
TestUtil.addIndexesSlowly(writer, r1, r1);
writer.close();
IndexReader r3 = DirectoryReader.open(dir);

View File

@ -30,7 +30,9 @@ import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TimeUnits;
import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
@SuppressCodecs({ "SimpleText", "Memory", "Direct" })
@ -275,9 +277,9 @@ public class TestIndexWriterMaxDocs extends LuceneTestCase {
// expected
}
assertEquals(1, w2.maxDoc());
IndexReader ir = DirectoryReader.open(dir);
DirectoryReader ir = DirectoryReader.open(dir);
try {
w2.addIndexes(new IndexReader[] {ir});
TestUtil.addIndexesSlowly(w2, ir);
fail("didn't hit exception");
} catch (IllegalStateException ise) {
// expected

View File

@ -323,12 +323,12 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
}
writer.forceMerge(1);
} else if (1 == method) {
IndexReader readers[] = new IndexReader[dirs.length];
DirectoryReader readers[] = new DirectoryReader[dirs.length];
for(int i=0;i<dirs.length;i++) {
readers[i] = DirectoryReader.open(dirs[i]);
}
try {
writer.addIndexes(readers);
TestUtil.addIndexesSlowly(writer, readers);
} finally {
for(int i=0;i<dirs.length;i++) {
readers[i].close();

View File

@ -39,7 +39,7 @@ public class TestIndexWriterOutOfFileDescriptors extends LuceneTestCase {
dir.setRandomIOExceptionRateOnOpen(rate);
int iters = atLeast(20);
LineFileDocs docs = new LineFileDocs(random());
IndexReader r = null;
DirectoryReader r = null;
DirectoryReader r2 = null;
boolean any = false;
MockDirectoryWrapper dirCopy = null;
@ -68,9 +68,9 @@ public class TestIndexWriterOutOfFileDescriptors extends LuceneTestCase {
if (r != null && random().nextInt(5) == 3) {
if (random().nextBoolean()) {
if (VERBOSE) {
System.out.println("TEST: addIndexes IR[]");
System.out.println("TEST: addIndexes LR[]");
}
w.addIndexes(new IndexReader[] {r});
TestUtil.addIndexesSlowly(w, r);
} else {
if (VERBOSE) {
System.out.println("TEST: addIndexes Directory[]");

View File

@ -399,7 +399,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
final Thread[] threads = new Thread[numThreads];
IndexWriter mainWriter;
final List<Throwable> failures = new ArrayList<>();
IndexReader[] readers;
DirectoryReader[] readers;
boolean didClose = false;
AtomicInteger count = new AtomicInteger(0);
AtomicInteger numaddIndexes = new AtomicInteger(0);
@ -418,7 +418,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
writer.close();
readers = new IndexReader[numDirs];
readers = new DirectoryReader[numDirs];
for (int i = 0; i < numDirs; i++)
readers[i] = DirectoryReader.open(addDir);
}
@ -498,7 +498,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
numaddIndexes.incrementAndGet();
break;
case 2:
mainWriter.addIndexes(readers);
TestUtil.addIndexesSlowly(mainWriter, readers);
break;
case 3:
mainWriter.commit();

View File

@ -1196,7 +1196,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
writer.addIndexes(dir1);
} else {
DirectoryReader reader = DirectoryReader.open(dir1);
writer.addIndexes(reader);
TestUtil.addIndexesSlowly(writer, reader);
reader.close();
}
writer.close();

View File

@ -18,6 +18,8 @@ package org.apache.lucene.index;
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
@ -65,11 +67,11 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
DirectoryReader.open(rd2));
// When unpatched, Lucene crashes here with a NoSuchElementException (caused by ParallelTermEnum)
iwOut.addIndexes(cpr);
iwOut.forceMerge(1);
// 2nd try with a readerless parallel reader
iwOut.addIndexes(new ParallelCompositeReader());
List<LeafReader> leaves = new ArrayList<>();
for (LeafReaderContext leaf : cpr.leaves()) {
leaves.add(leaf.reader());
}
iwOut.addIndexes(leaves.toArray(new LeafReader[0]));
iwOut.forceMerge(1);
iwOut.close();

View File

@ -1,4 +1,4 @@
package org.apache.lucene.search;
package org.apache.lucene.index;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -33,6 +33,7 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.util.English;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@ -154,8 +155,8 @@ public class TestTermVectors extends LuceneTestCase {
IndexWriter writer = createWriter(target);
for (Directory dir : input) {
IndexReader r = DirectoryReader.open(dir);
writer.addIndexes(r);
DirectoryReader r = DirectoryReader.open(dir);
TestUtil.addIndexesSlowly(writer, r);
r.close();
}
writer.forceMerge(1);

View File

@ -56,7 +56,7 @@ public abstract class TaxonomyMergeUtils {
for (int i = 0; i < numReaders; i++) {
wrappedLeaves[i] = new OrdinalMappingLeafReader(leaves.get(i).reader(), ordinalMap, srcConfig);
}
destIndexWriter.addIndexes(new MultiReader(wrappedLeaves));
destIndexWriter.addIndexes(wrappedLeaves);
// commit changes to taxonomy and index respectively.
destTaxoWriter.commit();

View File

@ -33,7 +33,7 @@ import org.apache.lucene.util.Version;
/**
* This tool splits input index into multiple equal parts. The method employed
* here uses {@link IndexWriter#addIndexes(IndexReader[])} where the input data
* here uses {@link IndexWriter#addIndexes(LeafReader[])} where the input data
* comes from the input index with artificially applied deletes to the document
* id-s that fall outside the selected partition.
* <p>Note 1: Deletes are only applied to a buffered list of deleted docs and
@ -102,7 +102,7 @@ public class MultiPassIndexSplitter {
System.err.println("Writing part " + (i + 1) + " ...");
// pass the subreaders directly, as our wrapper's numDocs/hasDeletetions are not up-to-date
final List<? extends FakeDeleteLeafIndexReader> sr = input.getSequentialSubReaders();
w.addIndexes(sr.toArray(new IndexReader[sr.size()])); // TODO: maybe take List<IR> here?
w.addIndexes(sr.toArray(new LeafReader[sr.size()])); // TODO: maybe take List<IR> here?
w.close();
}
System.err.println("Done.");

View File

@ -103,7 +103,7 @@ public class PKIndexSplitter {
final IndexWriter w = new IndexWriter(target, config);
try {
final List<LeafReaderContext> leaves = reader.leaves();
final IndexReader[] subReaders = new IndexReader[leaves.size()];
final LeafReader[] subReaders = new LeafReader[leaves.size()];
int i = 0;
for (final LeafReaderContext ctx : leaves) {
subReaders[i++] = new DocumentFilteredLeafIndexReader(ctx, preserveFilter, negateFilter);

View File

@ -71,7 +71,7 @@ public class IndexSortingTest extends SorterTestBase {
Directory target = newDirectory();
IndexWriter writer = new IndexWriter(target, newIndexWriterConfig(null));
IndexReader reader = SortingLeafReader.wrap(unsortedReader, sorter);
LeafReader reader = SortingLeafReader.wrap(unsortedReader, sorter);
writer.addIndexes(reader);
writer.close();
// NOTE: also closes unsortedReader

View File

@ -207,7 +207,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
w.forceMerge(1);
w.commit();
w.close();
IndexReader reader = DirectoryReader.open(dir);
DirectoryReader reader = DirectoryReader.open(dir);
Directory dir2 = newDirectory();
if (dir2 instanceof MockDirectoryWrapper) {
@ -219,7 +219,8 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
mp.setNoCFSRatio(0);
cfg = new IndexWriterConfig(new MockAnalyzer(random())).setUseCompoundFile(false).setMergePolicy(mp);
w = new IndexWriter(dir2, cfg);
w.addIndexes(reader);
TestUtil.addIndexesSlowly(w, reader);
w.commit();
w.close();

View File

@ -647,7 +647,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
Directory dir2 = newDirectory();
w = new RandomIndexWriter(random(), dir2);
w.addIndexes(reader);
TestUtil.addIndexesSlowly(w.w, reader);
reader.close();
dir.close();
@ -787,7 +787,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
}
dirs[i] = newDirectory();
IndexWriter adder = new IndexWriter(dirs[i], new IndexWriterConfig(null));
adder.addIndexes(reader);
TestUtil.addIndexesSlowly(adder, reader);
adder.commit();
adder.close();

View File

@ -222,7 +222,7 @@ public class RandomIndexWriter implements Closeable {
w.addIndexes(dirs);
}
public void addIndexes(IndexReader... readers) throws IOException {
public void addIndexes(LeafReader... readers) throws IOException {
LuceneTestCase.maybeChangeLiveIndexWriterConfig(r, w.getConfig());
w.addIndexes(readers);
}

View File

@ -29,6 +29,7 @@ import java.nio.CharBuffer;
import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
@ -68,6 +69,7 @@ import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.DocsEnum;
@ -873,6 +875,16 @@ public final class TestUtil {
}
}
/**
 * Adds all segment (leaf) readers of the given composite readers to {@code writer}
 * via {@link IndexWriter#addIndexes(LeafReader...)} — the "slow" reader-merging
 * path, as opposed to {@code addIndexes(Directory...)}.
 *
 * @param writer  destination writer that receives the merged segments
 * @param readers one or more open composite readers whose leaves are merged in
 * @throws IOException if there is a low-level IO error during the merge
 */
public static void addIndexesSlowly(IndexWriter writer, DirectoryReader... readers) throws IOException {
  // Flatten every composite reader into its per-segment leaf readers.
  final List<LeafReader> segmentReaders = new ArrayList<>();
  for (DirectoryReader composite : readers) {
    for (LeafReaderContext ctx : composite.leaves()) {
      segmentReaders.add(ctx.reader());
    }
  }
  // Hand all leaves to the writer in a single addIndexes call.
  writer.addIndexes(segmentReaders.toArray(new LeafReader[segmentReaders.size()]));
}
/** just tries to configure things to keep the open file
* count lowish */
public static void reduceOpenFiles(IndexWriter w) {

View File

@ -24,6 +24,8 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.search.BooleanClause;
@ -467,9 +469,15 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
List<DirectoryReader> readers = cmd.readers;
if (readers != null && readers.size() > 0) {
List<LeafReader> leaves = new ArrayList<>();
for (DirectoryReader reader : readers) {
for (LeafReaderContext leaf : reader.leaves()) {
leaves.add(leaf.reader());
}
}
RefCounted<IndexWriter> iw = solrCoreState.getIndexWriter(core);
try {
iw.get().addIndexes(readers.toArray(new IndexReader[readers.size()]));
iw.get().addIndexes(leaves.toArray(new LeafReader[leaves.size()]));
} finally {
iw.decref();
}

View File

@ -129,7 +129,7 @@ public class SolrIndexSplitter {
// This removes deletions but optimize might still be needed because sub-shards will have the same number of segments as the parent shard.
for (int segmentNumber = 0; segmentNumber<leaves.size(); segmentNumber++) {
log.info("SolrIndexSplitter: partition #" + partitionNumber + " partitionCount=" + numPieces + (ranges != null ? " range=" + ranges.get(partitionNumber) : "") + " segment #"+segmentNumber + " segmentCount=" + leaves.size());
IndexReader subReader = new LiveDocsReader( leaves.get(segmentNumber), segmentDocSets.get(segmentNumber)[partitionNumber] );
LeafReader subReader = new LiveDocsReader( leaves.get(segmentNumber), segmentDocSets.get(segmentNumber)[partitionNumber] );
iw.addIndexes(subReader);
}
success = true;