mirror of https://github.com/apache/lucene.git
LUCENE-6158: IW.addIndexes(IndexReader...) -> IW.addIndexes(LeafReader...)
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1649989 13f79535-47bb-0310-9956-ffa450edef68
parent bba930a5b3
commit b10d20a122
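For callers, the migration is mechanical: IndexWriter.addIndexes no longer accepts composite IndexReaders, so a DirectoryReader's per-segment LeafReaders must be handed over instead. A minimal sketch of that pattern, mirroring the TestUtil.addIndexesSlowly helper this commit adds (the class and method names here are illustrative, not part of the commit):

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.LeafReader;
    import org.apache.lucene.index.LeafReaderContext;

    class AddIndexesMigration {
      // Before this change: writer.addIndexes(reader) accepted any IndexReader.
      // After: collect the segment-level LeafReaders and pass those instead.
      static void addAll(IndexWriter writer, DirectoryReader reader) throws IOException {
        List<LeafReader> leaves = new ArrayList<>();
        for (LeafReaderContext ctx : reader.leaves()) {
          leaves.add(ctx.reader());
        }
        writer.addIndexes(leaves.toArray(new LeafReader[leaves.size()]));
      }
    }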
@@ -347,6 +347,9 @@ API Changes
   be set at the constructor for non-contextual lookup.
   (Boon Low, Tomás Fernández Löbbe)
 
+* LUCENE-6158: IndexWriter.addIndexes(IndexReader...) changed to
+  addIndexes(LeafReader...) (Robert Muir)
+
 Bug Fixes
 
 * LUCENE-5650: Enforce read-only access to any path outside the temporary
@@ -558,11 +558,11 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
 
   public void testAddOldIndexesReader() throws IOException {
     for (String name : oldNames) {
-      IndexReader reader = DirectoryReader.open(oldIndexDirs.get(name));
+      DirectoryReader reader = DirectoryReader.open(oldIndexDirs.get(name));
 
       Directory targetDir = newDirectory();
       IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(new MockAnalyzer(random())));
-      w.addIndexes(reader);
+      TestUtil.addIndexesSlowly(w, reader);
       w.close();
       reader.close();
@@ -18,18 +18,21 @@ package org.apache.lucene.benchmark.byTask.tasks;
  */
 
 import java.nio.file.Paths;
+import java.util.List;
 
 import org.apache.lucene.benchmark.byTask.PerfRunData;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 
 /**
  * Adds an input index to an existing index, using
  * {@link IndexWriter#addIndexes(Directory...)} or
- * {@link IndexWriter#addIndexes(IndexReader...)}. The location of the input
+ * {@link IndexWriter#addIndexes(LeafReader...)}. The location of the input
  * index is specified by the parameter {@link #ADDINDEXES_INPUT_DIR} and is
  * assumed to be a directory on the file system.
  * <p>
@@ -63,11 +66,13 @@ public class AddIndexesTask extends PerfTask {
     if (useAddIndexesDir) {
       writer.addIndexes(inputDir);
     } else {
-      IndexReader r = DirectoryReader.open(inputDir);
-      try {
-        writer.addIndexes(r);
-      } finally {
-        r.close();
+      try (IndexReader r = DirectoryReader.open(inputDir)) {
+        LeafReader leaves[] = new LeafReader[r.leaves().size()];
+        int i = 0;
+        for (LeafReaderContext leaf : r.leaves()) {
+          leaves[i++] = leaf.reader();
+        }
+        writer.addIndexes(leaves);
       }
     }
     return 1;
@@ -79,7 +84,7 @@ public class AddIndexesTask extends PerfTask {
   * @param params
   *          {@code useAddIndexesDir=true} for using
   *          {@link IndexWriter#addIndexes(Directory...)} or {@code false} for
-  *          using {@link IndexWriter#addIndexes(IndexReader...)}. Defaults to
+  *          using {@link IndexWriter#addIndexes(LeafReader...)}. Defaults to
   *          {@code true}.
   */
  @Override
@@ -235,8 +235,8 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
   public static final String SOURCE_MERGE = "merge";
   /** Source of a segment which results from a flush. */
   public static final String SOURCE_FLUSH = "flush";
-  /** Source of a segment which results from a call to {@link #addIndexes(IndexReader...)}. */
-  public static final String SOURCE_ADDINDEXES_READERS = "addIndexes(IndexReader...)";
+  /** Source of a segment which results from a call to {@link #addIndexes(LeafReader...)}. */
+  public static final String SOURCE_ADDINDEXES_READERS = "addIndexes(LeafReader...)";
 
   /**
    * Absolute hard maximum length for a term, in bytes once
@@ -2099,7 +2099,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
    *
    * <p>
    * NOTE: this method will forcefully abort all merges in progress. If other
-   * threads are running {@link #forceMerge}, {@link #addIndexes(IndexReader[])}
+   * threads are running {@link #forceMerge}, {@link #addIndexes(LeafReader[])}
    * or {@link #forceMergeDeletes} methods, they may receive
    * {@link MergePolicy.MergeAbortedException}s.
    */
@@ -2497,7 +2497,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
    * index.
    *
    * <p>
-   * <b>NOTE:</b> this method merges all given {@link IndexReader}s in one
+   * <b>NOTE:</b> this method merges all given {@link LeafReader}s in one
    * merge. If you intend to merge a large number of readers, it may be better
    * to call this method multiple times, each time with a small set of readers.
    * In principle, if you use a merge policy with a {@code mergeFactor} or
@@ -2509,23 +2509,19 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
    * @throws IOException
    *           if there is a low-level IO error
    */
-  public void addIndexes(IndexReader... readers) throws IOException {
+  public void addIndexes(LeafReader... readers) throws IOException {
     ensureOpen();
     int numDocs = 0;
 
     try {
       if (infoStream.isEnabled("IW")) {
-        infoStream.message("IW", "flush at addIndexes(IndexReader...)");
+        infoStream.message("IW", "flush at addIndexes(LeafReader...)");
       }
       flush(false, true);
 
       String mergedName = newSegmentName();
-      final List<LeafReader> mergeReaders = new ArrayList<>();
-      for (IndexReader indexReader : readers) {
-        numDocs += indexReader.numDocs();
-        for (LeafReaderContext ctx : indexReader.leaves()) {
-          mergeReaders.add(ctx.reader());
-        }
+      for (LeafReader leaf : readers) {
+        numDocs += leaf.numDocs();
       }
 
       // Make sure adding the new documents to this index won't
@@ -2541,7 +2537,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
       SegmentInfo info = new SegmentInfo(directory, Version.LATEST, mergedName, -1,
                                          false, codec, null, StringHelper.randomId(), new HashMap<>());
 
-      SegmentMerger merger = new SegmentMerger(mergeReaders, info, infoStream, trackingDir,
+      SegmentMerger merger = new SegmentMerger(Arrays.asList(readers), info, infoStream, trackingDir,
                                                globalFieldNumberMap,
                                                context);
 
@@ -165,9 +165,9 @@ public class TrackingIndexWriter {
     return indexingGen.get();
   }
 
-  /** Calls {@link IndexWriter#addIndexes(IndexReader...)}
+  /** Calls {@link IndexWriter#addIndexes(LeafReader...)}
    * and returns the generation that reflects this change. */
-  public long addIndexes(IndexReader... readers) throws IOException {
+  public long addIndexes(LeafReader... readers) throws IOException {
     writer.addIndexes(readers);
     // Return gen as of when indexing finished:
     return indexingGen.get();
@@ -84,31 +84,29 @@ public class Test2BPostingsBytes extends LuceneTestCase {
     w.close();
 
     DirectoryReader oneThousand = DirectoryReader.open(dir);
-    IndexReader subReaders[] = new IndexReader[1000];
+    DirectoryReader subReaders[] = new DirectoryReader[1000];
     Arrays.fill(subReaders, oneThousand);
-    MultiReader mr = new MultiReader(subReaders);
     BaseDirectoryWrapper dir2 = newFSDirectory(createTempDir("2BPostingsBytes2"));
     if (dir2 instanceof MockDirectoryWrapper) {
       ((MockDirectoryWrapper)dir2).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
     }
     IndexWriter w2 = new IndexWriter(dir2,
         new IndexWriterConfig(null));
-    w2.addIndexes(mr);
+    TestUtil.addIndexesSlowly(w2, subReaders);
     w2.forceMerge(1);
     w2.close();
     oneThousand.close();
 
     DirectoryReader oneMillion = DirectoryReader.open(dir2);
-    subReaders = new IndexReader[2000];
+    subReaders = new DirectoryReader[2000];
     Arrays.fill(subReaders, oneMillion);
-    mr = new MultiReader(subReaders);
     BaseDirectoryWrapper dir3 = newFSDirectory(createTempDir("2BPostingsBytes3"));
     if (dir3 instanceof MockDirectoryWrapper) {
       ((MockDirectoryWrapper)dir3).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
     }
     IndexWriter w3 = new IndexWriter(dir3,
         new IndexWriterConfig(null));
-    w3.addIndexes(mr);
+    TestUtil.addIndexesSlowly(w3, subReaders);
     w3.forceMerge(1);
     w3.close();
     oneMillion.close();
@@ -652,7 +652,7 @@ public class TestAddIndexes extends LuceneTestCase {
     IndexWriter writer2;
     final List<Throwable> failures = new ArrayList<>();
     volatile boolean didClose;
-    final IndexReader[] readers;
+    final DirectoryReader[] readers;
     final int NUM_COPY;
     final static int NUM_THREADS = 5;
     final Thread[] threads = new Thread[NUM_THREADS];
@@ -671,7 +671,7 @@ public class TestAddIndexes extends LuceneTestCase {
       writer2.commit();
 
 
-      readers = new IndexReader[NUM_COPY];
+      readers = new DirectoryReader[NUM_COPY];
       for(int i=0;i<NUM_COPY;i++)
         readers[i] = DirectoryReader.open(dir);
     }
@@ -770,9 +770,9 @@ public class TestAddIndexes extends LuceneTestCase {
           break;
         case 2:
           if (VERBOSE) {
-            System.out.println(Thread.currentThread().getName() + ": TEST: addIndexes(IndexReader[])");
+            System.out.println(Thread.currentThread().getName() + ": TEST: addIndexes(LeafReader[])");
           }
-          writer2.addIndexes(readers);
+          TestUtil.addIndexesSlowly(writer2, readers);
           break;
         case 3:
           if (VERBOSE) {
@@ -875,9 +875,9 @@ public class TestAddIndexes extends LuceneTestCase {
           break;
         case 2:
           if (VERBOSE) {
-            System.out.println("TEST: " + Thread.currentThread().getName() + ": addIndexes(IR[])");
+            System.out.println("TEST: " + Thread.currentThread().getName() + ": addIndexes(LR[])");
           }
-          writer2.addIndexes(readers);
+          TestUtil.addIndexesSlowly(writer2, readers);
           break;
         case 3:
           if (VERBOSE) {
@@ -982,11 +982,8 @@ public class TestAddIndexes extends LuceneTestCase {
 
     // Now delete the document
     writer.deleteDocuments(new Term("id", "myid"));
-    IndexReader r = DirectoryReader.open(dirs[1]);
-    try {
-      writer.addIndexes(r);
-    } finally {
-      r.close();
+    try (DirectoryReader r = DirectoryReader.open(dirs[1])) {
+      TestUtil.addIndexesSlowly(writer, r);
     }
     writer.commit();
     assertEquals("Documents from the incoming index should not have been deleted", 1, writer.numDocs());
@@ -1101,7 +1098,7 @@ public class TestAddIndexes extends LuceneTestCase {
       w.close();
     }
 
-    IndexReader[] readers = new IndexReader[] { DirectoryReader.open(dirs[0]), DirectoryReader.open(dirs[1]) };
+    DirectoryReader[] readers = new DirectoryReader[] { DirectoryReader.open(dirs[0]), DirectoryReader.open(dirs[1]) };
 
     MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory());
     dir.setEnableVirusScanner(false); // we check for specific list of files
@@ -1111,7 +1108,7 @@ public class TestAddIndexes extends LuceneTestCase {
     lmp.setNoCFSRatio(1.0);
     lmp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY);
     IndexWriter w3 = new IndexWriter(dir, conf);
-    w3.addIndexes(readers);
+    TestUtil.addIndexesSlowly(w3, readers);
     w3.close();
     // we should now see segments_X,
     // _Y.cfs,_Y.cfe, _Z.si
@@ -1182,7 +1179,7 @@ public class TestAddIndexes extends LuceneTestCase {
     doc.add(newStringField("f1", "doc1 field1", Field.Store.YES));
     doc.add(newStringField("id", "1", Field.Store.YES));
     w.addDocument(doc);
-    IndexReader r1 = w.getReader();
+    DirectoryReader r1 = w.getReader();
     w.close();
 
     Directory d2 = newDirectory();
@@ -1191,12 +1188,12 @@ public class TestAddIndexes extends LuceneTestCase {
     doc.add(newStringField("f2", "doc2 field2", Field.Store.YES));
     doc.add(newStringField("id", "2", Field.Store.YES));
     w.addDocument(doc);
-    IndexReader r2 = w.getReader();
+    DirectoryReader r2 = w.getReader();
     w.close();
 
     Directory d3 = newDirectory();
     w = new RandomIndexWriter(random(), d3);
-    w.addIndexes(r1, r2);
+    TestUtil.addIndexesSlowly(w.w, r1, r2);
     r1.close();
     d1.close();
     r2.close();
@@ -1220,8 +1217,7 @@ public class TestAddIndexes extends LuceneTestCase {
   public void testAddEmpty() throws Exception {
     Directory d1 = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random(), d1);
-    MultiReader empty = new MultiReader();
-    w.addIndexes(empty);
+    w.addIndexes(new LeafReader[0]);
     w.close();
     DirectoryReader dr = DirectoryReader.open(d1);
     for (LeafReaderContext ctx : dr.leaves()) {
@@ -1239,7 +1235,7 @@ public class TestAddIndexes extends LuceneTestCase {
     Directory src = newDirectory(), dest = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random(), src);
     w.addDocument(new Document());
-    IndexReader allDeletedReader = new AllDeletedFilterReader(w.getReader().leaves().get(0).reader());
+    LeafReader allDeletedReader = new AllDeletedFilterReader(w.getReader().leaves().get(0).reader());
     w.close();
 
     w = new RandomIndexWriter(random(), dest);
@@ -1106,7 +1106,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
       writer.addIndexes(dir1);
     } else {
       DirectoryReader reader = DirectoryReader.open(dir1);
-      writer.addIndexes(reader);
+      TestUtil.addIndexesSlowly(writer, reader);
       reader.close();
     }
     writer.close();
@@ -38,6 +38,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
 
 /**
  *
@@ -612,9 +613,9 @@ public class TestDocValuesIndexing extends LuceneTestCase {
       // expected
     }
 
-    IndexReader r = DirectoryReader.open(dir2);
+    DirectoryReader r = DirectoryReader.open(dir2);
     try {
-      w.addIndexes(new IndexReader[] {r});
+      TestUtil.addIndexesSlowly(w, r);
       fail("didn't hit expected exception");
     } catch (IllegalArgumentException iae) {
       // expected
@@ -781,14 +782,14 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     doc = new Document();
     doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
     writer.addDocument(doc);
-    IndexReader[] readers = new IndexReader[] {DirectoryReader.open(dir)};
+    DirectoryReader reader = DirectoryReader.open(dir);
     try {
-      writer.addIndexes(readers);
+      TestUtil.addIndexesSlowly(writer, reader);
       fail("did not hit exception");
     } catch (IllegalArgumentException iae) {
       // expected
     }
-    readers[0].close();
+    reader.close();
     writer.close();
 
     dir.close();
@@ -833,9 +834,9 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     Directory dir2 = newDirectory();
     conf = newIndexWriterConfig(new MockAnalyzer(random()));
     writer = new IndexWriter(dir2, conf);
-    IndexReader[] readers = new IndexReader[] {DirectoryReader.open(dir)};
-    writer.addIndexes(readers);
-    readers[0].close();
+    DirectoryReader reader = DirectoryReader.open(dir);
+    TestUtil.addIndexesSlowly(writer, reader);
+    reader.close();
     doc = new Document();
     doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
     try {
@@ -138,11 +138,11 @@ public class TestFilterLeafReader extends LuceneTestCase {
     ((BaseDirectoryWrapper) target).setCrossCheckTermVectorsOnClose(false);
 
     writer = new IndexWriter(target, newIndexWriterConfig(new MockAnalyzer(random())));
-    IndexReader reader = new TestReader(DirectoryReader.open(directory));
-    writer.addIndexes(reader);
+    try (LeafReader reader = new TestReader(DirectoryReader.open(directory))) {
+      writer.addIndexes(reader);
+    }
     writer.close();
-    reader.close();
-    reader = DirectoryReader.open(target);
+    IndexReader reader = DirectoryReader.open(target);
 
     TermsEnum terms = MultiFields.getTerms(reader, "default").iterator(null);
     while (terms.next() != null) {
@@ -874,8 +874,8 @@ public class TestIndexWriter extends LuceneTestCase {
     writer2.addDocument(doc);
     writer2.close();
 
-    IndexReader r1 = DirectoryReader.open(dir2);
-    writer.addIndexes(r1, r1);
+    DirectoryReader r1 = DirectoryReader.open(dir2);
+    TestUtil.addIndexesSlowly(writer, r1, r1);
     writer.close();
 
     IndexReader r3 = DirectoryReader.open(dir);
@@ -30,7 +30,9 @@ import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.TimeUnits;
 
 import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
 
 @SuppressCodecs({ "SimpleText", "Memory", "Direct" })
@@ -275,9 +277,9 @@ public class TestIndexWriterMaxDocs extends LuceneTestCase {
       // expected
     }
     assertEquals(1, w2.maxDoc());
-    IndexReader ir = DirectoryReader.open(dir);
+    DirectoryReader ir = DirectoryReader.open(dir);
     try {
-      w2.addIndexes(new IndexReader[] {ir});
+      TestUtil.addIndexesSlowly(w2, ir);
       fail("didn't hit exception");
     } catch (IllegalStateException ise) {
       // expected
@@ -323,12 +323,12 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
         }
         writer.forceMerge(1);
       } else if (1 == method) {
-        IndexReader readers[] = new IndexReader[dirs.length];
+        DirectoryReader readers[] = new DirectoryReader[dirs.length];
         for(int i=0;i<dirs.length;i++) {
           readers[i] = DirectoryReader.open(dirs[i]);
         }
         try {
-          writer.addIndexes(readers);
+          TestUtil.addIndexesSlowly(writer, readers);
         } finally {
           for(int i=0;i<dirs.length;i++) {
             readers[i].close();
@@ -39,7 +39,7 @@ public class TestIndexWriterOutOfFileDescriptors extends LuceneTestCase {
     dir.setRandomIOExceptionRateOnOpen(rate);
     int iters = atLeast(20);
     LineFileDocs docs = new LineFileDocs(random());
-    IndexReader r = null;
+    DirectoryReader r = null;
     DirectoryReader r2 = null;
     boolean any = false;
     MockDirectoryWrapper dirCopy = null;
@@ -68,9 +68,9 @@ public class TestIndexWriterOutOfFileDescriptors extends LuceneTestCase {
       if (r != null && random().nextInt(5) == 3) {
         if (random().nextBoolean()) {
           if (VERBOSE) {
-            System.out.println("TEST: addIndexes IR[]");
+            System.out.println("TEST: addIndexes LR[]");
           }
-          w.addIndexes(new IndexReader[] {r});
+          TestUtil.addIndexesSlowly(w, r);
         } else {
           if (VERBOSE) {
             System.out.println("TEST: addIndexes Directory[]");
@@ -399,7 +399,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     final Thread[] threads = new Thread[numThreads];
     IndexWriter mainWriter;
     final List<Throwable> failures = new ArrayList<>();
-    IndexReader[] readers;
+    DirectoryReader[] readers;
     boolean didClose = false;
     AtomicInteger count = new AtomicInteger(0);
     AtomicInteger numaddIndexes = new AtomicInteger(0);
@@ -418,7 +418,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 
       writer.close();
 
-      readers = new IndexReader[numDirs];
+      readers = new DirectoryReader[numDirs];
       for (int i = 0; i < numDirs; i++)
         readers[i] = DirectoryReader.open(addDir);
     }
@@ -498,7 +498,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
           numaddIndexes.incrementAndGet();
           break;
         case 2:
-          mainWriter.addIndexes(readers);
+          TestUtil.addIndexesSlowly(mainWriter, readers);
           break;
         case 3:
           mainWriter.commit();
@@ -1196,7 +1196,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
       writer.addIndexes(dir1);
     } else {
       DirectoryReader reader = DirectoryReader.open(dir1);
-      writer.addIndexes(reader);
+      TestUtil.addIndexesSlowly(writer, reader);
       reader.close();
     }
     writer.close();
@@ -18,6 +18,8 @@ package org.apache.lucene.index;
  */
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
@@ -65,11 +67,11 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
         DirectoryReader.open(rd2));
 
     // When unpatched, Lucene crashes here with a NoSuchElementException (caused by ParallelTermEnum)
-    iwOut.addIndexes(cpr);
-    iwOut.forceMerge(1);
-
-    // 2nd try with a readerless parallel reader
-    iwOut.addIndexes(new ParallelCompositeReader());
+    List<LeafReader> leaves = new ArrayList<>();
+    for (LeafReaderContext leaf : cpr.leaves()) {
+      leaves.add(leaf.reader());
+    }
+    iwOut.addIndexes(leaves.toArray(new LeafReader[0]));
     iwOut.forceMerge(1);
 
     iwOut.close();
@@ -1,4 +1,4 @@
-package org.apache.lucene.search;
+package org.apache.lucene.index;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -33,6 +33,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.English;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
@@ -154,8 +155,8 @@ public class TestTermVectors extends LuceneTestCase {
 
     IndexWriter writer = createWriter(target);
     for (Directory dir : input) {
-      IndexReader r = DirectoryReader.open(dir);
-      writer.addIndexes(r);
+      DirectoryReader r = DirectoryReader.open(dir);
+      TestUtil.addIndexesSlowly(writer, r);
       r.close();
     }
     writer.forceMerge(1);
@@ -56,7 +56,7 @@ public abstract class TaxonomyMergeUtils {
     for (int i = 0; i < numReaders; i++) {
       wrappedLeaves[i] = new OrdinalMappingLeafReader(leaves.get(i).reader(), ordinalMap, srcConfig);
     }
-    destIndexWriter.addIndexes(new MultiReader(wrappedLeaves));
+    destIndexWriter.addIndexes(wrappedLeaves);
 
     // commit changes to taxonomy and index respectively.
     destTaxoWriter.commit();
@@ -33,7 +33,7 @@ import org.apache.lucene.util.Version;
 
 /**
  * This tool splits input index into multiple equal parts. The method employed
- * here uses {@link IndexWriter#addIndexes(IndexReader[])} where the input data
+ * here uses {@link IndexWriter#addIndexes(LeafReader[])} where the input data
  * comes from the input index with artificially applied deletes to the document
 * id-s that fall outside the selected partition.
  * <p>Note 1: Deletes are only applied to a buffered list of deleted docs and
@@ -102,7 +102,7 @@ public class MultiPassIndexSplitter {
       System.err.println("Writing part " + (i + 1) + " ...");
       // pass the subreaders directly, as our wrapper's numDocs/hasDeletetions are not up-to-date
       final List<? extends FakeDeleteLeafIndexReader> sr = input.getSequentialSubReaders();
-      w.addIndexes(sr.toArray(new IndexReader[sr.size()])); // TODO: maybe take List<IR> here?
+      w.addIndexes(sr.toArray(new LeafReader[sr.size()])); // TODO: maybe take List<IR> here?
       w.close();
     }
     System.err.println("Done.");
@@ -103,7 +103,7 @@ public class PKIndexSplitter {
     final IndexWriter w = new IndexWriter(target, config);
     try {
       final List<LeafReaderContext> leaves = reader.leaves();
-      final IndexReader[] subReaders = new IndexReader[leaves.size()];
+      final LeafReader[] subReaders = new LeafReader[leaves.size()];
       int i = 0;
       for (final LeafReaderContext ctx : leaves) {
         subReaders[i++] = new DocumentFilteredLeafIndexReader(ctx, preserveFilter, negateFilter);
@@ -71,7 +71,7 @@ public class IndexSortingTest extends SorterTestBase {
 
     Directory target = newDirectory();
     IndexWriter writer = new IndexWriter(target, newIndexWriterConfig(null));
-    IndexReader reader = SortingLeafReader.wrap(unsortedReader, sorter);
+    LeafReader reader = SortingLeafReader.wrap(unsortedReader, sorter);
     writer.addIndexes(reader);
     writer.close();
     // NOTE: also closes unsortedReader
@@ -207,7 +207,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
     w.forceMerge(1);
     w.commit();
     w.close();
-    IndexReader reader = DirectoryReader.open(dir);
+    DirectoryReader reader = DirectoryReader.open(dir);
 
     Directory dir2 = newDirectory();
     if (dir2 instanceof MockDirectoryWrapper) {
@@ -219,7 +219,8 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
     mp.setNoCFSRatio(0);
     cfg = new IndexWriterConfig(new MockAnalyzer(random())).setUseCompoundFile(false).setMergePolicy(mp);
     w = new IndexWriter(dir2, cfg);
-    w.addIndexes(reader);
+    TestUtil.addIndexesSlowly(w, reader);
+
     w.commit();
     w.close();
 
@@ -647,7 +647,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
 
     Directory dir2 = newDirectory();
     w = new RandomIndexWriter(random(), dir2);
-    w.addIndexes(reader);
+    TestUtil.addIndexesSlowly(w.w, reader);
     reader.close();
     dir.close();
 
@@ -787,7 +787,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
       }
       dirs[i] = newDirectory();
       IndexWriter adder = new IndexWriter(dirs[i], new IndexWriterConfig(null));
-      adder.addIndexes(reader);
+      TestUtil.addIndexesSlowly(adder, reader);
       adder.commit();
       adder.close();
 
@@ -222,7 +222,7 @@ public class RandomIndexWriter implements Closeable {
     w.addIndexes(dirs);
   }
 
-  public void addIndexes(IndexReader... readers) throws IOException {
+  public void addIndexes(LeafReader... readers) throws IOException {
     LuceneTestCase.maybeChangeLiveIndexWriterConfig(r, w.getConfig());
     w.addIndexes(readers);
   }
@@ -29,6 +29,7 @@ import java.nio.CharBuffer;
 import java.nio.file.FileSystem;
 import java.nio.file.Files;
 import java.nio.file.Path;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
@@ -68,6 +69,7 @@ import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.index.CheckIndex;
 import org.apache.lucene.index.ConcurrentMergeScheduler;
+import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
@@ -872,6 +874,16 @@ public final class TestUtil {
       return false;
     }
   }
+
+  public static void addIndexesSlowly(IndexWriter writer, DirectoryReader... readers) throws IOException {
+    List<LeafReader> leaves = new ArrayList<>();
+    for (DirectoryReader reader : readers) {
+      for (LeafReaderContext context : reader.leaves()) {
+        leaves.add(context.reader());
+      }
+    }
+    writer.addIndexes(leaves.toArray(new LeafReader[leaves.size()]));
+  }
 
   /** just tries to configure things to keep the open file
    * count lowish */
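This helper is what the test changes throughout this commit call in place of the removed overload. A typical call site looks like the following sketch (variable names illustrative):

    // Open a composite reader over the source index and let the helper
    // hand its per-segment readers to the target writer.
    DirectoryReader reader = DirectoryReader.open(dir);
    TestUtil.addIndexesSlowly(writer, reader);
    reader.close();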
@@ -24,6 +24,8 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.search.BooleanClause;
@@ -467,9 +469,15 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
 
     List<DirectoryReader> readers = cmd.readers;
     if (readers != null && readers.size() > 0) {
+      List<LeafReader> leaves = new ArrayList<>();
+      for (DirectoryReader reader : readers) {
+        for (LeafReaderContext leaf : reader.leaves()) {
+          leaves.add(leaf.reader());
+        }
+      }
       RefCounted<IndexWriter> iw = solrCoreState.getIndexWriter(core);
       try {
-        iw.get().addIndexes(readers.toArray(new IndexReader[readers.size()]));
+        iw.get().addIndexes(leaves.toArray(new LeafReader[leaves.size()]));
       } finally {
         iw.decref();
       }
@@ -129,7 +129,7 @@ public class SolrIndexSplitter {
       // This removes deletions but optimize might still be needed because sub-shards will have the same number of segments as the parent shard.
       for (int segmentNumber = 0; segmentNumber<leaves.size(); segmentNumber++) {
         log.info("SolrIndexSplitter: partition #" + partitionNumber + " partitionCount=" + numPieces + (ranges != null ? " range=" + ranges.get(partitionNumber) : "") + " segment #"+segmentNumber + " segmentCount=" + leaves.size());
-        IndexReader subReader = new LiveDocsReader( leaves.get(segmentNumber), segmentDocSets.get(segmentNumber)[partitionNumber] );
+        LeafReader subReader = new LiveDocsReader( leaves.get(segmentNumber), segmentDocSets.get(segmentNumber)[partitionNumber] );
         iw.addIndexes(subReader);
       }
       success = true;