commit 39063eee73

SOLR-2452: merged with trunk up to r1132517

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/solr2452@1132518 13f79535-47bb-0310-9956-ffa450edef68
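Most of the test changes in this merge follow one recurring pattern: expensive or extreme randomized settings are gated behind TEST_NIGHTLY, so default runs pick cheap "reasonable" values and only nightly runs (or a rare random roll) exercise the "crazy" ones. A minimal standalone sketch of that gating pattern, using names from the hunks below (the wrapper class and helper method here are hypothetical, not part of the commit):

    import java.util.Random;

    // Hypothetical illustration; in the real LuceneTestCase the flag comes
    // from the "tests.nightly" system property (see the -Dtests.nightly hunk).
    class NightlyGatingSketch {
      static final boolean TEST_NIGHTLY = Boolean.getBoolean("tests.nightly");

      // Mirrors the "crazy value" / "reasonable value" branches below:
      // nightly runs flip a coin for the extreme case, other runs hit it
      // only on a 1-in-20 roll.
      static int chooseMaxBufferedDocs(Random r) {
        if ((TEST_NIGHTLY && r.nextBoolean()) || r.nextInt(20) == 17) {
          return 2 + r.nextInt(6);   // crazy value: 2..7
        } else {
          return 8 + r.nextInt(993); // reasonable value: 8..1000
        }
      }
    }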
@@ -575,6 +575,11 @@
         <groupId>org.apache.felix</groupId>
         <artifactId>maven-bundle-plugin</artifactId>
         <version>2.3.4</version>
+        <configuration>
+          <instructions>
+            <Export-Package>*;-split-package:=merge-first</Export-Package>
+          </instructions>
+        </configuration>
         <executions>
           <execution>
             <id>bundle-manifest</id>
@@ -23,6 +23,7 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
 
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.automaton.CharacterRunAutomaton;
 
 /**
@@ -127,13 +128,16 @@ public final class MockAnalyzer extends Analyzer {
   private synchronized TokenFilter maybePayload(TokenFilter stream, String fieldName) {
     Integer val = previousMappings.get(fieldName);
     if (val == null) {
-      switch(random.nextInt(3)) {
-        case 0: val = -1; // no payloads
-                break;
-        case 1: val = Integer.MAX_VALUE; // variable length payload
-                break;
-        case 2: val = random.nextInt(12); // fixed length payload
-                break;
+      val = -1; // no payloads
+      if (LuceneTestCase.TEST_NIGHTLY || random.nextInt(20) == 0) {
+        switch(random.nextInt(3)) {
+          case 0: val = -1; // no payloads
+                  break;
+          case 1: val = Integer.MAX_VALUE; // variable length payload
+                  break;
+          case 2: val = random.nextInt(12); // fixed length payload
+                  break;
+        }
       }
       previousMappings.put(fieldName, val); // save it so we are consistent for this field
     }
@@ -272,7 +272,7 @@ public class RandomIndexWriter implements Closeable {
   public void close() throws IOException {
     // if someone isn't using getReader() API, we want to be sure to
    // maybeOptimize since presumably they might open a reader on the dir.
-    if (getReaderCalled == false && r.nextInt(4) == 2) {
+    if (getReaderCalled == false && r.nextInt(8) == 2) {
      doRandomOptimize();
    }
    w.close();
@@ -836,14 +836,22 @@ public abstract class LuceneTestCase extends Assert {
       c.setMergeScheduler(new SerialMergeScheduler());
     }
     if (r.nextBoolean()) {
-      if (r.nextInt(20) == 17) {
-        c.setMaxBufferedDocs(2);
+      if ((TEST_NIGHTLY && random.nextBoolean()) || r.nextInt(20) == 17) {
+        // crazy value
+        c.setMaxBufferedDocs(_TestUtil.nextInt(r, 2, 7));
       } else {
-        c.setMaxBufferedDocs(_TestUtil.nextInt(r, 2, 1000));
+        // reasonable value
+        c.setMaxBufferedDocs(_TestUtil.nextInt(r, 8, 1000));
       }
     }
     if (r.nextBoolean()) {
-      c.setTermIndexInterval(_TestUtil.nextInt(r, 1, 1000));
+      if ((TEST_NIGHTLY && random.nextBoolean()) || r.nextInt(20) == 17) {
+        // crazy value
+        c.setTermIndexInterval(random.nextBoolean() ? _TestUtil.nextInt(r, 1, 31) : _TestUtil.nextInt(r, 129, 1000));
+      } else {
+        // reasonable value
+        c.setTermIndexInterval(_TestUtil.nextInt(r, 32, 128));
+      }
     }
     if (r.nextBoolean()) {
       c.setIndexerThreadPool(new ThreadAffinityDocumentsWriterThreadPool(_TestUtil.nextInt(r, 1, 20)));
@@ -874,22 +882,22 @@ public abstract class LuceneTestCase extends Assert {
     LogMergePolicy logmp = r.nextBoolean() ? new LogDocMergePolicy() : new LogByteSizeMergePolicy();
     logmp.setUseCompoundFile(r.nextBoolean());
     logmp.setCalibrateSizeByDeletes(r.nextBoolean());
-    if (r.nextInt(3) == 2) {
-      logmp.setMergeFactor(2);
+    if ((TEST_NIGHTLY && random.nextBoolean()) || r.nextInt(20) == 17) {
+      logmp.setMergeFactor(_TestUtil.nextInt(r, 2, 4));
     } else {
-      logmp.setMergeFactor(_TestUtil.nextInt(r, 2, 20));
+      logmp.setMergeFactor(_TestUtil.nextInt(r, 5, 50));
     }
     return logmp;
   }
 
   public static TieredMergePolicy newTieredMergePolicy(Random r) {
     TieredMergePolicy tmp = new TieredMergePolicy();
-    if (r.nextInt(3) == 2) {
-      tmp.setMaxMergeAtOnce(2);
-      tmp.setMaxMergeAtOnceExplicit(2);
+    if ((TEST_NIGHTLY && random.nextBoolean()) || r.nextInt(20) == 17) {
+      tmp.setMaxMergeAtOnce(_TestUtil.nextInt(r, 2, 4));
+      tmp.setMaxMergeAtOnceExplicit(_TestUtil.nextInt(r, 2, 4));
     } else {
-      tmp.setMaxMergeAtOnce(_TestUtil.nextInt(r, 2, 20));
-      tmp.setMaxMergeAtOnceExplicit(_TestUtil.nextInt(r, 2, 30));
+      tmp.setMaxMergeAtOnce(_TestUtil.nextInt(r, 5, 50));
+      tmp.setMaxMergeAtOnceExplicit(_TestUtil.nextInt(r, 5, 50));
     }
     tmp.setMaxMergedSegmentMB(0.2 + r.nextDouble() * 2.0);
     tmp.setFloorSegmentMB(0.2 + r.nextDouble() * 2.0);
@@ -1052,8 +1060,13 @@ public abstract class LuceneTestCase extends Assert {
   /** Returns a new field instance, using the specified random.
    * See {@link #newField(String, String, Field.Store, Field.Index, Field.TermVector)} for more information */
   public static Field newField(Random random, String name, String value, Store store, Index index, TermVector tv) {
+    if (!TEST_NIGHTLY && random.nextInt(20) > 0) {
+      // most of the time, don't modify the params
+      return new Field(name, value, store, index, tv);
+    }
+
     if (!index.isIndexed())
-      return new Field(name, value, store, index);
+      return new Field(name, value, store, index, tv);
 
     if (!store.isStored() && random.nextBoolean())
       store = Store.YES; // randomly store it
@@ -1115,7 +1128,7 @@ public abstract class LuceneTestCase extends Assert {
   };
 
   public static String randomDirectory(Random random) {
-    if (random.nextInt(10) == 0) {
+    if (random.nextInt(20) == 0) {
       return CORE_DIRECTORIES[random.nextInt(CORE_DIRECTORIES.length)];
     } else {
       return "RAMDirectory";
@@ -1179,7 +1192,7 @@ public abstract class LuceneTestCase extends Assert {
   public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap) throws IOException {
 
     if (random.nextBoolean()) {
-      if (maybeWrap && random.nextBoolean()) {
+      if (maybeWrap && random.nextInt(20) == 0) {
        return new IndexSearcher(new SlowMultiReaderWrapper(r));
      } else {
        return new IndexSearcher(r);
@@ -1241,6 +1254,7 @@ public abstract class LuceneTestCase extends Assert {
     if (!TEST_TIMEZONE.equals("random")) sb.append(" -Dtests.timezone=").append(TEST_TIMEZONE);
     if (!TEST_DIRECTORY.equals("random")) sb.append(" -Dtests.directory=").append(TEST_DIRECTORY);
     if (RANDOM_MULTIPLIER > 1) sb.append(" -Dtests.multiplier=").append(RANDOM_MULTIPLIER);
+    if (TEST_NIGHTLY) sb.append(" -Dtests.nightly=true");
     return sb.toString();
   }
 
@@ -1407,6 +1421,10 @@ public abstract class LuceneTestCase extends Assert {
       Codec codec = previousMappings.get(name);
       if (codec == null) {
         codec = knownCodecs.get(Math.abs(perFieldSeed ^ name.hashCode()) % knownCodecs.size());
+        if (codec instanceof SimpleTextCodec && perFieldSeed % 5 != 0) {
+          // make simpletext rarer, choose again
+          codec = knownCodecs.get(Math.abs(perFieldSeed ^ name.toUpperCase(Locale.ENGLISH).hashCode()) % knownCodecs.size());
+        }
         previousMappings.put(name, codec);
       }
       return codec.name;
@@ -155,6 +155,7 @@ public class Test2BTerms extends LuceneTestCase {
 
     MockDirectoryWrapper dir = newFSDirectory(_TestUtil.getTempDir("2BTerms"));
     dir.setThrottling(MockDirectoryWrapper.Throttling.NEVER);
+    dir.setCheckIndexOnClose(false); // don't double-checkindex
     //Directory dir = newFSDirectory(new File("/p/lucene/indices/2bindex"));
 
     if (true) {
@@ -816,8 +816,6 @@ public class TestAddIndexes extends LuceneTestCase {
 
     c.joinThreads();
 
-    _TestUtil.checkIndex(c.dir2);
-
     c.closeDir();
 
     assertTrue(c.failures.size() == 0);
@@ -908,8 +906,6 @@ public class TestAddIndexes extends LuceneTestCase {
       if (VERBOSE) {
         System.out.println("TEST: done join threads");
       }
-      _TestUtil.checkIndex(c.dir2);
-
       c.closeDir();
 
       assertTrue(c.failures.size() == 0);
@@ -933,8 +929,6 @@ public class TestAddIndexes extends LuceneTestCase {
 
     c.joinThreads();
 
-    _TestUtil.checkIndex(c.dir2);
-
     c.closeDir();
 
     assertTrue(c.failures.size() == 0);
@@ -1039,7 +1033,6 @@ public class TestAddIndexes extends LuceneTestCase {
     writer.addIndexes(aux, aux2);
     assertEquals(190, writer.maxDoc());
     writer.close();
-    _TestUtil.checkIndex(dir, provider);
 
     dir.close();
     aux.close();
@@ -188,8 +188,6 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     w.setInfoStream(VERBOSE ? System.out : null);
     w.optimize();
     w.close();
 
-    _TestUtil.checkIndex(dir);
-
     dir.close();
     _TestUtil.rmDir(oldIndxeDir);
@@ -207,8 +205,6 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
         TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     w.addIndexes(dir);
     w.close();
 
-    _TestUtil.checkIndex(targetDir);
-
     dir.close();
     targetDir.close();
@@ -229,9 +225,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     w.addIndexes(reader);
     w.close();
     reader.close();
 
-    _TestUtil.checkIndex(targetDir);
-
     dir.close();
     targetDir.close();
     _TestUtil.rmDir(oldIndxeDir);
@@ -743,8 +737,6 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
         .upgrade();
 
     checkAllSegmentsUpgraded(dir);
 
-    _TestUtil.checkIndex(dir);
-
     dir.close();
     _TestUtil.rmDir(oldIndxeDir);
@@ -214,7 +214,7 @@ public class TestDocTermOrds extends LuceneTestCase {
   public void testRandom() throws Exception {
     MockDirectoryWrapper dir = newDirectory();
 
-    final int NUM_TERMS = 100 * RANDOM_MULTIPLIER;
+    final int NUM_TERMS = (TEST_NIGHTLY ? 100 : 20) * RANDOM_MULTIPLIER;
     final Set<BytesRef> terms = new HashSet<BytesRef>();
     while(terms.size() < NUM_TERMS) {
       final String s = _TestUtil.randomRealisticUnicodeString(random);
@@ -226,7 +226,7 @@ public class TestDocTermOrds extends LuceneTestCase {
     final BytesRef[] termsArray = terms.toArray(new BytesRef[terms.size()]);
     Arrays.sort(termsArray);
 
-    final int NUM_DOCS = 1000 * RANDOM_MULTIPLIER;
+    final int NUM_DOCS = (TEST_NIGHTLY ? 1000 : 100) * RANDOM_MULTIPLIER;
 
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
 
@@ -264,7 +264,7 @@ public class TestDocTermOrds extends LuceneTestCase {
       }
       for(int ord : ordsForDocSet) {
         ordsForDoc[upto++] = ord;
-        Field field = newField("field", termsArray[ord].utf8ToString(), Field.Index.NOT_ANALYZED);
+        Field field = newField("field", termsArray[ord].utf8ToString(), Field.Index.NOT_ANALYZED_NO_NORMS);
         if (VERBOSE) {
           System.out.println("  f=" + termsArray[ord].utf8ToString());
         }
@@ -317,7 +317,7 @@ public class TestDocTermOrds extends LuceneTestCase {
     }
     final String[] prefixesArray = prefixes.toArray(new String[prefixes.size()]);
 
-    final int NUM_TERMS = 100 * RANDOM_MULTIPLIER;
+    final int NUM_TERMS = (TEST_NIGHTLY ? 100 : 20) * RANDOM_MULTIPLIER;
     final Set<BytesRef> terms = new HashSet<BytesRef>();
     while(terms.size() < NUM_TERMS) {
       final String s = prefixesArray[random.nextInt(prefixesArray.length)] + _TestUtil.randomRealisticUnicodeString(random);
@@ -329,7 +329,7 @@ public class TestDocTermOrds extends LuceneTestCase {
     final BytesRef[] termsArray = terms.toArray(new BytesRef[terms.size()]);
     Arrays.sort(termsArray);
 
-    final int NUM_DOCS = 1000 * RANDOM_MULTIPLIER;
+    final int NUM_DOCS = (TEST_NIGHTLY ? 1000 : 100) * RANDOM_MULTIPLIER;
 
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
 
@@ -367,7 +367,7 @@ public class TestDocTermOrds extends LuceneTestCase {
       }
       for(int ord : ordsForDocSet) {
         ordsForDoc[upto++] = ord;
-        Field field = newField("field", termsArray[ord].utf8ToString(), Field.Index.NOT_ANALYZED);
+        Field field = newField("field", termsArray[ord].utf8ToString(), Field.Index.NOT_ANALYZED_NO_NORMS);
         if (VERBOSE) {
           System.out.println("  f=" + termsArray[ord].utf8ToString());
         }
@@ -458,9 +458,9 @@ public class TestDocTermOrds extends LuceneTestCase {
     final TermsEnum te = dto.getOrdTermsEnum(r);
     if (te == null) {
       if (prefixRef == null) {
-        assertNull(r.fields().terms("field"));
+        assertNull(MultiFields.getTerms(r, "field"));
       } else {
-        Terms terms = r.fields().terms("field");
+        Terms terms = MultiFields.getTerms(r, "field");
         if (terms != null) {
           TermsEnum termsEnum = terms.iterator();
           TermsEnum.SeekStatus result = termsEnum.seek(prefixRef, false);
@@ -51,13 +51,13 @@ public class TestDocsAndPositions extends LuceneTestCase {
       Document doc = new Document();
       doc.add(newField(fieldName, "1 2 3 4 5 6 7 8 9 10 "
           + "1 2 3 4 5 6 7 8 9 10 " + "1 2 3 4 5 6 7 8 9 10 "
-          + "1 2 3 4 5 6 7 8 9 10", Field.Store.YES, Field.Index.ANALYZED));
+          + "1 2 3 4 5 6 7 8 9 10", Field.Store.NO, Field.Index.ANALYZED_NO_NORMS));
       writer.addDocument(doc);
     }
     IndexReader reader = writer.getReader();
     writer.close();
 
-    for (int i = 0; i < 39 * RANDOM_MULTIPLIER; i++) {
+    for (int i = 0; i < (TEST_NIGHTLY ? 39 : 13) * RANDOM_MULTIPLIER; i++) {
       BytesRef bytes = new BytesRef("1");
       ReaderContext topReaderContext = reader.getTopReaderContext();
       AtomicReaderContext[] leaves = ReaderUtil.leaves(topReaderContext);
@@ -112,7 +112,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
-    int numDocs = 131;
+    int numDocs = TEST_NIGHTLY ? 131 : 47;
     int max = 1051;
     int term = random.nextInt(max);
     Integer[][] positionsInDoc = new Integer[numDocs][];
@@ -120,7 +120,8 @@ public class TestDocsAndPositions extends LuceneTestCase {
       Document doc = new Document();
       ArrayList<Integer> positions = new ArrayList<Integer>();
       StringBuilder builder = new StringBuilder();
-      for (int j = 0; j < 3049; j++) {
+      int num = TEST_NIGHTLY ? 3049 : 499;
+      for (int j = 0; j < num; j++) {
         int nextInt = random.nextInt(max);
         builder.append(nextInt).append(" ");
         if (nextInt == term) {
@@ -129,10 +130,10 @@ public class TestDocsAndPositions extends LuceneTestCase {
       }
       if (positions.size() == 0) {
         builder.append(term);
-        positions.add(3049);
+        positions.add(num);
       }
-      doc.add(newField(fieldName, builder.toString(), Field.Store.YES,
-          Field.Index.ANALYZED));
+      doc.add(newField(fieldName, builder.toString(), Field.Store.NO,
+          Field.Index.ANALYZED_NO_NORMS));
       positionsInDoc[i] = positions.toArray(new Integer[0]);
       writer.addDocument(doc);
     }
@@ -140,7 +141,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
     IndexReader reader = writer.getReader();
     writer.close();
 
-    for (int i = 0; i < 39 * RANDOM_MULTIPLIER; i++) {
+    for (int i = 0; i < (TEST_NIGHTLY ? 39 : 13) * RANDOM_MULTIPLIER; i++) {
       BytesRef bytes = new BytesRef("" + term);
       ReaderContext topReaderContext = reader.getTopReaderContext();
       AtomicReaderContext[] leaves = ReaderUtil.leaves(topReaderContext);
@@ -192,7 +193,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
-    int numDocs = 499;
+    int numDocs = TEST_NIGHTLY ? 499 : 131;
     int max = 15678;
     int term = random.nextInt(max);
     int[] freqInDoc = new int[numDocs];
@@ -206,15 +207,15 @@ public class TestDocsAndPositions extends LuceneTestCase {
           freqInDoc[i]++;
         }
       }
-      doc.add(newField(fieldName, builder.toString(), Field.Store.YES,
-          Field.Index.ANALYZED));
+      doc.add(newField(fieldName, builder.toString(), Field.Store.NO,
+          Field.Index.ANALYZED_NO_NORMS));
       writer.addDocument(doc);
     }
 
     IndexReader reader = writer.getReader();
     writer.close();
 
-    for (int i = 0; i < 39 * RANDOM_MULTIPLIER; i++) {
+    for (int i = 0; i < (TEST_NIGHTLY ? 39 : 13) * RANDOM_MULTIPLIER; i++) {
       BytesRef bytes = new BytesRef("" + term);
       ReaderContext topReaderContext = reader.getTopReaderContext();
       AtomicReaderContext[] leaves = ReaderUtil.leaves(topReaderContext);
@@ -281,8 +282,8 @@ public class TestDocsAndPositions extends LuceneTestCase {
           builder.append("odd ");
         }
       }
-      doc.add(newField(fieldName, builder.toString(), Field.Store.YES,
-          Field.Index.ANALYZED));
+      doc.add(newField(fieldName, builder.toString(), Field.Store.NO,
+          Field.Index.ANALYZED_NO_NORMS));
       writer.addDocument(doc);
     }
 
@@ -290,7 +291,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
     IndexReader reader = writer.getReader();
     writer.close();
 
-    for (int i = 0; i < 39 * RANDOM_MULTIPLIER; i++) {
+    for (int i = 0; i < (TEST_NIGHTLY ? 39 : 13) * RANDOM_MULTIPLIER; i++) {
       BytesRef bytes = new BytesRef("even");
 
       ReaderContext topReaderContext = reader.getTopReaderContext();
@@ -39,16 +39,17 @@ import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
 
 public class TestFieldsReader extends LuceneTestCase {
-  private Directory dir;
-  private Document testDoc = new Document();
-  private FieldInfos fieldInfos = null;
+  private static Directory dir;
+  private static Document testDoc = new Document();
+  private static FieldInfos fieldInfos = null;
   private final static String TEST_SEGMENT_NAME = "_0";
 
-  @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  @BeforeClass
+  public static void beforeClass() throws Exception {
     fieldInfos = new FieldInfos();
     DocHelper.setupDoc(testDoc);
     _TestUtil.add(testDoc, fieldInfos);
@@ -61,10 +62,12 @@ public class TestFieldsReader extends LuceneTestCase {
     FaultyIndexInput.doFail = false;
   }
 
-  @Override
-  public void tearDown() throws Exception {
+  @AfterClass
+  public static void afterClass() throws Exception {
     dir.close();
-    super.tearDown();
+    dir = null;
+    fieldInfos = null;
+    testDoc = null;
   }
   public void test() throws IOException {
     assertTrue(dir != null);
@@ -302,7 +305,7 @@ public class TestFieldsReader extends LuceneTestCase {
     FieldsReader reader;
     long lazyTime = 0;
     long regularTime = 0;
-    int length = 50;
+    int length = 10;
     Set<String> lazyFieldNames = new HashSet<String>();
     lazyFieldNames.add(DocHelper.LARGE_LAZY_FIELD_KEY);
     SetBasedFieldSelector fieldSelector = new SetBasedFieldSelector(Collections. <String> emptySet(), lazyFieldNames);
@@ -231,8 +231,8 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
     for (int i = 0; i < numThreads.length; i++) {
       AtomicInteger numDocs = new AtomicInteger(numDocumentsToIndex);
       MockDirectoryWrapper dir = newDirectory();
-      // mock a very slow harddisk here so that flushing is very slow
-      dir.setThrottling(MockDirectoryWrapper.Throttling.ALWAYS);
+      // mock a very slow harddisk sometimes here so that flushing is very slow
+      dir.setThrottling(MockDirectoryWrapper.Throttling.SOMETIMES);
       IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT,
           new MockAnalyzer(random));
       iwc.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
@@ -42,7 +42,7 @@ import org.apache.lucene.util._TestUtil;
 public class TestGlobalFieldNumbers extends LuceneTestCase {
 
   public void testGlobalFieldNumberFiles() throws IOException {
-    for (int i = 0; i < 39; i++) {
+    for (int i = 0; i < (TEST_NIGHTLY ? 39 : 13); i++) {
       Directory dir = newDirectory();
       {
         IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
@@ -113,7 +113,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
   }
 
   public void testIndexReaderCommit() throws IOException {
-    for (int i = 0; i < 39; i++) {
+    for (int i = 0; i < (TEST_NIGHTLY ? 39 : 13); i++) {
       Directory dir = newDirectory();
       {
         IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
@@ -156,7 +156,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
   }
 
   public void testGlobalFieldNumberFilesAcrossCommits() throws IOException {
-    for (int i = 0; i < 39; i++) {
+    for (int i = 0; i < (TEST_NIGHTLY ? 39 : 13); i++) {
       Directory dir = newDirectory();
       {
         IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
@@ -207,7 +207,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
   }
 
   public void testGlobalFieldNumberOnOldCommit() throws IOException {
-    for (int i = 0; i < 39; i++) {
+    for (int i = 0; i < (TEST_NIGHTLY ? 39 : 13); i++) {
       Directory dir = newDirectory();
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
           TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(
@@ -304,7 +304,7 @@ public class TestIndexReader extends LuceneTestCase
     d.close();
   }
 
-  private void assertTermDocsCount(String msg,
+  static void assertTermDocsCount(String msg,
                                    IndexReader reader,
                                    Term term,
                                    int expected)
@@ -322,50 +322,6 @@ public class TestIndexReader extends LuceneTestCase
     assertEquals(msg + ", count mismatch", expected, count);
   }
 
-  public void testBasicDelete() throws IOException {
-    Directory dir = newDirectory();
-
-    IndexWriter writer = null;
-    IndexReader reader = null;
-    Term searchTerm = new Term("content", "aaa");
-
-    // add 100 documents with term : aaa
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-    writer.setInfoStream(VERBOSE ? System.out : null);
-    for (int i = 0; i < 100; i++) {
-      addDoc(writer, searchTerm.text());
-    }
-    writer.close();
-
-    // OPEN READER AT THIS POINT - this should fix the view of the
-    // index at the point of having 100 "aaa" documents and 0 "bbb"
-    reader = IndexReader.open(dir, false);
-    assertEquals("first docFreq", 100, reader.docFreq(searchTerm));
-    assertTermDocsCount("first reader", reader, searchTerm, 100);
-    reader.close();
-
-    // DELETE DOCUMENTS CONTAINING TERM: aaa
-    int deleted = 0;
-    reader = IndexReader.open(dir, false);
-    deleted = reader.deleteDocuments(searchTerm);
-    assertEquals("deleted count", 100, deleted);
-    assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm));
-    assertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
-
-    // open a 2nd reader to make sure first reader can
-    // commit its changes (.del) while second reader
-    // is open:
-    IndexReader reader2 = IndexReader.open(dir, false);
-    reader.close();
-
-    // CREATE A NEW READER and re-test
-    reader = IndexReader.open(dir, false);
-    assertEquals("deleted docFreq", 0, reader.docFreq(searchTerm));
-    assertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
-    reader.close();
-    reader2.close();
-    dir.close();
-  }
 
   public void testBinaryFields() throws IOException {
     Directory dir = newDirectory();
|
||||||
dir.close();
|
dir.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
public void testDeleteReaderWriterConflictUnoptimized() throws IOException{
|
|
||||||
deleteReaderWriterConflict(false);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ??? public void testOpenEmptyDirectory() throws IOException{
|
/* ??? public void testOpenEmptyDirectory() throws IOException{
|
||||||
String dirName = "test.empty";
|
String dirName = "test.empty";
|
||||||
File fileDirName = new File(dirName);
|
File fileDirName = new File(dirName);
|
||||||
|
@@ -620,90 +571,6 @@ public class TestIndexReader extends LuceneTestCase
     rmDir(fileDirName);
   }*/
 
-  public void testDeleteReaderWriterConflictOptimized() throws IOException{
-    deleteReaderWriterConflict(true);
-  }
-
-  private void deleteReaderWriterConflict(boolean optimize) throws IOException {
-    //Directory dir = new RAMDirectory();
-    Directory dir = newDirectory();
-
-    Term searchTerm = new Term("content", "aaa");
-    Term searchTerm2 = new Term("content", "bbb");
-
-    // add 100 documents with term : aaa
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
-    for (int i = 0; i < 100; i++) {
-      addDoc(writer, searchTerm.text());
-    }
-    writer.close();
-
-    // OPEN READER AT THIS POINT - this should fix the view of the
-    // index at the point of having 100 "aaa" documents and 0 "bbb"
-    IndexReader reader = IndexReader.open(dir, false);
-    assertEquals("first docFreq", 100, reader.docFreq(searchTerm));
-    assertEquals("first docFreq", 0, reader.docFreq(searchTerm2));
-    assertTermDocsCount("first reader", reader, searchTerm, 100);
-    assertTermDocsCount("first reader", reader, searchTerm2, 0);
-
-    // add 100 documents with term : bbb
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
-    for (int i = 0; i < 100; i++) {
-      addDoc(writer, searchTerm2.text());
-    }
-
-    // REQUEST OPTIMIZATION
-    // This causes a new segment to become current for all subsequent
-    // searchers. Because of this, deletions made via a previously open
-    // reader, which would be applied to that reader's segment, are lost
-    // for subsequent searchers/readers
-    if(optimize)
-      writer.optimize();
-    writer.close();
-
-    // The reader should not see the new data
-    assertEquals("first docFreq", 100, reader.docFreq(searchTerm));
-    assertEquals("first docFreq", 0, reader.docFreq(searchTerm2));
-    assertTermDocsCount("first reader", reader, searchTerm, 100);
-    assertTermDocsCount("first reader", reader, searchTerm2, 0);
-
-
-    // DELETE DOCUMENTS CONTAINING TERM: aaa
-    // NOTE: the reader was created when only "aaa" documents were in
-    int deleted = 0;
-    try {
-      deleted = reader.deleteDocuments(searchTerm);
-      fail("Delete allowed on an index reader with stale segment information");
-    } catch (StaleReaderException e) {
-      /* success */
-    }
-
-    // Re-open index reader and try again. This time it should see
-    // the new data.
-    reader.close();
-    reader = IndexReader.open(dir, false);
-    assertEquals("first docFreq", 100, reader.docFreq(searchTerm));
-    assertEquals("first docFreq", 100, reader.docFreq(searchTerm2));
-    assertTermDocsCount("first reader", reader, searchTerm, 100);
-    assertTermDocsCount("first reader", reader, searchTerm2, 100);
-
-    deleted = reader.deleteDocuments(searchTerm);
-    assertEquals("deleted count", 100, deleted);
-    assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm));
-    assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm2));
-    assertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
-    assertTermDocsCount("deleted termDocs", reader, searchTerm2, 100);
-    reader.close();
-
-    // CREATE A NEW READER and re-test
-    reader = IndexReader.open(dir, false);
-    assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm2));
-    assertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
-    assertTermDocsCount("deleted termDocs", reader, searchTerm2, 100);
-    reader.close();
-    dir.close();
-  }
-
   public void testFilesOpenClose() throws IOException {
     // Create initial data set
     File dirFile = _TestUtil.getTempDir("TestIndexReader.testFilesOpenClose");
@@ -812,259 +679,6 @@ public class TestIndexReader extends LuceneTestCase
     dir.close();
   }
 
-  public void testUndeleteAll() throws IOException {
-    Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-    addDocumentWithFields(writer);
-    addDocumentWithFields(writer);
-    writer.close();
-    IndexReader reader = IndexReader.open(dir, false);
-    reader.deleteDocument(0);
-    reader.deleteDocument(1);
-    reader.undeleteAll();
-    reader.close();
-    reader = IndexReader.open(dir, false);
-    assertEquals(2, reader.numDocs()); // nothing has really been deleted thanks to undeleteAll()
-    reader.close();
-    dir.close();
-  }
-
-  public void testUndeleteAllAfterClose() throws IOException {
-    Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-    addDocumentWithFields(writer);
-    addDocumentWithFields(writer);
-    writer.close();
-    IndexReader reader = IndexReader.open(dir, false);
-    reader.deleteDocument(0);
-    reader.close();
-    reader = IndexReader.open(dir, false);
-    reader.undeleteAll();
-    assertEquals(2, reader.numDocs()); // nothing has really been deleted thanks to undeleteAll()
-    reader.close();
-    dir.close();
-  }
-
-  public void testUndeleteAllAfterCloseThenReopen() throws IOException {
-    Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-    addDocumentWithFields(writer);
-    addDocumentWithFields(writer);
-    writer.close();
-    IndexReader reader = IndexReader.open(dir, false);
-    reader.deleteDocument(0);
-    reader.close();
-    reader = IndexReader.open(dir, false);
-    reader.undeleteAll();
-    reader.close();
-    reader = IndexReader.open(dir, false);
-    assertEquals(2, reader.numDocs()); // nothing has really been deleted thanks to undeleteAll()
-    reader.close();
-    dir.close();
-  }
-
-  public void testDeleteReaderReaderConflictUnoptimized() throws IOException{
-    deleteReaderReaderConflict(false);
-  }
-
-  public void testDeleteReaderReaderConflictOptimized() throws IOException{
-    deleteReaderReaderConflict(true);
-  }
-
-  /**
-   * Make sure if reader tries to commit but hits disk
-   * full that reader remains consistent and usable.
-   */
-  public void testDiskFull() throws IOException {
-
-    Term searchTerm = new Term("content", "aaa");
-    int START_COUNT = 157;
-    int END_COUNT = 144;
-
-    // First build up a starting index:
-    MockDirectoryWrapper startDir = newDirectory();
-    IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-    if (VERBOSE) {
-      System.out.println("TEST: create initial index");
-      writer.setInfoStream(System.out);
-    }
-    for(int i=0;i<157;i++) {
-      Document d = new Document();
-      d.add(newField("id", Integer.toString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
-      d.add(newField("content", "aaa " + i, Field.Store.NO, Field.Index.ANALYZED));
-      writer.addDocument(d);
-      if (0==i%10)
-        writer.commit();
-    }
-    writer.close();
-
-    {
-      IndexReader r = IndexReader.open(startDir);
-      IndexSearcher searcher = newSearcher(r);
-      ScoreDoc[] hits = null;
-      try {
-        hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
-      } catch (IOException e) {
-        e.printStackTrace();
-        fail("exception when init searching: " + e);
-      }
-      searcher.close();
-      r.close();
-    }
-
-    long diskUsage = startDir.getRecomputedActualSizeInBytes();
-    long diskFree = diskUsage+100;
-
-    IOException err = null;
-
-    boolean done = false;
-    boolean gotExc = false;
-
-    // Iterate w/ ever increasing free disk space:
-    while(!done) {
-      MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir));
-
-      // If IndexReader hits disk full, it can write to
-      // the same files again.
-      dir.setPreventDoubleWrite(false);
-
-      IndexReader reader = IndexReader.open(dir, false);
-
-      // For each disk size, first try to commit against
-      // dir that will hit random IOExceptions & disk
-      // full; after, give it infinite disk space & turn
-      // off random IOExceptions & retry w/ same reader:
-      boolean success = false;
-
-      for(int x=0;x<2;x++) {
-
-        double rate = 0.05;
-        double diskRatio = ((double) diskFree)/diskUsage;
-        long thisDiskFree;
-        String testName;
-
-        if (0 == x) {
-          thisDiskFree = diskFree;
-          if (diskRatio >= 2.0) {
-            rate /= 2;
-          }
-          if (diskRatio >= 4.0) {
-            rate /= 2;
-          }
-          if (diskRatio >= 6.0) {
-            rate = 0.0;
-          }
-          if (VERBOSE) {
-            System.out.println("\ncycle: " + diskFree + " bytes");
-          }
-          testName = "disk full during reader.close() @ " + thisDiskFree + " bytes";
-        } else {
-          thisDiskFree = 0;
-          rate = 0.0;
-          if (VERBOSE) {
-            System.out.println("\ncycle: same writer: unlimited disk space");
-          }
-          testName = "reader re-use after disk full";
-        }
-
-        dir.setMaxSizeInBytes(thisDiskFree);
-        dir.setRandomIOExceptionRate(rate);
-        Similarity sim = new DefaultSimilarity();
-        try {
-          if (0 == x) {
-            int docId = 12;
-            for(int i=0;i<13;i++) {
-              reader.deleteDocument(docId);
-              reader.setNorm(docId, "content", sim.encodeNormValue(2.0f));
-              docId += 12;
-            }
-          }
-          reader.close();
-          success = true;
-          if (0 == x) {
-            done = true;
-          }
-        } catch (IOException e) {
-          if (VERBOSE) {
-            System.out.println("  hit IOException: " + e);
-            e.printStackTrace(System.out);
-          }
-          err = e;
-          gotExc = true;
-          if (1 == x) {
-            e.printStackTrace();
-            fail(testName + " hit IOException after disk space was freed up");
-          }
-        }
-
-        // Finally, verify index is not corrupt, and, if
-        // we succeeded, we see all docs changed, and if
-        // we failed, we see either all docs or no docs
-        // changed (transactional semantics):
-        IndexReader newReader = null;
-        try {
-          newReader = IndexReader.open(dir, false);
-        } catch (IOException e) {
-          e.printStackTrace();
-          fail(testName + ":exception when creating IndexReader after disk full during close: " + e);
-        }
-        /*
-        int result = newReader.docFreq(searchTerm);
-        if (success) {
-          if (result != END_COUNT) {
-            fail(testName + ": method did not throw exception but docFreq('aaa') is " + result + " instead of expected " + END_COUNT);
-          }
-        } else {
-          // On hitting exception we still may have added
-          // all docs:
-          if (result != START_COUNT && result != END_COUNT) {
-            err.printStackTrace();
-            fail(testName + ": method did throw exception but docFreq('aaa') is " + result + " instead of expected " + START_COUNT + " or " + END_COUNT);
-          }
-        }
-        */
-
-        IndexSearcher searcher = newSearcher(newReader);
-        ScoreDoc[] hits = null;
-        try {
-          hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
-        } catch (IOException e) {
-          e.printStackTrace();
-          fail(testName + ": exception when searching: " + e);
-        }
-        int result2 = hits.length;
-        if (success) {
-          if (result2 != END_COUNT) {
-            fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + END_COUNT);
-          }
-        } else {
-          // On hitting exception we still may have added
-          // all docs:
-          if (result2 != START_COUNT && result2 != END_COUNT) {
-            err.printStackTrace();
-            fail(testName + ": method did throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT);
-          }
-        }
-
-        searcher.close();
-        newReader.close();
-
-        if (result2 == END_COUNT) {
-          if (!gotExc)
-            fail("never hit disk full");
-          break;
-        }
-      }
-
-      dir.close();
-
-      // Try again with 10 more bytes of free space:
-      diskFree += 10;
-    }
-
-    startDir.close();
-  }
-
   public void testDocsOutOfOrderJIRA140() throws IOException {
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
@ -1161,133 +775,7 @@ public class TestIndexReader extends LuceneTestCase
|
||||||
dir.close();
|
dir.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testMultiReaderDeletes() throws Exception {
|
static void addDocumentWithFields(IndexWriter writer) throws IOException
|
||||||
Directory dir = newDirectory();
|
|
||||||
RandomIndexWriter w= new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
|
|
||||||
Document doc = new Document();
|
|
||||||
doc.add(newField("f", "doctor", Field.Store.NO, Field.Index.NOT_ANALYZED));
|
|
||||||
w.addDocument(doc);
|
|
||||||
doc = new Document();
|
|
||||||
w.commit();
|
|
||||||
doc.add(newField("f", "who", Field.Store.NO, Field.Index.NOT_ANALYZED));
|
|
||||||
w.addDocument(doc);
|
|
||||||
IndexReader r = new SlowMultiReaderWrapper(w.getReader());
|
|
||||||
w.close();
|
|
||||||
|
|
||||||
assertNull(r.getDeletedDocs());
|
|
||||||
r.close();
|
|
||||||
|
|
||||||
r = new SlowMultiReaderWrapper(IndexReader.open(dir, false));
|
|
||||||
|
|
||||||
assertNull(r.getDeletedDocs());
|
|
||||||
assertEquals(1, r.deleteDocuments(new Term("f", "doctor")));
|
|
||||||
assertNotNull(r.getDeletedDocs());
|
|
||||||
assertTrue(r.getDeletedDocs().get(0));
|
|
||||||
assertEquals(1, r.deleteDocuments(new Term("f", "who")));
|
|
||||||
assertTrue(r.getDeletedDocs().get(1));
|
|
||||||
r.close();
|
|
||||||
dir.close();
|
|
||||||
}
|
|
||||||
|
|
||||||
private void deleteReaderReaderConflict(boolean optimize) throws IOException {
|
|
||||||
Directory dir = newDirectory();
|
|
||||||
|
[TestIndexReader.java: removed body of deleteReaderReaderConflict(boolean optimize); it reappears verbatim in the new TestIndexReaderDelete.java below.]
-    private void addDocumentWithFields(IndexWriter writer) throws IOException
+    static void addDocumentWithFields(IndexWriter writer) throws IOException
     {
         Document doc = new Document();
         doc.add(newField("keyword","test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
@@ -1297,7 +785,7 @@ public class TestIndexReader extends LuceneTestCase
         writer.addDocument(doc);
     }

-    private void addDocumentWithDifferentFields(IndexWriter writer) throws IOException
+    static void addDocumentWithDifferentFields(IndexWriter writer) throws IOException
     {
         Document doc = new Document();
         doc.add(newField("keyword2","test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
@@ -1307,7 +795,7 @@ public class TestIndexReader extends LuceneTestCase
         writer.addDocument(doc);
     }

-    private void addDocumentWithTermVectorFields(IndexWriter writer) throws IOException
+    static void addDocumentWithTermVectorFields(IndexWriter writer) throws IOException
     {
         Document doc = new Document();
         doc.add(newField("tvnot","tvnot", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
@@ -1319,7 +807,7 @@ public class TestIndexReader extends LuceneTestCase
         writer.addDocument(doc);
     }

-    private void addDoc(IndexWriter writer, String value) throws IOException {
+    static void addDoc(IndexWriter writer, String value) throws IOException {
         Document doc = new Document();
         doc.add(newField("content", value, Field.Store.NO, Field.Index.ANALYZED));
         writer.addDocument(doc);
@@ -1557,28 +1045,7 @@ public class TestIndexReader extends LuceneTestCase
         dir.close();
     }

-    // LUCENE-1647
-    public void testIndexReaderUnDeleteAll() throws Exception {
-      MockDirectoryWrapper dir = newDirectory();
-      dir.setPreventDoubleWrite(false);
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-      writer.addDocument(createDocument("a"));
-      writer.addDocument(createDocument("b"));
-      writer.addDocument(createDocument("c"));
-      writer.close();
-      IndexReader reader = IndexReader.open(dir, false);
-      reader.deleteDocuments(new Term("id", "a"));
-      reader.flush();
-      reader.deleteDocuments(new Term("id", "b"));
-      reader.undeleteAll();
-      reader.deleteDocuments(new Term("id", "b"));
-      reader.close();
-      IndexReader.open(dir, true).close();
-      dir.close();
-    }
-
-    private Document createDocument(String id) {
+    static Document createDocument(String id) {
       Document doc = new Document();
       doc.add(newField("id", id, Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS));
       return doc;
@@ -1692,54 +1159,6 @@ public class TestIndexReader extends LuceneTestCase
       dir.close();
     }

-    // LUCENE-1579: Make sure all SegmentReaders are new when
-    // reopen switches readOnly
-    public void testReopenChangeReadonly() throws Exception {
-      Directory dir = newDirectory();
-      IndexWriter writer = new IndexWriter(
-          dir,
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
-              setMaxBufferedDocs(-1).
-              setMergePolicy(newLogMergePolicy(10))
-      );
-      Document doc = new Document();
-      doc.add(newField("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
-      writer.addDocument(doc);
-      writer.commit();
-
-      // Open reader1
-      IndexReader r = IndexReader.open(dir, false);
-      assertTrue(r instanceof DirectoryReader);
-      IndexReader r1 = getOnlySegmentReader(r);
-      final int[] ints = FieldCache.DEFAULT.getInts(r1, "number");
-      assertEquals(1, ints.length);
-      assertEquals(17, ints[0]);
-
-      // Reopen to readonly w/ no changes
-      IndexReader r3 = r.reopen(true);
-      assertTrue(((DirectoryReader) r3).readOnly);
-      r3.close();
-
-      // Add new segment
-      writer.addDocument(doc);
-      writer.commit();
-
-      // Reopen reader1 --> reader2
-      IndexReader r2 = r.reopen(true);
-      r.close();
-      assertTrue(((DirectoryReader) r2).readOnly);
-      IndexReader[] subs = r2.getSequentialSubReaders();
-      final int[] ints2 = FieldCache.DEFAULT.getInts(subs[0], "number");
-      r2.close();
-
-      assertTrue(((SegmentReader) subs[0]).readOnly);
-      assertTrue(((SegmentReader) subs[1]).readOnly);
-      assertTrue(ints == ints2);
-
-      writer.close();
-      dir.close();
-    }
-
     // LUCENE-1586: getUniqueTermCount
     public void testUniqueTermCount() throws Exception {
       Directory dir = newDirectory();
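Why these helpers went from private to static: the new test classes added below pull them in through static imports. A minimal illustration of a call site (hypothetical, but mirroring the imports that TestIndexReaderDelete.java actually declares below):

    // In another test class in org.apache.lucene.index:
    import static org.apache.lucene.index.TestIndexReader.addDoc;
    import static org.apache.lucene.index.TestIndexReader.createDocument;

    // ... inside a test method, given an IndexWriter 'writer':
    addDoc(writer, "aaa");                    // resolvable only now that addDoc is static
    writer.addDocument(createDocument("a"));  // likewise for createDocument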
@@ -0,0 +1,374 @@
package org.apache.lucene.index;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase;

import static org.apache.lucene.index.TestIndexReader.addDoc;
import static org.apache.lucene.index.TestIndexReader.addDocumentWithFields;
import static org.apache.lucene.index.TestIndexReader.assertTermDocsCount;
import static org.apache.lucene.index.TestIndexReader.createDocument;

public class TestIndexReaderDelete extends LuceneTestCase {
  private void deleteReaderReaderConflict(boolean optimize) throws IOException {
    Directory dir = newDirectory();

    Term searchTerm1 = new Term("content", "aaa");
    Term searchTerm2 = new Term("content", "bbb");
    Term searchTerm3 = new Term("content", "ccc");

    //  add 100 documents with term : aaa
    //  add 100 documents with term : bbb
    //  add 100 documents with term : ccc
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
    for (int i = 0; i < 100; i++) {
      addDoc(writer, searchTerm1.text());
      addDoc(writer, searchTerm2.text());
      addDoc(writer, searchTerm3.text());
    }
    if (optimize)
      writer.optimize();
    writer.close();

    // OPEN TWO READERS
    // Both readers get segment info as exists at this time
    IndexReader reader1 = IndexReader.open(dir, false);
    assertEquals("first opened", 100, reader1.docFreq(searchTerm1));
    assertEquals("first opened", 100, reader1.docFreq(searchTerm2));
    assertEquals("first opened", 100, reader1.docFreq(searchTerm3));
    assertTermDocsCount("first opened", reader1, searchTerm1, 100);
    assertTermDocsCount("first opened", reader1, searchTerm2, 100);
    assertTermDocsCount("first opened", reader1, searchTerm3, 100);

    IndexReader reader2 = IndexReader.open(dir, false);
    assertEquals("first opened", 100, reader2.docFreq(searchTerm1));
    assertEquals("first opened", 100, reader2.docFreq(searchTerm2));
    assertEquals("first opened", 100, reader2.docFreq(searchTerm3));
    assertTermDocsCount("first opened", reader2, searchTerm1, 100);
    assertTermDocsCount("first opened", reader2, searchTerm2, 100);
    assertTermDocsCount("first opened", reader2, searchTerm3, 100);

    // DELETE DOCS FROM READER 2 and CLOSE IT
    // delete documents containing term: aaa
    // when the reader is closed, the segment info is updated and
    // the first reader is now stale
    reader2.deleteDocuments(searchTerm1);
    assertEquals("after delete 1", 100, reader2.docFreq(searchTerm1));
    assertEquals("after delete 1", 100, reader2.docFreq(searchTerm2));
    assertEquals("after delete 1", 100, reader2.docFreq(searchTerm3));
    assertTermDocsCount("after delete 1", reader2, searchTerm1, 0);
    assertTermDocsCount("after delete 1", reader2, searchTerm2, 100);
    assertTermDocsCount("after delete 1", reader2, searchTerm3, 100);
    reader2.close();

    // Make sure reader 1 is unchanged since it was open earlier
    assertEquals("after delete 1", 100, reader1.docFreq(searchTerm1));
    assertEquals("after delete 1", 100, reader1.docFreq(searchTerm2));
    assertEquals("after delete 1", 100, reader1.docFreq(searchTerm3));
    assertTermDocsCount("after delete 1", reader1, searchTerm1, 100);
    assertTermDocsCount("after delete 1", reader1, searchTerm2, 100);
    assertTermDocsCount("after delete 1", reader1, searchTerm3, 100);

    // ATTEMPT TO DELETE FROM STALE READER
    // delete documents containing term: bbb
    try {
      reader1.deleteDocuments(searchTerm2);
      fail("Delete allowed from a stale index reader");
    } catch (IOException e) {
      /* success */
    }

    // RECREATE READER AND TRY AGAIN
    reader1.close();
    reader1 = IndexReader.open(dir, false);
    assertEquals("reopened", 100, reader1.docFreq(searchTerm1));
    assertEquals("reopened", 100, reader1.docFreq(searchTerm2));
    assertEquals("reopened", 100, reader1.docFreq(searchTerm3));
    assertTermDocsCount("reopened", reader1, searchTerm1, 0);
    assertTermDocsCount("reopened", reader1, searchTerm2, 100);
    assertTermDocsCount("reopened", reader1, searchTerm3, 100);

    reader1.deleteDocuments(searchTerm2);
    assertEquals("deleted 2", 100, reader1.docFreq(searchTerm1));
    assertEquals("deleted 2", 100, reader1.docFreq(searchTerm2));
    assertEquals("deleted 2", 100, reader1.docFreq(searchTerm3));
    assertTermDocsCount("deleted 2", reader1, searchTerm1, 0);
    assertTermDocsCount("deleted 2", reader1, searchTerm2, 0);
    assertTermDocsCount("deleted 2", reader1, searchTerm3, 100);
    reader1.close();

    // Open another reader to confirm that everything is deleted
    reader2 = IndexReader.open(dir, false);
    assertTermDocsCount("reopened 2", reader2, searchTerm1, 0);
    assertTermDocsCount("reopened 2", reader2, searchTerm2, 0);
    assertTermDocsCount("reopened 2", reader2, searchTerm3, 100);
    reader2.close();

    dir.close();
  }

  private void deleteReaderWriterConflict(boolean optimize) throws IOException {
    //Directory dir = new RAMDirectory();
    Directory dir = newDirectory();

    Term searchTerm = new Term("content", "aaa");
    Term searchTerm2 = new Term("content", "bbb");

    //  add 100 documents with term : aaa
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
    for (int i = 0; i < 100; i++) {
      addDoc(writer, searchTerm.text());
    }
    writer.close();

    // OPEN READER AT THIS POINT - this should fix the view of the
    // index at the point of having 100 "aaa" documents and 0 "bbb"
    IndexReader reader = IndexReader.open(dir, false);
    assertEquals("first docFreq", 100, reader.docFreq(searchTerm));
    assertEquals("first docFreq", 0, reader.docFreq(searchTerm2));
    assertTermDocsCount("first reader", reader, searchTerm, 100);
    assertTermDocsCount("first reader", reader, searchTerm2, 0);

    //  add 100 documents with term : bbb
    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
    for (int i = 0; i < 100; i++) {
      addDoc(writer, searchTerm2.text());
    }

    // REQUEST OPTIMIZATION
    // This causes a new segment to become current for all subsequent
    // searchers. Because of this, deletions made via a previously open
    // reader, which would be applied to that reader's segment, are lost
    // for subsequent searchers/readers
    if (optimize)
      writer.optimize();
    writer.close();

    // The reader should not see the new data
    assertEquals("first docFreq", 100, reader.docFreq(searchTerm));
    assertEquals("first docFreq", 0, reader.docFreq(searchTerm2));
    assertTermDocsCount("first reader", reader, searchTerm, 100);
    assertTermDocsCount("first reader", reader, searchTerm2, 0);

    // DELETE DOCUMENTS CONTAINING TERM: aaa
    // NOTE: the reader was created when only "aaa" documents were in
    int deleted = 0;
    try {
      deleted = reader.deleteDocuments(searchTerm);
      fail("Delete allowed on an index reader with stale segment information");
    } catch (StaleReaderException e) {
      /* success */
    }

    // Re-open index reader and try again. This time it should see
    // the new data.
    reader.close();
    reader = IndexReader.open(dir, false);
    assertEquals("first docFreq", 100, reader.docFreq(searchTerm));
    assertEquals("first docFreq", 100, reader.docFreq(searchTerm2));
    assertTermDocsCount("first reader", reader, searchTerm, 100);
    assertTermDocsCount("first reader", reader, searchTerm2, 100);

    deleted = reader.deleteDocuments(searchTerm);
    assertEquals("deleted count", 100, deleted);
    assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm));
    assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm2));
    assertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
    assertTermDocsCount("deleted termDocs", reader, searchTerm2, 100);
    reader.close();

    // CREATE A NEW READER and re-test
    reader = IndexReader.open(dir, false);
    assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm2));
    assertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
    assertTermDocsCount("deleted termDocs", reader, searchTerm2, 100);
    reader.close();
    dir.close();
  }

  public void testBasicDelete() throws IOException {
    Directory dir = newDirectory();

    IndexWriter writer = null;
    IndexReader reader = null;
    Term searchTerm = new Term("content", "aaa");

    //  add 100 documents with term : aaa
    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    writer.setInfoStream(VERBOSE ? System.out : null);
    for (int i = 0; i < 100; i++) {
      addDoc(writer, searchTerm.text());
    }
    writer.close();

    // OPEN READER AT THIS POINT - this should fix the view of the
    // index at the point of having 100 "aaa" documents and 0 "bbb"
    reader = IndexReader.open(dir, false);
    assertEquals("first docFreq", 100, reader.docFreq(searchTerm));
    assertTermDocsCount("first reader", reader, searchTerm, 100);
    reader.close();

    // DELETE DOCUMENTS CONTAINING TERM: aaa
    int deleted = 0;
    reader = IndexReader.open(dir, false);
    deleted = reader.deleteDocuments(searchTerm);
    assertEquals("deleted count", 100, deleted);
    assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm));
    assertTermDocsCount("deleted termDocs", reader, searchTerm, 0);

    // open a 2nd reader to make sure first reader can
    // commit its changes (.del) while second reader
    // is open:
    IndexReader reader2 = IndexReader.open(dir, false);
    reader.close();

    // CREATE A NEW READER and re-test
    reader = IndexReader.open(dir, false);
    assertEquals("deleted docFreq", 0, reader.docFreq(searchTerm));
    assertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
    reader.close();
    reader2.close();
    dir.close();
  }

  public void testDeleteReaderReaderConflictUnoptimized() throws IOException {
    deleteReaderReaderConflict(false);
  }

  public void testDeleteReaderReaderConflictOptimized() throws IOException {
    deleteReaderReaderConflict(true);
  }

  public void testDeleteReaderWriterConflictUnoptimized() throws IOException {
    deleteReaderWriterConflict(false);
  }

  public void testDeleteReaderWriterConflictOptimized() throws IOException {
    deleteReaderWriterConflict(true);
  }

  public void testMultiReaderDeletes() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
    Document doc = new Document();
    doc.add(newField("f", "doctor", Field.Store.NO, Field.Index.NOT_ANALYZED));
    w.addDocument(doc);
    doc = new Document();
    w.commit();
    doc.add(newField("f", "who", Field.Store.NO, Field.Index.NOT_ANALYZED));
    w.addDocument(doc);
    IndexReader r = new SlowMultiReaderWrapper(w.getReader());
    w.close();

    assertNull(r.getDeletedDocs());
    r.close();

    r = new SlowMultiReaderWrapper(IndexReader.open(dir, false));

    assertNull(r.getDeletedDocs());
    assertEquals(1, r.deleteDocuments(new Term("f", "doctor")));
    assertNotNull(r.getDeletedDocs());
    assertTrue(r.getDeletedDocs().get(0));
    assertEquals(1, r.deleteDocuments(new Term("f", "who")));
    assertTrue(r.getDeletedDocs().get(1));
    r.close();
    dir.close();
  }

  public void testUndeleteAll() throws IOException {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    addDocumentWithFields(writer);
    addDocumentWithFields(writer);
    writer.close();
    IndexReader reader = IndexReader.open(dir, false);
    reader.deleteDocument(0);
    reader.deleteDocument(1);
    reader.undeleteAll();
    reader.close();
    reader = IndexReader.open(dir, false);
    assertEquals(2, reader.numDocs());  // nothing has really been deleted thanks to undeleteAll()
    reader.close();
    dir.close();
  }

  public void testUndeleteAllAfterClose() throws IOException {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    addDocumentWithFields(writer);
    addDocumentWithFields(writer);
    writer.close();
    IndexReader reader = IndexReader.open(dir, false);
    reader.deleteDocument(0);
    reader.close();
    reader = IndexReader.open(dir, false);
    reader.undeleteAll();
    assertEquals(2, reader.numDocs());  // nothing has really been deleted thanks to undeleteAll()
    reader.close();
    dir.close();
  }

  public void testUndeleteAllAfterCloseThenReopen() throws IOException {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    addDocumentWithFields(writer);
    addDocumentWithFields(writer);
    writer.close();
    IndexReader reader = IndexReader.open(dir, false);
    reader.deleteDocument(0);
    reader.close();
    reader = IndexReader.open(dir, false);
    reader.undeleteAll();
    reader.close();
    reader = IndexReader.open(dir, false);
    assertEquals(2, reader.numDocs());  // nothing has really been deleted thanks to undeleteAll()
    reader.close();
    dir.close();
  }

  // LUCENE-1647
  public void testIndexReaderUnDeleteAll() throws Exception {
    MockDirectoryWrapper dir = newDirectory();
    dir.setPreventDoubleWrite(false);
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    writer.addDocument(createDocument("a"));
    writer.addDocument(createDocument("b"));
    writer.addDocument(createDocument("c"));
    writer.close();
    IndexReader reader = IndexReader.open(dir, false);
    reader.deleteDocuments(new Term("id", "a"));
    reader.flush();
    reader.deleteDocuments(new Term("id", "b"));
    reader.undeleteAll();
    reader.deleteDocuments(new Term("id", "b"));
    reader.close();
    IndexReader.open(dir, true).close();
    dir.close();
  }
}
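For context on the contract this new test class exercises: a write-through IndexReader pins the commit it was opened on, takes the index write lock on its first delete/undelete call, and refuses if a newer commit exists. A standalone sketch of that behavior (not part of this patch; it assumes a 3.x-era WhitespaceAnalyzer and a Version constant such as Version.LUCENE_31 are available on the classpath):

    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.index.*;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    public class StaleReaderSketch {
      public static void main(String[] args) throws Exception {
        RAMDirectory dir = new RAMDirectory();

        // Commit one document.
        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
            Version.LUCENE_31, new WhitespaceAnalyzer(Version.LUCENE_31)));
        Document doc = new Document();
        doc.add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
        w.addDocument(doc);
        w.close();

        // Open a writable (non-read-only) reader; it pins this commit.
        IndexReader reader = IndexReader.open(dir, false);

        // Commit again behind the reader's back, making its view stale.
        w = new IndexWriter(dir, new IndexWriterConfig(
            Version.LUCENE_31, new WhitespaceAnalyzer(Version.LUCENE_31)));
        w.addDocument(doc);
        w.close();

        try {
          // The first delete must acquire the write lock; the stale view is rejected.
          reader.deleteDocuments(new Term("content", "aaa"));
        } catch (StaleReaderException expected) {
          System.out.println("expected: " + expected);
        } finally {
          reader.close();
          dir.close();
        }
      }
    }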
@@ -0,0 +1,228 @@
package org.apache.lucene.index;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;

public class TestIndexReaderOnDiskFull extends LuceneTestCase {
  /**
   * Make sure if reader tries to commit but hits disk
   * full that reader remains consistent and usable.
   */
  public void testDiskFull() throws IOException {

    Term searchTerm = new Term("content", "aaa");
    int START_COUNT = 157;
    int END_COUNT = 144;

    // First build up a starting index:
    MockDirectoryWrapper startDir = newDirectory();
    IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    if (VERBOSE) {
      System.out.println("TEST: create initial index");
      writer.setInfoStream(System.out);
    }
    for(int i=0;i<157;i++) {
      Document d = new Document();
      d.add(newField("id", Integer.toString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
      d.add(newField("content", "aaa " + i, Field.Store.NO, Field.Index.ANALYZED));
      writer.addDocument(d);
      if (0==i%10)
        writer.commit();
    }
    writer.close();

    {
      IndexReader r = IndexReader.open(startDir);
      IndexSearcher searcher = newSearcher(r);
      ScoreDoc[] hits = null;
      try {
        hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
      } catch (IOException e) {
        e.printStackTrace();
        fail("exception when init searching: " + e);
      }
      searcher.close();
      r.close();
    }

    long diskUsage = startDir.getRecomputedActualSizeInBytes();
    long diskFree = diskUsage+100;

    IOException err = null;

    boolean done = false;
    boolean gotExc = false;

    // Iterate w/ ever increasing free disk space:
    while(!done) {
      MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir));

      // If IndexReader hits disk full, it can write to
      // the same files again.
      dir.setPreventDoubleWrite(false);

      IndexReader reader = IndexReader.open(dir, false);

      // For each disk size, first try to commit against
      // dir that will hit random IOExceptions & disk
      // full; after, give it infinite disk space & turn
      // off random IOExceptions & retry w/ same reader:
      boolean success = false;

      for(int x=0;x<2;x++) {

        double rate = 0.05;
        double diskRatio = ((double) diskFree)/diskUsage;
        long thisDiskFree;
        String testName;

        if (0 == x) {
          thisDiskFree = diskFree;
          if (diskRatio >= 2.0) {
            rate /= 2;
          }
          if (diskRatio >= 4.0) {
            rate /= 2;
          }
          if (diskRatio >= 6.0) {
            rate = 0.0;
          }
          if (VERBOSE) {
            System.out.println("\ncycle: " + diskFree + " bytes");
          }
          testName = "disk full during reader.close() @ " + thisDiskFree + " bytes";
        } else {
          thisDiskFree = 0;
          rate = 0.0;
          if (VERBOSE) {
            System.out.println("\ncycle: same writer: unlimited disk space");
          }
          testName = "reader re-use after disk full";
        }

        dir.setMaxSizeInBytes(thisDiskFree);
        dir.setRandomIOExceptionRate(rate);
        Similarity sim = new DefaultSimilarity();
        try {
          if (0 == x) {
            int docId = 12;
            for(int i=0;i<13;i++) {
              reader.deleteDocument(docId);
              reader.setNorm(docId, "content", sim.encodeNormValue(2.0f));
              docId += 12;
            }
          }
          reader.close();
          success = true;
          if (0 == x) {
            done = true;
          }
        } catch (IOException e) {
          if (VERBOSE) {
            System.out.println("  hit IOException: " + e);
            e.printStackTrace(System.out);
          }
          err = e;
          gotExc = true;
          if (1 == x) {
            e.printStackTrace();
            fail(testName + " hit IOException after disk space was freed up");
          }
        }

        // Finally, verify index is not corrupt, and, if
        // we succeeded, we see all docs changed, and if
        // we failed, we see either all docs or no docs
        // changed (transactional semantics):
        IndexReader newReader = null;
        try {
          newReader = IndexReader.open(dir, false);
        } catch (IOException e) {
          e.printStackTrace();
          fail(testName + ":exception when creating IndexReader after disk full during close: " + e);
        }
        /*
        int result = newReader.docFreq(searchTerm);
        if (success) {
          if (result != END_COUNT) {
            fail(testName + ": method did not throw exception but docFreq('aaa') is " + result + " instead of expected " + END_COUNT);
          }
        } else {
          // On hitting exception we still may have added
          // all docs:
          if (result != START_COUNT && result != END_COUNT) {
            err.printStackTrace();
            fail(testName + ": method did throw exception but docFreq('aaa') is " + result + " instead of expected " + START_COUNT + " or " + END_COUNT);
          }
        }
        */

        IndexSearcher searcher = newSearcher(newReader);
        ScoreDoc[] hits = null;
        try {
          hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
        } catch (IOException e) {
          e.printStackTrace();
          fail(testName + ": exception when searching: " + e);
        }
        int result2 = hits.length;
        if (success) {
          if (result2 != END_COUNT) {
            fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + END_COUNT);
          }
        } else {
          // On hitting exception we still may have added
          // all docs:
          if (result2 != START_COUNT && result2 != END_COUNT) {
            err.printStackTrace();
            fail(testName + ": method did throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT);
          }
        }

        searcher.close();
        newReader.close();

        if (result2 == END_COUNT) {
          if (!gotExc)
            fail("never hit disk full");
          break;
        }
      }

      dir.close();

      // Try again with 10 more bytes of free space:
      diskFree += 10;
    }

    startDir.close();
  }
}
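Condensed, the disk-full loop above follows this shape; this is a sketch only, assuming it sits in a LuceneTestCase subclass in this same package (which supplies the random field):

    private void retryUntilDiskFullSurvived(MockDirectoryWrapper startDir) throws IOException {
      long diskFree = startDir.getRecomputedActualSizeInBytes() + 100;
      boolean done = false;
      while (!done) {
        MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir));
        dir.setPreventDoubleWrite(false);          // a retried commit may rewrite segments_N
        IndexReader reader = IndexReader.open(dir, false);
        for (int pass = 0; pass < 2; pass++) {
          // pass 0: capped space plus random IOExceptions; pass 1: cap lifted, no faults
          dir.setMaxSizeInBytes(pass == 0 ? diskFree : 0);   // 0 means unlimited
          dir.setRandomIOExceptionRate(pass == 0 ? 0.05 : 0.0);
          try {
            if (pass == 0) {
              reader.deleteDocument(0);            // buffer a change to be committed
            }
            reader.close();                        // the commit happens here
            if (pass == 0) {
              done = true;                         // survived under the cap
            }
            break;
          } catch (IOException e) {
            if (pass == 1) {
              throw e;                             // must succeed once space is freed
            }
          }
        }
        dir.close();
        diskFree += 10;                            // widen the budget and try again
      }
      startDir.close();
    }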
@@ -36,6 +36,7 @@ import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.search.DefaultSimilarity;
+import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.Similarity;
@@ -1244,4 +1245,52 @@ public class TestIndexReaderReopen extends LuceneTestCase {
     r.close();
     dir.close();
   }
+
+  // LUCENE-1579: Make sure all SegmentReaders are new when
+  // reopen switches readOnly
+  public void testReopenChangeReadonly() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriter writer = new IndexWriter(
+        dir,
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+            setMaxBufferedDocs(-1).
+            setMergePolicy(newLogMergePolicy(10))
+    );
+    Document doc = new Document();
+    doc.add(newField("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
+    writer.addDocument(doc);
+    writer.commit();
+
+    // Open reader1
+    IndexReader r = IndexReader.open(dir, false);
+    assertTrue(r instanceof DirectoryReader);
+    IndexReader r1 = getOnlySegmentReader(r);
+    final int[] ints = FieldCache.DEFAULT.getInts(r1, "number");
+    assertEquals(1, ints.length);
+    assertEquals(17, ints[0]);
+
+    // Reopen to readonly w/ no changes
+    IndexReader r3 = r.reopen(true);
+    assertTrue(((DirectoryReader) r3).readOnly);
+    r3.close();
+
+    // Add new segment
+    writer.addDocument(doc);
+    writer.commit();
+
+    // Reopen reader1 --> reader2
+    IndexReader r2 = r.reopen(true);
+    r.close();
+    assertTrue(((DirectoryReader) r2).readOnly);
+    IndexReader[] subs = r2.getSequentialSubReaders();
+    final int[] ints2 = FieldCache.DEFAULT.getInts(subs[0], "number");
+    r2.close();
+
+    assertTrue(((SegmentReader) subs[0]).readOnly);
+    assertTrue(((SegmentReader) subs[1]).readOnly);
+    assertTrue(ints == ints2);
+
+    writer.close();
+    dir.close();
+  }
 }
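A side note on the last assertions in the added test: every SegmentReader really is a new instance after the readOnly switch, yet FieldCache hands back the same int[], because the cache is keyed on the unchanged segment's underlying core rather than on the reader wrapper; that sharing is exactly what assertTrue(ints == ints2) pins down. The same pattern in isolation (a sketch; dir is assumed to hold one committed segment with an indexed "number" field, and getOnlySegmentReader comes from LuceneTestCase):

    IndexReader r = IndexReader.open(dir, false);           // writable view
    final int[] before = FieldCache.DEFAULT.getInts(getOnlySegmentReader(r), "number");

    IndexReader ro = r.reopen(true);                        // switch to a read-only view
    r.close();
    final int[] after = FieldCache.DEFAULT.getInts(
        ro.getSequentialSubReaders()[0], "number");         // fresh SegmentReader instance

    assertTrue(before == after);                            // same shared cache entry
    ro.close();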
File diff suppressed because it is too large
@@ -0,0 +1,668 @@
package org.apache.lucene.index;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.io.Reader;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockFixedLengthPayloadFilter;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;

public class TestIndexWriterCommit extends LuceneTestCase {
  /*
   * Simple test for "commit on close": open writer then
   * add a bunch of docs, making sure reader does not see
   * these docs until writer is closed.
   */
  public void testCommitOnClose() throws IOException {
      Directory dir = newDirectory();
      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
      for (int i = 0; i < 14; i++) {
        TestIndexWriter.addDoc(writer);
      }
      writer.close();

      Term searchTerm = new Term("content", "aaa");
      IndexSearcher searcher = new IndexSearcher(dir, false);
      ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
      assertEquals("first number of hits", 14, hits.length);
      searcher.close();

      IndexReader reader = IndexReader.open(dir, true);

      writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
      for(int i=0;i<3;i++) {
        for(int j=0;j<11;j++) {
          TestIndexWriter.addDoc(writer);
        }
        searcher = new IndexSearcher(dir, false);
        hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
        assertEquals("reader incorrectly sees changes from writer", 14, hits.length);
        searcher.close();
        assertTrue("reader should have still been current", reader.isCurrent());
      }

      // Now, close the writer:
      writer.close();
      assertFalse("reader should not be current now", reader.isCurrent());

      searcher = new IndexSearcher(dir, false);
      hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
      assertEquals("reader did not see changes after writer was closed", 47, hits.length);
      searcher.close();
      reader.close();
      dir.close();
  }

  /*
   * Simple test for "commit on close": open writer, then
   * add a bunch of docs, making sure reader does not see
   * them until writer has closed.  Then instead of
   * closing the writer, call abort and verify reader sees
   * nothing was added.  Then verify we can open the index
   * and add docs to it.
   */
  public void testCommitOnCloseAbort() throws IOException {
    MockDirectoryWrapper dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(10));
    for (int i = 0; i < 14; i++) {
      TestIndexWriter.addDoc(writer);
    }
    writer.close();

    Term searchTerm = new Term("content", "aaa");
    IndexSearcher searcher = new IndexSearcher(dir, false);
    ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
    assertEquals("first number of hits", 14, hits.length);
    searcher.close();

    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
      .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
    for(int j=0;j<17;j++) {
      TestIndexWriter.addDoc(writer);
    }
    // Delete all docs:
    writer.deleteDocuments(searchTerm);

    searcher = new IndexSearcher(dir, false);
    hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
    assertEquals("reader incorrectly sees changes from writer", 14, hits.length);
    searcher.close();

    // Now, close the writer:
    writer.rollback();

    TestIndexWriter.assertNoUnreferencedFiles(dir, "unreferenced files remain after rollback()");

    searcher = new IndexSearcher(dir, false);
    hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
    assertEquals("saw changes after writer.abort", 14, hits.length);
    searcher.close();

    // Now make sure we can re-open the index, add docs,
    // and all is good:
    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
      .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));

    // On abort, writer in fact may write to the same
    // segments_N file:
    dir.setPreventDoubleWrite(false);

    for(int i=0;i<12;i++) {
      for(int j=0;j<17;j++) {
        TestIndexWriter.addDoc(writer);
      }
      searcher = new IndexSearcher(dir, false);
      hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
      assertEquals("reader incorrectly sees changes from writer", 14, hits.length);
      searcher.close();
    }

    writer.close();
    searcher = new IndexSearcher(dir, false);
    hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
    assertEquals("didn't see changes after close", 218, hits.length);
    searcher.close();

    dir.close();
  }

  /*
   * Verify that a writer with "commit on close" indeed
   * cleans up the temp segments created after opening
   * that are not referenced by the starting segments
   * file.  We check this by using MockDirectoryWrapper to
   * measure max temp disk space used.
   */
  public void testCommitOnCloseDiskUsage() throws IOException {
    MockDirectoryWrapper dir = newDirectory();
    Analyzer analyzer;
    if (random.nextBoolean()) {
      // no payloads
      analyzer = new Analyzer() {
        @Override
        public TokenStream tokenStream(String fieldName, Reader reader) {
          return new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
        }
      };
    } else {
      // fixed length payloads
      final int length = random.nextInt(200);
      analyzer = new Analyzer() {
        @Override
        public TokenStream tokenStream(String fieldName, Reader reader) {
          return new MockFixedLengthPayloadFilter(random,
              new MockTokenizer(reader, MockTokenizer.WHITESPACE, true),
              length);
        }
      };
    }

    IndexWriter writer = new IndexWriter(
        dir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).
            setMaxBufferedDocs(10).
            setReaderPooling(false).
            setMergePolicy(newLogMergePolicy(10))
    );
    for(int j=0;j<30;j++) {
      TestIndexWriter.addDocWithIndex(writer, j);
    }
    writer.close();
    dir.resetMaxUsedSizeInBytes();

    dir.setTrackDiskUsage(true);
    long startDiskUsage = dir.getMaxUsedSizeInBytes();
    writer = new IndexWriter(
        dir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).
            setOpenMode(OpenMode.APPEND).
            setMaxBufferedDocs(10).
            setMergeScheduler(new SerialMergeScheduler()).
            setReaderPooling(false).
            setMergePolicy(newLogMergePolicy(10))
    );
    for(int j=0;j<1470;j++) {
      TestIndexWriter.addDocWithIndex(writer, j);
    }
    long midDiskUsage = dir.getMaxUsedSizeInBytes();
    dir.resetMaxUsedSizeInBytes();
    writer.optimize();
    writer.close();

    IndexReader.open(dir, true).close();

    long endDiskUsage = dir.getMaxUsedSizeInBytes();

    // Ending index is 50X as large as starting index; due
    // to 3X disk usage normally we allow 150X max
    // transient usage.  If something is wrong w/ deleter
    // and it doesn't delete intermediate segments then it
    // will exceed this 150X:
    // System.out.println("start " + startDiskUsage + "; mid " + midDiskUsage + "; end " + endDiskUsage);
    assertTrue("writer used too much space while adding documents: mid=" + midDiskUsage + " start=" + startDiskUsage + " end=" + endDiskUsage + " max=" + (startDiskUsage*150),
        midDiskUsage < 150*startDiskUsage);
    assertTrue("writer used too much space after close: endDiskUsage=" + endDiskUsage + " startDiskUsage=" + startDiskUsage + " max=" + (startDiskUsage*150),
        endDiskUsage < 150*startDiskUsage);
    dir.close();
  }

  /*
   * Verify that calling optimize when writer is open for
   * "commit on close" works correctly both for rollback()
   * and close().
   */
  public void testCommitOnCloseOptimize() throws IOException {
    MockDirectoryWrapper dir = newDirectory();
    // Must disable throwing exc on double-write: this
    // test uses IW.rollback which easily results in
    // writing to same file more than once
    dir.setPreventDoubleWrite(false);
    IndexWriter writer = new IndexWriter(
        dir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMaxBufferedDocs(10).
            setMergePolicy(newLogMergePolicy(10))
    );
    for(int j=0;j<17;j++) {
      TestIndexWriter.addDocWithIndex(writer, j);
    }
    writer.close();

    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
    writer.optimize();

    if (VERBOSE) {
      writer.setInfoStream(System.out);
    }

    // Open a reader before closing (committing) the writer:
    IndexReader reader = IndexReader.open(dir, true);

    // Reader should see index as unoptimized at this
    // point:
    assertFalse("Reader incorrectly sees that the index is optimized", reader.isOptimized());
    reader.close();

    // Abort the writer:
    writer.rollback();
    TestIndexWriter.assertNoUnreferencedFiles(dir, "aborted writer after optimize");

    // Open a reader after aborting writer:
    reader = IndexReader.open(dir, true);

    // Reader should still see index as unoptimized:
    assertFalse("Reader incorrectly sees that the index is optimized", reader.isOptimized());
    reader.close();

    if (VERBOSE) {
      System.out.println("TEST: do real optimize");
    }
    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
    if (VERBOSE) {
      writer.setInfoStream(System.out);
    }
    writer.optimize();
    writer.close();

    if (VERBOSE) {
      System.out.println("TEST: writer closed");
    }
    TestIndexWriter.assertNoUnreferencedFiles(dir, "aborted writer after optimize");

    // Open a reader after aborting writer:
    reader = IndexReader.open(dir, true);

    // Reader should still see index as unoptimized:
    assertTrue("Reader incorrectly sees that the index is unoptimized", reader.isOptimized());
    reader.close();
    dir.close();
  }

  // LUCENE-2095: make sure with multiple threads commit
  // doesn't return until all changes are in fact in the
  // index
  public void testCommitThreadSafety() throws Throwable {
    final int NUM_THREADS = 5;
    final double RUN_SEC = 0.5;
    final Directory dir = newDirectory();
    final RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
    _TestUtil.reduceOpenFiles(w.w);
    w.commit();
    final AtomicBoolean failed = new AtomicBoolean();
    Thread[] threads = new Thread[NUM_THREADS];
    final long endTime = System.currentTimeMillis()+((long) (RUN_SEC*1000));
    for(int i=0;i<NUM_THREADS;i++) {
      final int finalI = i;
      threads[i] = new Thread() {
        @Override
        public void run() {
          try {
            final Document doc = new Document();
            IndexReader r = IndexReader.open(dir);
            Field f = newField("f", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
            doc.add(f);
            int count = 0;
            do {
              if (failed.get()) break;
              for(int j=0;j<10;j++) {
                final String s = finalI + "_" + String.valueOf(count++);
                f.setValue(s);
                w.addDocument(doc);
                w.commit();
                IndexReader r2 = r.reopen();
                assertTrue(r2 != r);
                r.close();
                r = r2;
                assertEquals("term=f:" + s + "; r=" + r, 1, r.docFreq(new Term("f", s)));
              }
            } while(System.currentTimeMillis() < endTime);
            r.close();
          } catch (Throwable t) {
            failed.set(true);
            throw new RuntimeException(t);
          }
        }
      };
      threads[i].start();
    }
    for(int i=0;i<NUM_THREADS;i++) {
      threads[i].join();
    }
    assertFalse(failed.get());
    w.close();
    dir.close();
  }

  // LUCENE-1044: test writer.commit() when ac=false
  public void testForceCommit() throws IOException {
    Directory dir = newDirectory();

    IndexWriter writer = new IndexWriter(
        dir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMaxBufferedDocs(2).
            setMergePolicy(newLogMergePolicy(5))
    );
    writer.commit();

    for (int i = 0; i < 23; i++)
      TestIndexWriter.addDoc(writer);

    IndexReader reader = IndexReader.open(dir, true);
    assertEquals(0, reader.numDocs());
    writer.commit();
    IndexReader reader2 = reader.reopen();
    assertEquals(0, reader.numDocs());
    assertEquals(23, reader2.numDocs());
    reader.close();

    for (int i = 0; i < 17; i++)
      TestIndexWriter.addDoc(writer);
    assertEquals(23, reader2.numDocs());
    reader2.close();
    reader = IndexReader.open(dir, true);
    assertEquals(23, reader.numDocs());
    reader.close();
    writer.commit();

    reader = IndexReader.open(dir, true);
    assertEquals(40, reader.numDocs());
    reader.close();
    writer.close();
    dir.close();
  }

  public void testFutureCommit() throws Exception {
    Directory dir = newDirectory();

    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
    Document doc = new Document();
    w.addDocument(doc);

    // commit to "first"
    Map<String,String> commitData = new HashMap<String,String>();
    commitData.put("tag", "first");
    w.commit(commitData);

    // commit to "second"
    w.addDocument(doc);
    commitData.put("tag", "second");
    w.commit(commitData);
    w.close();

    // open "first" with IndexWriter
    IndexCommit commit = null;
    for(IndexCommit c : IndexReader.listCommits(dir)) {
      if (c.getUserData().get("tag").equals("first")) {
        commit = c;
        break;
      }
    }

    assertNotNull(commit);

    w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE).setIndexCommit(commit));

    assertEquals(1, w.numDocs());

    // commit IndexWriter to "third"
    w.addDocument(doc);
    commitData.put("tag", "third");
    w.commit(commitData);
    w.close();

    // make sure "second" commit is still there
    commit = null;
    for(IndexCommit c : IndexReader.listCommits(dir)) {
      if (c.getUserData().get("tag").equals("second")) {
        commit = c;
        break;
      }
    }

    assertNotNull(commit);

    IndexReader r = IndexReader.open(commit, true);
    assertEquals(2, r.numDocs());
    r.close();

    // open "second", w/ writeable IndexReader & commit
    r = IndexReader.open(commit, NoDeletionPolicy.INSTANCE, false);
    assertEquals(2, r.numDocs());
    r.deleteDocument(0);
    r.deleteDocument(1);
    commitData.put("tag", "fourth");
    r.commit(commitData);
    r.close();

    // make sure "third" commit is still there
    commit = null;
    for(IndexCommit c : IndexReader.listCommits(dir)) {
      if (c.getUserData().get("tag").equals("third")) {
        commit = c;
        break;
      }
    }
    assertNotNull(commit);

    dir.close();
  }

  public void testNoCommits() throws Exception {
    // Tests that if we don't call commit(), the directory has 0 commits. This has
    // changed since LUCENE-2386, where before IW would always commit on a fresh
    // new index.
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    try {
      IndexReader.listCommits(dir);
      fail("listCommits should have thrown an exception over empty index");
    } catch (IndexNotFoundException e) {
      // that's expected !
    }
    // No changes still should generate a commit, because it's a new index.
    writer.close();
    assertEquals("expected 1 commits!", 1, IndexReader.listCommits(dir).size());
    dir.close();
  }

  // LUCENE-1274: test writer.prepareCommit()
  public void testPrepareCommit() throws IOException {
    Directory dir = newDirectory();

    IndexWriter writer = new IndexWriter(
        dir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMaxBufferedDocs(2).
            setMergePolicy(newLogMergePolicy(5))
    );
    writer.commit();

    for (int i = 0; i < 23; i++)
      TestIndexWriter.addDoc(writer);

    IndexReader reader = IndexReader.open(dir, true);
    assertEquals(0, reader.numDocs());

    writer.prepareCommit();

    IndexReader reader2 = IndexReader.open(dir, true);
    assertEquals(0, reader2.numDocs());

    writer.commit();

    IndexReader reader3 = reader.reopen();
    assertEquals(0, reader.numDocs());
    assertEquals(0, reader2.numDocs());
    assertEquals(23, reader3.numDocs());
    reader.close();
    reader2.close();

    for (int i = 0; i < 17; i++)
      TestIndexWriter.addDoc(writer);

    assertEquals(23, reader3.numDocs());
    reader3.close();
    reader = IndexReader.open(dir, true);
    assertEquals(23, reader.numDocs());
    reader.close();

    writer.prepareCommit();

    reader = IndexReader.open(dir, true);
    assertEquals(23, reader.numDocs());
    reader.close();

    writer.commit();
    reader = IndexReader.open(dir, true);
|
||||||
|
assertEquals(40, reader.numDocs());
|
||||||
|
reader.close();
|
||||||
|
writer.close();
|
||||||
|
dir.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
// LUCENE-1274: test writer.prepareCommit()
|
||||||
|
public void testPrepareCommitRollback() throws IOException {
|
||||||
|
MockDirectoryWrapper dir = newDirectory();
|
||||||
|
dir.setPreventDoubleWrite(false);
|
||||||
|
|
||||||
|
IndexWriter writer = new IndexWriter(
|
||||||
|
dir,
|
||||||
|
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
|
||||||
|
setMaxBufferedDocs(2).
|
||||||
|
setMergePolicy(newLogMergePolicy(5))
|
||||||
|
);
|
||||||
|
writer.commit();
|
||||||
|
|
||||||
|
for (int i = 0; i < 23; i++)
|
||||||
|
TestIndexWriter.addDoc(writer);
|
||||||
|
|
||||||
|
IndexReader reader = IndexReader.open(dir, true);
|
||||||
|
assertEquals(0, reader.numDocs());
|
||||||
|
|
||||||
|
writer.prepareCommit();
|
||||||
|
|
||||||
|
IndexReader reader2 = IndexReader.open(dir, true);
|
||||||
|
assertEquals(0, reader2.numDocs());
|
||||||
|
|
||||||
|
writer.rollback();
|
||||||
|
|
||||||
|
IndexReader reader3 = reader.reopen();
|
||||||
|
assertEquals(0, reader.numDocs());
|
||||||
|
assertEquals(0, reader2.numDocs());
|
||||||
|
assertEquals(0, reader3.numDocs());
|
||||||
|
reader.close();
|
||||||
|
reader2.close();
|
||||||
|
|
||||||
|
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||||
|
for (int i = 0; i < 17; i++)
|
||||||
|
TestIndexWriter.addDoc(writer);
|
||||||
|
|
||||||
|
assertEquals(0, reader3.numDocs());
|
||||||
|
reader3.close();
|
||||||
|
reader = IndexReader.open(dir, true);
|
||||||
|
assertEquals(0, reader.numDocs());
|
||||||
|
reader.close();
|
||||||
|
|
||||||
|
writer.prepareCommit();
|
||||||
|
|
||||||
|
reader = IndexReader.open(dir, true);
|
||||||
|
assertEquals(0, reader.numDocs());
|
||||||
|
reader.close();
|
||||||
|
|
||||||
|
writer.commit();
|
||||||
|
reader = IndexReader.open(dir, true);
|
||||||
|
assertEquals(17, reader.numDocs());
|
||||||
|
reader.close();
|
||||||
|
writer.close();
|
||||||
|
dir.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
// LUCENE-1274
|
||||||
|
public void testPrepareCommitNoChanges() throws IOException {
|
||||||
|
Directory dir = newDirectory();
|
||||||
|
|
||||||
|
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||||
|
writer.prepareCommit();
|
||||||
|
writer.commit();
|
||||||
|
writer.close();
|
||||||
|
|
||||||
|
IndexReader reader = IndexReader.open(dir, true);
|
||||||
|
assertEquals(0, reader.numDocs());
|
||||||
|
reader.close();
|
||||||
|
dir.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
// LUCENE-1382
|
||||||
|
public void testCommitUserData() throws IOException {
|
||||||
|
Directory dir = newDirectory();
|
||||||
|
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
|
||||||
|
for(int j=0;j<17;j++)
|
||||||
|
TestIndexWriter.addDoc(w);
|
||||||
|
w.close();
|
||||||
|
|
||||||
|
assertEquals(0, IndexReader.getCommitUserData(dir).size());
|
||||||
|
|
||||||
|
IndexReader r = IndexReader.open(dir, true);
|
||||||
|
// commit(Map) never called for this index
|
||||||
|
assertEquals(0, r.getCommitUserData().size());
|
||||||
|
r.close();
|
||||||
|
|
||||||
|
w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
|
||||||
|
for(int j=0;j<17;j++)
|
||||||
|
TestIndexWriter.addDoc(w);
|
||||||
|
Map<String,String> data = new HashMap<String,String>();
|
||||||
|
data.put("label", "test1");
|
||||||
|
w.commit(data);
|
||||||
|
w.close();
|
||||||
|
|
||||||
|
assertEquals("test1", IndexReader.getCommitUserData(dir).get("label"));
|
||||||
|
|
||||||
|
r = IndexReader.open(dir, true);
|
||||||
|
assertEquals("test1", r.getCommitUserData().get("label"));
|
||||||
|
r.close();
|
||||||
|
|
||||||
|
w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||||
|
w.optimize();
|
||||||
|
w.close();
|
||||||
|
|
||||||
|
assertEquals("test1", IndexReader.getCommitUserData(dir).get("label"));
|
||||||
|
|
||||||
|
dir.close();
|
||||||
|
}
|
||||||
|
}
|
|
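The commit tests above all drive the same two-phase protocol. As a minimal sketch (not part of the patch; dir, conf and doc are assumed to be set up as in the tests), the pattern they exercise is:

    // Sketch only: prepareCommit() flushes and syncs the new segments but
    // leaves them invisible to readers; commit() publishes them, and
    // rollback() discards a pending prepareCommit().
    IndexWriter w = new IndexWriter(dir, conf);
    w.addDocument(doc);
    w.prepareCommit();                    // phase 1: durable, not yet visible
    Map<String,String> userData = new HashMap<String,String>();
    userData.put("tag", "first");
    w.commit(userData);                   // phase 2: readers now see the docs
    w.close();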
@@ -18,11 +18,21 @@ package org.apache.lucene.index;
  */
 
 import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Random;
 
+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockTokenizer;
+import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Field.Index;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.Field.TermVector;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
@@ -860,4 +870,79 @@ public class TestIndexWriterDelete extends LuceneTestCase {
     modifier.close();
     dir.close();
   }
+
+  public void testDeleteAllSlowly() throws Exception {
+    final Directory dir = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random, dir);
+    final int NUM_DOCS = 1000 * RANDOM_MULTIPLIER;
+    final List<Integer> ids = new ArrayList<Integer>(NUM_DOCS);
+    for(int id=0;id<NUM_DOCS;id++) {
+      ids.add(id);
+    }
+    Collections.shuffle(ids, random);
+    for(int id : ids) {
+      Document doc = new Document();
+      doc.add(newField("id", ""+id, Field.Index.NOT_ANALYZED));
+      w.addDocument(doc);
+    }
+    Collections.shuffle(ids, random);
+    int upto = 0;
+    while(upto < ids.size()) {
+      final int left = ids.size() - upto;
+      final int inc = Math.min(left, _TestUtil.nextInt(random, 1, 20));
+      final int limit = upto + inc;
+      while(upto < limit) {
+        w.deleteDocuments(new Term("id", ""+ids.get(upto++)));
+      }
+      final IndexReader r = w.getReader();
+      assertEquals(NUM_DOCS - upto, r.numDocs());
+      r.close();
+    }
+
+    w.close();
+    dir.close();
+  }
+
+  public void testIndexingThenDeleting() throws Exception {
+    final Random r = random;
+    Directory dir = newDirectory();
+    // note this test explicitly disables payloads
+    final Analyzer analyzer = new Analyzer() {
+      @Override
+      public TokenStream tokenStream(String fieldName, Reader reader) {
+        return new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+      }
+    };
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).setRAMBufferSizeMB(1.0).setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH).setMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH));
+    w.setInfoStream(VERBOSE ? System.out : null);
+    Document doc = new Document();
+    doc.add(newField("field", "go 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20", Field.Store.NO, Field.Index.ANALYZED));
+    int num = TEST_NIGHTLY ? 6 * RANDOM_MULTIPLIER : 3 * RANDOM_MULTIPLIER;
+    for (int iter = 0; iter < num; iter++) {
+      int count = 0;
+
+      final boolean doIndexing = r.nextBoolean();
+      if (VERBOSE) {
+        System.out.println("TEST: iter doIndexing=" + doIndexing);
+      }
+      if (doIndexing) {
+        // Add docs until a flush is triggered
+        final int startFlushCount = w.getFlushCount();
+        while(w.getFlushCount() == startFlushCount) {
+          w.addDocument(doc);
+          count++;
+        }
+      } else {
+        // Delete docs until a flush is triggered
+        final int startFlushCount = w.getFlushCount();
+        while(w.getFlushCount() == startFlushCount) {
+          w.deleteDocuments(new Term("foo", ""+count));
+          count++;
+        }
+      }
+      assertTrue("flush happened too quickly during " + (doIndexing ? "indexing" : "deleting") + " count=" + count, count > 3000);
+    }
+    w.close();
+    dir.close();
+  }
 }
@@ -236,7 +236,6 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     assertEquals(count, count2);
     r2.close();
 
-    _TestUtil.checkIndex(dir);
     dir.close();
   }
 
@@ -287,7 +286,6 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     assertEquals(count, count2);
     r2.close();
 
-    _TestUtil.checkIndex(dir);
     dir.close();
   }
 
@@ -349,7 +347,6 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
       // expected
     }
     w.close();
-    _TestUtil.checkIndex(dir);
     dir.close();
   }
 
@@ -104,9 +104,12 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
         dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMaxBufferedDocs(10).
-            setMergePolicy(newLogMergePolicy())
+            setMergePolicy(newLogMergePolicy()).
+            setMergeScheduler(new SerialMergeScheduler())
     );
 
+    writer.setInfoStream(VERBOSE ? System.out : null);
+
     for (int i = 0; i < 250; i++) {
       addDoc(writer);
       checkInvariants(writer);
@@ -19,6 +19,9 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Field.Index;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.Field.TermVector;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.util.LuceneTestCase;
 
@@ -113,4 +116,187 @@ public class TestIndexWriterMerging extends LuceneTestCase
     }
     writer.close();
   }
+
+  // LUCENE-325: test expungeDeletes, when 2 singular merges
+  // are required
+  public void testExpungeDeletes() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
+        .setMaxBufferedDocs(2).setRAMBufferSizeMB(
+            IndexWriterConfig.DISABLE_AUTO_FLUSH));
+    writer.setInfoStream(VERBOSE ? System.out : null);
+    Document document = new Document();
+
+    document = new Document();
+    Field storedField = newField("stored", "stored", Field.Store.YES,
+                                 Field.Index.NO);
+    document.add(storedField);
+    Field termVectorField = newField("termVector", "termVector",
+                                     Field.Store.NO, Field.Index.NOT_ANALYZED,
+                                     Field.TermVector.WITH_POSITIONS_OFFSETS);
+    document.add(termVectorField);
+    for(int i=0;i<10;i++)
+      writer.addDocument(document);
+    writer.close();
+
+    IndexReader ir = IndexReader.open(dir, false);
+    assertEquals(10, ir.maxDoc());
+    assertEquals(10, ir.numDocs());
+    ir.deleteDocument(0);
+    ir.deleteDocument(7);
+    assertEquals(8, ir.numDocs());
+    ir.close();
+
+    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
+    assertEquals(8, writer.numDocs());
+    assertEquals(10, writer.maxDoc());
+    writer.expungeDeletes();
+    assertEquals(8, writer.numDocs());
+    writer.close();
+    ir = IndexReader.open(dir, true);
+    assertEquals(8, ir.maxDoc());
+    assertEquals(8, ir.numDocs());
+    ir.close();
+    dir.close();
+  }
+
+  // LUCENE-325: test expungeDeletes, when many adjacent merges are required
+  public void testExpungeDeletes2() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriter writer = new IndexWriter(
+        dir,
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+            setMaxBufferedDocs(2).
+            setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).
+            setMergePolicy(newLogMergePolicy(50))
+    );
+
+    Document document = new Document();
+
+    document = new Document();
+    Field storedField = newField("stored", "stored", Store.YES,
+                                 Index.NO);
+    document.add(storedField);
+    Field termVectorField = newField("termVector", "termVector",
+                                     Store.NO, Index.NOT_ANALYZED,
+                                     TermVector.WITH_POSITIONS_OFFSETS);
+    document.add(termVectorField);
+    for(int i=0;i<98;i++)
+      writer.addDocument(document);
+    writer.close();
+
+    IndexReader ir = IndexReader.open(dir, false);
+    assertEquals(98, ir.maxDoc());
+    assertEquals(98, ir.numDocs());
+    for(int i=0;i<98;i+=2)
+      ir.deleteDocument(i);
+    assertEquals(49, ir.numDocs());
+    ir.close();
+
+    writer = new IndexWriter(
+        dir,
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+            setMergePolicy(newLogMergePolicy(3))
+    );
+    assertEquals(49, writer.numDocs());
+    writer.expungeDeletes();
+    writer.close();
+    ir = IndexReader.open(dir, true);
+    assertEquals(49, ir.maxDoc());
+    assertEquals(49, ir.numDocs());
+    ir.close();
+    dir.close();
+  }
+
+  // LUCENE-325: test expungeDeletes without waiting, when
+  // many adjacent merges are required
+  public void testExpungeDeletes3() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriter writer = new IndexWriter(
+        dir,
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+            setMaxBufferedDocs(2).
+            setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).
+            setMergePolicy(newLogMergePolicy(50))
+    );
+
+    Document document = new Document();
+
+    document = new Document();
+    Field storedField = newField("stored", "stored", Field.Store.YES,
+                                 Field.Index.NO);
+    document.add(storedField);
+    Field termVectorField = newField("termVector", "termVector",
+                                     Field.Store.NO, Field.Index.NOT_ANALYZED,
+                                     Field.TermVector.WITH_POSITIONS_OFFSETS);
+    document.add(termVectorField);
+    for(int i=0;i<98;i++)
+      writer.addDocument(document);
+    writer.close();
+
+    IndexReader ir = IndexReader.open(dir, false);
+    assertEquals(98, ir.maxDoc());
+    assertEquals(98, ir.numDocs());
+    for(int i=0;i<98;i+=2)
+      ir.deleteDocument(i);
+    assertEquals(49, ir.numDocs());
+    ir.close();
+
+    writer = new IndexWriter(
+        dir,
+        newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+            setMergePolicy(newLogMergePolicy(3))
+    );
+    writer.expungeDeletes(false);
+    writer.close();
+    ir = IndexReader.open(dir, true);
+    assertEquals(49, ir.maxDoc());
+    assertEquals(49, ir.numDocs());
+    ir.close();
+    dir.close();
+  }
+
+  // Just intercepts all merges & verifies that we are never
+  // merging a segment with >= 20 (maxMergeDocs) docs
+  private class MyMergeScheduler extends MergeScheduler {
+    @Override
+    synchronized public void merge(IndexWriter writer)
+        throws CorruptIndexException, IOException {
+
+      while(true) {
+        MergePolicy.OneMerge merge = writer.getNextMerge();
+        if (merge == null) {
+          break;
+        }
+        for(int i=0;i<merge.segments.size();i++) {
+          assert merge.segments.get(i).docCount < 20;
+        }
+        writer.merge(merge);
+      }
+    }
+
+    @Override
+    public void close() {}
+  }
+
+  // LUCENE-1013
+  public void testSetMaxMergeDocs() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig conf = newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
+        .setMergeScheduler(new MyMergeScheduler()).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
+    LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
+    lmp.setMaxMergeDocs(20);
+    lmp.setMergeFactor(2);
+    IndexWriter iw = new IndexWriter(dir, conf);
+    iw.setInfoStream(VERBOSE ? System.out : null);
+    Document document = new Document();
+    document.add(newField("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED,
+                          Field.TermVector.YES));
+    for(int i=0;i<177;i++)
+      iw.addDocument(document);
+    iw.close();
+    dir.close();
+  }
 }
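The expungeDeletes tests added above exercise a narrower operation than optimize(). A minimal sketch of the API under test (not part of the patch; dir is assumed to already hold an index with deletions):

    // Sketch only: merge away segments' deleted docs without a full optimize.
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    writer.expungeDeletes();        // blocks until the expunging merges finish
    // writer.expungeDeletes(false) kicks the merges off without waiting
    writer.close();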
@@ -114,7 +114,6 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
 
       // Make sure reader can open the index:
       IndexReader.open(dir, true).close();
-      _TestUtil.checkIndex(dir);
     }
 
     dir.close();
@@ -491,7 +490,6 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
     w.addDocument(doc);
     w.close();
 
-    _TestUtil.checkIndex(dir);
     dir.close();
   }
 
@@ -28,6 +28,7 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.Constants;
 import org.apache.lucene.util._TestUtil;
 
@@ -123,7 +124,8 @@ public class TestIndexWriterOnJRECrash extends TestNRTThreads {
    */
  public boolean checkIndexes(File file) throws IOException {
    if (file.isDirectory()) {
-      Directory dir = newFSDirectory(file);
+      MockDirectoryWrapper dir = newFSDirectory(file);
+      dir.setCheckIndexOnClose(false); // don't double-checkindex
       if (IndexReader.indexExists(dir)) {
         if (VERBOSE) {
           System.err.println("Checking index: " + file);
@@ -0,0 +1,215 @@
+package org.apache.lucene.index;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Field.Index;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.Field.TermVector;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.MockDirectoryWrapper;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util._TestUtil;
+
+public class TestIndexWriterOptimize extends LuceneTestCase {
+  public void testOptimizeMaxNumSegments() throws IOException {
+
+    MockDirectoryWrapper dir = newDirectory();
+
+    final Document doc = new Document();
+    doc.add(newField("content", "aaa", Field.Store.YES, Field.Index.ANALYZED));
+    final int incrMin = TEST_NIGHTLY ? 15 : 40;
+    for(int numDocs=10;numDocs<500;numDocs += _TestUtil.nextInt(random, incrMin, 5*incrMin)) {
+      LogDocMergePolicy ldmp = new LogDocMergePolicy();
+      ldmp.setMinMergeDocs(1);
+      ldmp.setMergeFactor(5);
+      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
+          TEST_VERSION_CURRENT, new MockAnalyzer(random))
+          .setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(2).setMergePolicy(
+              ldmp));
+      for(int j=0;j<numDocs;j++)
+        writer.addDocument(doc);
+      writer.close();
+
+      SegmentInfos sis = new SegmentInfos();
+      sis.read(dir);
+      final int segCount = sis.size();
+
+      ldmp = new LogDocMergePolicy();
+      ldmp.setMergeFactor(5);
+      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
+          new MockAnalyzer(random)).setMergePolicy(ldmp));
+      writer.optimize(3);
+      writer.close();
+
+      sis = new SegmentInfos();
+      sis.read(dir);
+      final int optSegCount = sis.size();
+
+      if (segCount < 3)
+        assertEquals(segCount, optSegCount);
+      else
+        assertEquals(3, optSegCount);
+    }
+    dir.close();
+  }
+
+  public void testOptimizeMaxNumSegments2() throws IOException {
+    MockDirectoryWrapper dir = newDirectory();
+
+    final Document doc = new Document();
+    doc.add(newField("content", "aaa", Field.Store.YES, Field.Index.ANALYZED));
+
+    LogDocMergePolicy ldmp = new LogDocMergePolicy();
+    ldmp.setMinMergeDocs(1);
+    ldmp.setMergeFactor(4);
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
+        .setMaxBufferedDocs(2).setMergePolicy(ldmp).setMergeScheduler(new ConcurrentMergeScheduler()));
+
+    for(int iter=0;iter<10;iter++) {
+      for(int i=0;i<19;i++)
+        writer.addDocument(doc);
+
+      writer.commit();
+      writer.waitForMerges();
+      writer.commit();
+
+      SegmentInfos sis = new SegmentInfos();
+      sis.read(dir);
+
+      final int segCount = sis.size();
+
+      writer.optimize(7);
+      writer.commit();
+      writer.waitForMerges();
+
+      sis = new SegmentInfos();
+      sis.read(dir);
+      final int optSegCount = sis.size();
+
+      if (segCount < 7)
+        assertEquals(segCount, optSegCount);
+      else
+        assertEquals(7, optSegCount);
+    }
+    writer.close();
+    dir.close();
+  }
+
+  /**
+   * Make sure optimize doesn't use any more than 1X
+   * starting index size as its temporary free space
+   * required.
+   */
+  public void testOptimizeTempSpaceUsage() throws IOException {
+
+    MockDirectoryWrapper dir = newDirectory();
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(10).setMergePolicy(newLogMergePolicy()));
+    if (VERBOSE) {
+      System.out.println("TEST: config1=" + writer.getConfig());
+    }
+
+    for(int j=0;j<500;j++) {
+      TestIndexWriter.addDocWithIndex(writer, j);
+    }
+    final int termIndexInterval = writer.getConfig().getTermIndexInterval();
+    // force one extra segment w/ different doc store so
+    // we see the doc stores get merged
+    writer.commit();
+    TestIndexWriter.addDocWithIndex(writer, 500);
+    writer.close();
+
+    if (VERBOSE) {
+      System.out.println("TEST: start disk usage");
+    }
+    long startDiskUsage = 0;
+    String[] files = dir.listAll();
+    for(int i=0;i<files.length;i++) {
+      startDiskUsage += dir.fileLength(files[i]);
+      if (VERBOSE) {
+        System.out.println(files[i] + ": " + dir.fileLength(files[i]));
+      }
+    }
+
+    dir.resetMaxUsedSizeInBytes();
+    dir.setTrackDiskUsage(true);
+
+    // Important to use the same term index interval, else a
+    // smaller one here could increase the disk usage and
+    // cause a false failure:
+    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setTermIndexInterval(termIndexInterval).setMergePolicy(newLogMergePolicy()));
+    writer.setInfoStream(VERBOSE ? System.out : null);
+    writer.optimize();
+    writer.close();
+    long maxDiskUsage = dir.getMaxUsedSizeInBytes();
+    assertTrue("optimize used too much temporary space: starting usage was " + startDiskUsage + " bytes; max temp usage was " + maxDiskUsage + " but should have been " + (4*startDiskUsage) + " (= 4X starting usage)",
+               maxDiskUsage <= 4*startDiskUsage);
+    dir.close();
+  }
+
+  // Test calling optimize(false) whereby optimize is kicked
+  // off but we don't wait for it to finish (but
+  // writer.close() does wait)
+  public void testBackgroundOptimize() throws IOException {
+
+    Directory dir = newDirectory();
+    for(int pass=0;pass<2;pass++) {
+      IndexWriter writer = new IndexWriter(
+          dir,
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
+              setOpenMode(OpenMode.CREATE).
+              setMaxBufferedDocs(2).
+              setMergePolicy(newLogMergePolicy(51))
+      );
+      Document doc = new Document();
+      doc.add(newField("field", "aaa", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
+      for(int i=0;i<100;i++)
+        writer.addDocument(doc);
+      writer.optimize(false);
+
+      if (0 == pass) {
+        writer.close();
+        IndexReader reader = IndexReader.open(dir, true);
+        assertTrue(reader.isOptimized());
+        reader.close();
+      } else {
+        // Get another segment to flush so we can verify it is
+        // NOT included in the optimization
+        writer.addDocument(doc);
+        writer.addDocument(doc);
+        writer.close();
+
+        IndexReader reader = IndexReader.open(dir, true);
+        assertTrue(!reader.isOptimized());
+        reader.close();
+
+        SegmentInfos infos = new SegmentInfos();
+        infos.read(dir);
+        assertEquals(2, infos.size());
+      }
+    }
+
+    dir.close();
+  }
+}
@@ -821,7 +821,6 @@ public class TestIndexWriterReader extends LuceneTestCase {
 
     writer.close();
 
-    _TestUtil.checkIndex(dir1);
     dir1.close();
   }
 
@@ -908,7 +907,6 @@ public class TestIndexWriterReader extends LuceneTestCase {
     assertEquals(0, excs.size());
     writer.close();
 
-    _TestUtil.checkIndex(dir1);
     r.close();
     dir1.close();
   }
@@ -0,0 +1,337 @@
+package org.apache.lucene.index;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Random;
+import java.util.Set;
+
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.CharsRef;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.UnicodeUtil;
+
+public class TestIndexWriterUnicode extends LuceneTestCase {
+
+  final String[] utf8Data = new String[] {
+    // unpaired low surrogate
+    "ab\udc17cd", "ab\ufffdcd",
+    "\udc17abcd", "\ufffdabcd",
+    "\udc17", "\ufffd",
+    "ab\udc17\udc17cd", "ab\ufffd\ufffdcd",
+    "\udc17\udc17abcd", "\ufffd\ufffdabcd",
+    "\udc17\udc17", "\ufffd\ufffd",
+
+    // unpaired high surrogate
+    "ab\ud917cd", "ab\ufffdcd",
+    "\ud917abcd", "\ufffdabcd",
+    "\ud917", "\ufffd",
+    "ab\ud917\ud917cd", "ab\ufffd\ufffdcd",
+    "\ud917\ud917abcd", "\ufffd\ufffdabcd",
+    "\ud917\ud917", "\ufffd\ufffd",
+
+    // backwards surrogates
+    "ab\udc17\ud917cd", "ab\ufffd\ufffdcd",
+    "\udc17\ud917abcd", "\ufffd\ufffdabcd",
+    "\udc17\ud917", "\ufffd\ufffd",
+    "ab\udc17\ud917\udc17\ud917cd", "ab\ufffd\ud917\udc17\ufffdcd",
+    "\udc17\ud917\udc17\ud917abcd", "\ufffd\ud917\udc17\ufffdabcd",
+    "\udc17\ud917\udc17\ud917", "\ufffd\ud917\udc17\ufffd"
+  };
+
+  private int nextInt(int lim) {
+    return random.nextInt(lim);
+  }
+
+  private int nextInt(int start, int end) {
+    return start + nextInt(end-start);
+  }
+
+  private boolean fillUnicode(char[] buffer, char[] expected, int offset, int count) {
+    final int len = offset + count;
+    boolean hasIllegal = false;
+
+    if (offset > 0 && buffer[offset] >= 0xdc00 && buffer[offset] < 0xe000)
+      // Don't start in the middle of a valid surrogate pair
+      offset--;
+
+    for(int i=offset;i<len;i++) {
+      int t = nextInt(6);
+      if (0 == t && i < len-1) {
+        // Make a surrogate pair
+        // High surrogate
+        expected[i] = buffer[i++] = (char) nextInt(0xd800, 0xdc00);
+        // Low surrogate
+        expected[i] = buffer[i] = (char) nextInt(0xdc00, 0xe000);
+      } else if (t <= 1)
+        expected[i] = buffer[i] = (char) nextInt(0x80);
+      else if (2 == t)
+        expected[i] = buffer[i] = (char) nextInt(0x80, 0x800);
+      else if (3 == t)
+        expected[i] = buffer[i] = (char) nextInt(0x800, 0xd800);
+      else if (4 == t)
+        expected[i] = buffer[i] = (char) nextInt(0xe000, 0xffff);
+      else if (5 == t && i < len-1) {
+        // Illegal unpaired surrogate
+        if (nextInt(10) == 7) {
+          if (random.nextBoolean())
+            buffer[i] = (char) nextInt(0xd800, 0xdc00);
+          else
+            buffer[i] = (char) nextInt(0xdc00, 0xe000);
+          expected[i++] = 0xfffd;
+          expected[i] = buffer[i] = (char) nextInt(0x800, 0xd800);
+          hasIllegal = true;
+        } else
+          expected[i] = buffer[i] = (char) nextInt(0x800, 0xd800);
+      } else {
+        expected[i] = buffer[i] = ' ';
+      }
+    }
+
+    return hasIllegal;
+  }
+
+  // both start & end are inclusive
+  private final int getInt(Random r, int start, int end) {
+    return start + r.nextInt(1+end-start);
+  }
+
+  private final String asUnicodeChar(char c) {
+    return "U+" + Integer.toHexString(c);
+  }
+
+  private final String termDesc(String s) {
+    final String s0;
+    assertTrue(s.length() <= 2);
+    if (s.length() == 1) {
+      s0 = asUnicodeChar(s.charAt(0));
+    } else {
+      s0 = asUnicodeChar(s.charAt(0)) + "," + asUnicodeChar(s.charAt(1));
+    }
+    return s0;
+  }
+
+  private void checkTermsOrder(IndexReader r, Set<String> allTerms, boolean isTop) throws IOException {
+    TermsEnum terms = MultiFields.getFields(r).terms("f").iterator();
+
+    BytesRef last = new BytesRef();
+
+    Set<String> seenTerms = new HashSet<String>();
+
+    while(true) {
+      final BytesRef term = terms.next();
+      if (term == null) {
+        break;
+      }
+
+      assertTrue(last.compareTo(term) < 0);
+      last.copy(term);
+
+      final String s = term.utf8ToString();
+      assertTrue("term " + termDesc(s) + " was not added to index (count=" + allTerms.size() + ")", allTerms.contains(s));
+      seenTerms.add(s);
+    }
+
+    if (isTop) {
+      assertTrue(allTerms.equals(seenTerms));
+    }
+
+    // Test seeking:
+    Iterator<String> it = seenTerms.iterator();
+    while(it.hasNext()) {
+      BytesRef tr = new BytesRef(it.next());
+      assertEquals("seek failed for term=" + termDesc(tr.utf8ToString()),
+                   TermsEnum.SeekStatus.FOUND,
+                   terms.seek(tr));
+    }
+  }
+
+  // LUCENE-510
+  public void testRandomUnicodeStrings() throws Throwable {
+    char[] buffer = new char[20];
+    char[] expected = new char[20];
+
+    BytesRef utf8 = new BytesRef(20);
+    CharsRef utf16 = new CharsRef(20);
+
+    int num = 100000 * RANDOM_MULTIPLIER;
+    for (int iter = 0; iter < num; iter++) {
+      boolean hasIllegal = fillUnicode(buffer, expected, 0, 20);
+
+      UnicodeUtil.UTF16toUTF8(buffer, 0, 20, utf8);
+      if (!hasIllegal) {
+        byte[] b = new String(buffer, 0, 20).getBytes("UTF-8");
+        assertEquals(b.length, utf8.length);
+        for(int i=0;i<b.length;i++)
+          assertEquals(b[i], utf8.bytes[i]);
+      }
+
+      UnicodeUtil.UTF8toUTF16(utf8.bytes, 0, utf8.length, utf16);
+      assertEquals(utf16.length, 20);
+      for(int i=0;i<20;i++)
+        assertEquals(expected[i], utf16.chars[i]);
+    }
+  }
+
+  // LUCENE-510
+  public void testAllUnicodeChars() throws Throwable {
+
+    BytesRef utf8 = new BytesRef(10);
+    CharsRef utf16 = new CharsRef(10);
+    char[] chars = new char[2];
+    for(int ch=0;ch<0x0010FFFF;ch++) {
+
+      if (ch == 0xd800)
+        // Skip invalid code points
+        ch = 0xe000;
+
+      int len = 0;
+      if (ch <= 0xffff) {
+        chars[len++] = (char) ch;
+      } else {
+        chars[len++] = (char) (((ch-0x0010000) >> 10) + UnicodeUtil.UNI_SUR_HIGH_START);
+        chars[len++] = (char) (((ch-0x0010000) & 0x3FFL) + UnicodeUtil.UNI_SUR_LOW_START);
+      }
+
+      UnicodeUtil.UTF16toUTF8(chars, 0, len, utf8);
+
+      String s1 = new String(chars, 0, len);
+      String s2 = new String(utf8.bytes, 0, utf8.length, "UTF-8");
+      assertEquals("codepoint " + ch, s1, s2);
+
+      UnicodeUtil.UTF8toUTF16(utf8.bytes, 0, utf8.length, utf16);
+      assertEquals("codepoint " + ch, s1, new String(utf16.chars, 0, utf16.length));
+
+      byte[] b = s1.getBytes("UTF-8");
+      assertEquals(utf8.length, b.length);
+      for(int j=0;j<utf8.length;j++)
+        assertEquals(utf8.bytes[j], b[j]);
+    }
+  }
+
+  public void testEmbeddedFFFF() throws Throwable {
+    Directory d = newDirectory();
+    IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+    Document doc = new Document();
+    doc.add(newField("field", "a a\uffffb", Field.Store.NO, Field.Index.ANALYZED));
+    w.addDocument(doc);
+    doc = new Document();
+    doc.add(newField("field", "a", Field.Store.NO, Field.Index.ANALYZED));
+    w.addDocument(doc);
+    IndexReader r = w.getReader();
+    assertEquals(1, r.docFreq(new Term("field", "a\uffffb")));
+    r.close();
+    w.close();
+    d.close();
+  }
+
+  // LUCENE-510
+  public void testInvalidUTF16() throws Throwable {
+    Directory dir = newDirectory();
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new TestIndexWriter.StringSplitAnalyzer()));
+    Document doc = new Document();
+
+    final int count = utf8Data.length/2;
+    for(int i=0;i<count;i++)
+      doc.add(newField("f" + i, utf8Data[2*i], Field.Store.YES, Field.Index.ANALYZED));
+    w.addDocument(doc);
+    w.close();
+
+    IndexReader ir = IndexReader.open(dir, true);
+    Document doc2 = ir.document(0);
+    for(int i=0;i<count;i++) {
+      assertEquals("field " + i + " was not indexed correctly", 1, ir.docFreq(new Term("f"+i, utf8Data[2*i+1])));
+      assertEquals("field " + i + " is incorrect", utf8Data[2*i+1], doc2.getField("f"+i).stringValue());
+    }
+    ir.close();
+    dir.close();
+  }
+
+  // Make sure terms, including ones with surrogate pairs,
+  // sort in codepoint sort order by default
+  public void testTermUTF16SortOrder() throws Throwable {
+    Random rnd = random;
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(rnd, dir);
+    Document d = new Document();
+    // Single segment
+    Field f = newField("f", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
+    d.add(f);
+    char[] chars = new char[2];
+    final Set<String> allTerms = new HashSet<String>();
+
+    int num = 200 * RANDOM_MULTIPLIER;
+    for (int i = 0; i < num; i++) {
+
+      final String s;
+      if (rnd.nextBoolean()) {
+        // Single char
+        if (rnd.nextBoolean()) {
+          // Above surrogates
+          chars[0] = (char) getInt(rnd, 1+UnicodeUtil.UNI_SUR_LOW_END, 0xffff);
+        } else {
+          // Below surrogates
+          chars[0] = (char) getInt(rnd, 0, UnicodeUtil.UNI_SUR_HIGH_START-1);
+        }
+        s = new String(chars, 0, 1);
+      } else {
+        // Surrogate pair
+        chars[0] = (char) getInt(rnd, UnicodeUtil.UNI_SUR_HIGH_START, UnicodeUtil.UNI_SUR_HIGH_END);
+        assertTrue(((int) chars[0]) >= UnicodeUtil.UNI_SUR_HIGH_START && ((int) chars[0]) <= UnicodeUtil.UNI_SUR_HIGH_END);
+        chars[1] = (char) getInt(rnd, UnicodeUtil.UNI_SUR_LOW_START, UnicodeUtil.UNI_SUR_LOW_END);
+        s = new String(chars, 0, 2);
+      }
+      allTerms.add(s);
+      f.setValue(s);
+
+      writer.addDocument(d);
+
+      if ((1+i) % 42 == 0) {
+        writer.commit();
+      }
+    }
+
+    IndexReader r = writer.getReader();
+
+    // Test each sub-segment
+    final IndexReader[] subs = r.getSequentialSubReaders();
+    for(int i=0;i<subs.length;i++) {
+      checkTermsOrder(subs[i], allTerms, false);
+    }
+    checkTermsOrder(r, allTerms, true);
+
+    // Test multi segment
+    r.close();
+
+    writer.optimize();
+
+    // Test optimized single segment
+    r = writer.getReader();
+    checkTermsOrder(r, allTerms, true);
+    r.close();
+
+    writer.close();
+    dir.close();
+  }
+}
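The new Unicode tests lean on UnicodeUtil's two conversion routines round-tripping each other. A minimal sketch of that round trip (not part of the patch; the sample string is an arbitrary assumption):

    // Sketch only: UTF-16 -> UTF-8 -> UTF-16 must be lossless for valid input.
    char[] chars = "z\ud900\udc00".toCharArray();  // BMP char plus a surrogate pair
    BytesRef utf8 = new BytesRef(10);
    UnicodeUtil.UTF16toUTF8(chars, 0, chars.length, utf8);
    CharsRef utf16 = new CharsRef(10);
    UnicodeUtil.UTF8toUTF16(utf8.bytes, 0, utf8.length, utf16);
    assert new String(utf16.chars, 0, utf16.length).equals(new String(chars));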
@@ -25,6 +25,8 @@ import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.*;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
 
 
 /**
@@ -33,8 +35,8 @@ import org.apache.lucene.util.LuceneTestCase;
  */
 public class TestLazyBug extends LuceneTestCase {
 
-  public static int NUM_DOCS = 500;
-  public static int NUM_FIELDS = 100;
+  public static int NUM_DOCS = TEST_NIGHTLY ? 500 : 50;
+  public static int NUM_FIELDS = TEST_NIGHTLY ? 100 : 10;
 
   private static String[] data = new String[] {
     "now",
@@ -49,6 +51,19 @@ public class TestLazyBug extends LuceneTestCase {
   private static Set<String> dataset = asSet(data);
 
   private static String MAGIC_FIELD = "f"+(NUM_FIELDS/3);
 
+  private static Directory directory;
+
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    directory = makeIndex();
+  }
+
+  @AfterClass
+  public static void afterClass() throws Exception {
+    directory.close();
+    directory = null;
+  }
+
   private static FieldSelector SELECTOR = new FieldSelector() {
     public FieldSelectorResult accept(String f) {
@@ -59,7 +74,7 @@ public class TestLazyBug extends LuceneTestCase {
     }
   };
 
-  private Directory makeIndex() throws Exception {
+  private static Directory makeIndex() throws Exception {
     Directory dir = newDirectory();
     try {
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
@@ -72,7 +87,7 @@ public class TestLazyBug extends LuceneTestCase {
         doc.add(newField("f"+f,
                          data[f % data.length]
                          + '#' + data[random.nextInt(data.length)],
-                         Field.Store.YES,
+                         Field.Store.NO,
                          Field.Index.ANALYZED));
       }
       writer.addDocument(doc);
@@ -85,8 +100,7 @@ public class TestLazyBug extends LuceneTestCase {
   }
 
   public void doTest(int[] docs) throws Exception {
-    Directory dir = makeIndex();
-    IndexReader reader = IndexReader.open(dir, true);
+    IndexReader reader = IndexReader.open(directory, true);
     for (int i = 0; i < docs.length; i++) {
       Document d = reader.document(docs[i], SELECTOR);
       d.get(MAGIC_FIELD);
@@ -109,7 +123,6 @@ public class TestLazyBug extends LuceneTestCase {
       }
     }
     reader.close();
-    dir.close();
   }
 
   public void testLazyWorks() throws Exception {
@@ -70,7 +70,7 @@ public class TestLongPostings extends LuceneTestCase {
     // randomness (ie same seed will point to same dir):
     Directory dir = newFSDirectory(_TestUtil.getTempDir("longpostings" + "." + random.nextLong()));
 
-    final int NUM_DOCS = (int) ((TEST_NIGHTLY ? 4e6 : (RANDOM_MULTIPLIER*2e4)) * (1+random.nextDouble()));
+    final int NUM_DOCS = (int) ((TEST_NIGHTLY ? 4e6 : (RANDOM_MULTIPLIER*2e3)) * (1+random.nextDouble()));
 
     if (VERBOSE) {
       System.out.println("TEST: NUM_DOCS=" + NUM_DOCS);
@@ -104,7 +104,7 @@ public class TestNRTThreads extends LuceneTestCase {
 
     final LineFileDocs docs = new LineFileDocs(random);
     final File tempDir = _TestUtil.getTempDir("nrtopenfiles");
-    final MockDirectoryWrapper dir = new MockDirectoryWrapper(random, FSDirectory.open(tempDir));
+    final MockDirectoryWrapper dir = newFSDirectory(tempDir);
     final IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
 
     if (LuceneTestCase.TEST_NIGHTLY) {
@@ -565,10 +565,8 @@ public class TestNRTThreads extends LuceneTestCase {
 
     assertFalse(writer.anyNonBulkMerges);
     writer.close(false);
-    _TestUtil.checkIndex(dir);
     s.close();
     dir.close();
-    _TestUtil.rmDir(tempDir);
     docs.close();
     if (VERBOSE) {
       System.out.println("TEST: done [" + (System.currentTimeMillis()-t0) + " ms]");
@@ -64,7 +64,6 @@ public class TestOmitNorms extends LuceneTestCase {
     writer.optimize();
     // flush
     writer.close();
-    _TestUtil.checkIndex(ram);
 
     SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram, false));
     FieldInfos fi = reader.fieldInfos();
@@ -121,8 +120,6 @@ public class TestOmitNorms extends LuceneTestCase {
     // flush
     writer.close();
 
-    _TestUtil.checkIndex(ram);
-
     SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram, false));
     FieldInfos fi = reader.fieldInfos();
     assertTrue("OmitNorms field bit should be set.", fi.fieldInfo("f1").omitNorms);
@@ -170,8 +167,6 @@ public class TestOmitNorms extends LuceneTestCase {
     // flush
     writer.close();
 
-    _TestUtil.checkIndex(ram);
-
     SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram, false));
     FieldInfos fi = reader.fieldInfos();
     assertTrue("OmitNorms field bit should not be set.", !fi.fieldInfo("f1").omitNorms);
@@ -218,7 +213,6 @@ public class TestOmitNorms extends LuceneTestCase {
     writer.close();
 
     assertNoNrm(ram);
-    _TestUtil.checkIndex(ram);
     ram.close();
   }
 
@ -97,7 +97,6 @@ public class TestOmitTf extends LuceneTestCase {
|
||||||
writer.optimize();
|
writer.optimize();
|
||||||
// flush
|
// flush
|
||||||
writer.close();
|
writer.close();
|
||||||
_TestUtil.checkIndex(ram);
|
|
||||||
|
|
||||||
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram, false));
|
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram, false));
|
||||||
FieldInfos fi = reader.fieldInfos();
|
FieldInfos fi = reader.fieldInfos();
|
||||||
|
@ -153,8 +152,6 @@ public class TestOmitTf extends LuceneTestCase {
|
||||||
// flush
|
// flush
|
||||||
writer.close();
|
writer.close();
|
||||||
|
|
||||||
_TestUtil.checkIndex(ram);
|
|
||||||
|
|
||||||
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram, false));
|
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram, false));
|
||||||
FieldInfos fi = reader.fieldInfos();
|
FieldInfos fi = reader.fieldInfos();
|
||||||
assertTrue("OmitTermFreqAndPositions field bit should be set.", fi.fieldInfo("f1").omitTermFreqAndPositions);
|
assertTrue("OmitTermFreqAndPositions field bit should be set.", fi.fieldInfo("f1").omitTermFreqAndPositions);
|
||||||
|
@@ -200,8 +197,6 @@ public class TestOmitTf extends LuceneTestCase {
     // flush
     writer.close();
 
-    _TestUtil.checkIndex(ram);
-
     SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram, false));
     FieldInfos fi = reader.fieldInfos();
     assertTrue("OmitTermFreqAndPositions field bit should not be set.", !fi.fieldInfo("f1").omitTermFreqAndPositions);
@@ -245,7 +240,6 @@ public class TestOmitTf extends LuceneTestCase {
     writer.close();
 
     assertNoPrx(ram);
-    _TestUtil.checkIndex(ram);
     ram.close();
   }
 
@@ -282,7 +276,6 @@ public class TestOmitTf extends LuceneTestCase {
     writer.optimize();
     // flush
     writer.close();
-    _TestUtil.checkIndex(dir);
 
     /*
      * Verify the index
@@ -62,7 +62,6 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
 
     iwOut.optimize();
     iwOut.close();
-    _TestUtil.checkIndex(rdOut);
     rdOut.close();
     rd1.close();
     rd2.close();
@@ -122,7 +121,6 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
     iwOut.optimize();
     iwOut.close();
 
-    _TestUtil.checkIndex(rdOut);
     rdOut.close();
   }
 }
@@ -615,8 +615,6 @@ public class TestPayloads extends LuceneTestCase {
       writer.optimize();
       writer.close();
 
-      _TestUtil.checkIndex(dir);
-
       dir.close();
     }
   }
@@ -36,7 +36,7 @@ public class TestRollingUpdates extends LuceneTestCase {
     final LineFileDocs docs = new LineFileDocs(random);
 
     final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-    final int SIZE = 200 * RANDOM_MULTIPLIER;
+    final int SIZE = (TEST_NIGHTLY ? 200 : 20) * RANDOM_MULTIPLIER;
     int id = 0;
     IndexReader r = null;
     final int numUpdates = (int) (SIZE * (2+random.nextDouble()));
@@ -82,9 +82,8 @@ public class TestRollingUpdates extends LuceneTestCase {
     for (int r = 0; r < 3; r++) {
       final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
           TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
-      final int SIZE = 200 * RANDOM_MULTIPLIER;
-      final int numUpdates = (int) (SIZE * (2 + random.nextDouble()));
-      int numThreads = 3 + random.nextInt(12);
+      final int numUpdates = (TEST_NIGHTLY ? 200 : 20) * RANDOM_MULTIPLIER;
+      int numThreads = _TestUtil.nextInt(random, 2, 6);
       IndexingThread[] threads = new IndexingThread[numThreads];
       for (int i = 0; i < numThreads; i++) {
         threads[i] = new IndexingThread(docs, w, numUpdates);
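This hunk and several others in the patch follow the same load-scaling idiom: the iteration budget becomes a ternary on TEST_NIGHTLY, then is scaled by RANDOM_MULTIPLIER, so nightly builds keep the heavy workload while default runs stay fast. Sketched in isolation (NIGHTLY_COUNT and LOCAL_COUNT are illustrative names, not from the patch):

    static final int NIGHTLY_COUNT = 200, LOCAL_COUNT = 20;
    final int numUpdates = (TEST_NIGHTLY ? NIGHTLY_COUNT : LOCAL_COUNT) * RANDOM_MULTIPLIER;
    for (int i = 0; i < numUpdates; i++) {
      // ... one randomized update per iteration ...
    }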
@@ -97,6 +96,7 @@ public class TestRollingUpdates extends LuceneTestCase {
 
       w.close();
     }
+
     IndexReader open = IndexReader.open(dir);
     assertEquals(1, open.numDocs());
     open.close();
@@ -123,9 +123,10 @@ public class TestRollingUpdates extends LuceneTestCase {
           Document doc = new Document();// docs.nextDoc();
           doc.add(newField("id", "test", Index.NOT_ANALYZED));
           writer.updateDocument(new Term("id", "test"), doc);
-          if (random.nextInt(10) == 0) {
-            if (open == null)
+          if (random.nextInt(3) == 0) {
+            if (open == null) {
               open = IndexReader.open(writer, true);
+            }
             IndexReader reader = open.reopen();
             if (reader != open) {
               open.close();
@@ -134,11 +135,12 @@ public class TestRollingUpdates extends LuceneTestCase {
             assertEquals("iter: " + i + " numDocs: "+ open.numDocs() + " del: " + open.numDeletedDocs() + " max: " + open.maxDoc(), 1, open.numDocs());
           }
         }
-        open.close();
+        if (open != null) {
+          open.close();
+        }
       } catch (Exception e) {
-        fail(e.getMessage());
+        throw new RuntimeException(e);
       }
-
     }
   }
 }
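Swapping fail(e.getMessage()) for a wrapped rethrow is a real improvement, not just a style change: fail() flattens the failure to a message string and discards the stack trace, while the RuntimeException keeps the original exception as its cause in the test report. The shape of the pattern, sketched:

    try {
      // ... test body that may throw ...
    } catch (Exception e) {
      // preserve the original exception and its stack trace as the cause
      // instead of reducing it to a message string
      throw new RuntimeException(e);
    }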
@@ -287,7 +287,7 @@ public class TestSurrogates extends LuceneTestCase {
 
     for(int f=0;f<numField;f++) {
       String field = "f" + f;
-      final int numTerms = 10000 * RANDOM_MULTIPLIER;
+      final int numTerms = (TEST_NIGHTLY ? 10000 : 1000) * RANDOM_MULTIPLIER;
 
       final Set<String> uniqueTerms = new HashSet<String>();
 
@@ -115,9 +115,9 @@ public class BaseTestRangeFilter extends LuceneTestCase {
     /* build an index */
 
     Document doc = new Document();
-    Field idField = newField(random, "id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
-    Field randField = newField(random, "rand", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
-    Field bodyField = newField(random, "body", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
+    Field idField = newField(random, "id", "", Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS);
+    Field randField = newField(random, "rand", "", Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS);
+    Field bodyField = newField(random, "body", "", Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS);
     doc.add(idField);
     doc.add(randField);
     doc.add(bodyField);
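The field changes above trim index weight in a test that never scores these fields: NOT_ANALYZED_NO_NORMS skips writing norms, and Field.Store.NO stops storing a body value the test does not read back (that reading of the intent is an inference from the patch, not stated in it). For contrast, the heavy form versus the slimmed form:

    // before: norms written, value stored
    Field heavy = newField(random, "body", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
    // after: no norms, nothing stored
    Field light = newField(random, "body", "", Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS);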
@@ -232,7 +232,7 @@ public class TestBoolean2 extends LuceneTestCase {
     try {
 
       // increase number of iterations for more complete testing
-      int num = 50 * RANDOM_MULTIPLIER;
+      int num = (TEST_NIGHTLY ? 50 : 10) * RANDOM_MULTIPLIER;
       for (int i=0; i<num; i++) {
         int level = random.nextInt(3);
         q1 = randBoolQuery(new Random(random.nextLong()), random.nextBoolean(), level, field, vals, null);
@@ -24,6 +24,8 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.Directory;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
 
 import java.text.DecimalFormat;
 import java.util.Random;
@@ -32,14 +34,12 @@ import java.util.Random;
  */
 public class TestBooleanMinShouldMatch extends LuceneTestCase {
 
-  private Directory index;
-  private IndexReader r;
-  private IndexSearcher s;
+  private static Directory index;
+  private static IndexReader r;
+  private static IndexSearcher s;
 
-  @Override
-  public void setUp() throws Exception {
-    super.setUp();
-
+  @BeforeClass
+  public static void beforeClass() throws Exception {
     String[] data = new String [] {
       "A 1 2 3 4 5 6",
       "Z 4 5 6",
@@ -70,12 +70,14 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase {
     //System.out.println("Set up " + getName());
   }
 
-  @Override
-  public void tearDown() throws Exception {
+  @AfterClass
+  public static void afterClass() throws Exception {
     s.close();
+    s = null;
     r.close();
+    r = null;
     index.close();
-    super.tearDown();
+    index = null;
   }
 
 
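This conversion (and the TestPrefixInBooleanQuery one further down) moves index construction from per-test setUp/tearDown to once-per-class JUnit 4 fixtures: the fields become static, and each is nulled in afterClass so the static references do not pin the index in memory after the class finishes. The general shape, sketched rather than copied from the patch:

    private static Directory index;

    @BeforeClass
    public static void beforeClass() throws Exception {
      index = newDirectory();
      // ... build the shared index once for every test method ...
    }

    @AfterClass
    public static void afterClass() throws Exception {
      index.close();
      index = null; // statics outlive the test instance; release for GC
    }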
@@ -312,7 +314,7 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase {
 
 
     // increase number of iterations for more complete testing
-    int num = 50 * RANDOM_MULTIPLIER;
+    int num = (TEST_NIGHTLY ? 50 : 10) * RANDOM_MULTIPLIER;
     for (int i=0; i<num; i++) {
       int lev = random.nextInt(maxLev);
       final long seed = random.nextLong();
@@ -46,7 +46,7 @@ public class TestMultiValuedNumericRangeQuery extends LuceneTestCase {
 
     DecimalFormat format = new DecimalFormat("00000000000", new DecimalFormatSymbols(Locale.US));
 
-    int num = 5000 * RANDOM_MULTIPLIER;
+    int num = (TEST_NIGHTLY ? 5000 : 500) * RANDOM_MULTIPLIER;
     for (int l = 0; l < num; l++) {
       Document doc = new Document();
       for (int m=0, c=random.nextInt(10); m<=c; m++) {
@@ -44,7 +44,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
   // shift the starting of the values to the left, to also have negative values:
   private static final int startOffset = - 1 << 15;
   // number of docs to generate for testing
-  private static final int noDocs = 10000 * RANDOM_MULTIPLIER;
+  private static final int noDocs = (TEST_NIGHTLY ? 10000 : 5000) * RANDOM_MULTIPLIER;
 
   private static Directory directory = null;
   private static IndexReader reader = null;
@@ -41,7 +41,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
   // shift the starting of the values to the left, to also have negative values:
   private static final long startOffset = - 1L << 31;
   // number of docs to generate for testing
-  private static final int noDocs = 10000 * RANDOM_MULTIPLIER;
+  private static final int noDocs = (TEST_NIGHTLY ? 10000 : 5000) * RANDOM_MULTIPLIER;
 
   private static Directory directory = null;
   private static IndexReader reader = null;
@@ -24,6 +24,8 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.Directory;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
 
 /**
  * https://issues.apache.org/jira/browse/LUCENE-1974
@@ -38,53 +40,48 @@ import org.apache.lucene.store.Directory;
 public class TestPrefixInBooleanQuery extends LuceneTestCase {
 
   private static final String FIELD = "name";
-  private Directory directory;
-  private IndexReader reader;
-  private IndexSearcher searcher;
+  private static Directory directory;
+  private static IndexReader reader;
+  private static IndexSearcher searcher;
 
-  @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  @BeforeClass
+  public static void beforeClass() throws Exception {
     directory = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, directory);
 
+    Document doc = new Document();
+    Field field = newField(FIELD, "meaninglessnames", Field.Store.NO,
+        Field.Index.NOT_ANALYZED_NO_NORMS);
+    doc.add(field);
+
     for (int i = 0; i < 5137; ++i) {
-      Document doc = new Document();
-      doc.add(newField(FIELD, "meaninglessnames", Field.Store.YES,
-          Field.Index.NOT_ANALYZED));
-      writer.addDocument(doc);
-    }
-    {
-      Document doc = new Document();
-      doc.add(newField(FIELD, "tangfulin", Field.Store.YES,
-          Field.Index.NOT_ANALYZED));
       writer.addDocument(doc);
     }
 
+    field.setValue("tangfulin");
+    writer.addDocument(doc);
+
+    field.setValue("meaninglessnames");
     for (int i = 5138; i < 11377; ++i) {
-      Document doc = new Document();
-      doc.add(newField(FIELD, "meaninglessnames", Field.Store.YES,
-          Field.Index.NOT_ANALYZED));
-      writer.addDocument(doc);
-    }
-    {
-      Document doc = new Document();
-      doc.add(newField(FIELD, "tangfulin", Field.Store.YES,
-          Field.Index.NOT_ANALYZED));
       writer.addDocument(doc);
     }
 
+    field.setValue("tangfulin");
+    writer.addDocument(doc);
+
     reader = writer.getReader();
     searcher = newSearcher(reader);
     writer.close();
   }
 
-  @Override
-  public void tearDown() throws Exception {
+  @AfterClass
+  public static void afterClass() throws Exception {
     searcher.close();
+    searcher = null;
     reader.close();
+    reader = null;
     directory.close();
-    super.tearDown();
+    directory = null;
   }
 
   public void testPrefixQuery() throws Exception {
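Besides the static-fixture conversion, the hunk above switches to the classic Lucene 3.x reuse idiom: one Document and one Field are allocated up front, and Field.setValue(...) swaps the text between addDocument calls, avoiding thousands of short-lived allocations in the fixture. Sketched in isolation:

    Document doc = new Document();
    Field field = newField(FIELD, "meaninglessnames", Field.Store.NO,
        Field.Index.NOT_ANALYZED_NO_NORMS);
    doc.add(field);
    for (int i = 0; i < 5137; ++i) {
      writer.addDocument(doc);   // same Document instance every time
    }
    field.setValue("tangfulin"); // change the value, not the Field
    writer.addDocument(doc);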
@@ -126,7 +126,8 @@ public class TestArrayUtil extends LuceneTestCase {
   }
 
   public void testQuickSort() {
-    for (int i = 0, c = 500 * RANDOM_MULTIPLIER; i < c; i++) {
+    int num = (TEST_NIGHTLY ? 500 : 50) * RANDOM_MULTIPLIER;
+    for (int i = 0; i < num; i++) {
       Integer[] a1 = createRandomArray(1000), a2 = a1.clone();
       ArrayUtil.quickSort(a1);
       Arrays.sort(a2);
@@ -154,7 +155,8 @@ public class TestArrayUtil extends LuceneTestCase {
 
   // This is a test for LUCENE-3054 (which fails without the merge sort fall back with stack overflow in most cases)
   public void testQuickToMergeSortFallback() {
-    for (int i = 0, c = 500 * RANDOM_MULTIPLIER; i < c; i++) {
+    int num = (TEST_NIGHTLY ? 500 : 50) * RANDOM_MULTIPLIER;
+    for (int i = 0; i < num; i++) {
       Integer[] a1 = createSparseRandomArray(40000), a2 = a1.clone();
       ArrayUtil.quickSort(a1);
       Arrays.sort(a2);
@@ -163,7 +165,8 @@ public class TestArrayUtil extends LuceneTestCase {
   }
 
   public void testMergeSort() {
-    for (int i = 0, c = 500 * RANDOM_MULTIPLIER; i < c; i++) {
+    int num = (TEST_NIGHTLY ? 500 : 50) * RANDOM_MULTIPLIER;
+    for (int i = 0; i < num; i++) {
       Integer[] a1 = createRandomArray(1000), a2 = a1.clone();
       ArrayUtil.mergeSort(a1);
       Arrays.sort(a2);
@@ -13,7 +13,7 @@ body, h1, h2, h3, h4, h5, h6, a, button, input, select, option, textarea, th, td
 
 body
 {
-    padding: 20px;
+    padding: 30px;
     text-align: center;
 }
 
@@ -62,7 +62,6 @@ ul
     margin: 0 auto;
     margin-bottom: 30px;
     text-align: left;
-    width: 1002px;
 }
 
 #header
@@ -109,9 +108,6 @@ ul
 
 #main
 {
-    background-image: url( ../img/div.gif );
-    background-position: 200px 0;
-    background-repeat: repeat-y;
     border: 1px solid #c0c0c0;
     min-height: 600px;
     position: relative;
@@ -186,7 +182,7 @@ ul
 #menu-wrapper
 {
     float: left;
-    width: 201px;
+    width: 20%;
 }
 
 #menu p.loader
@@ -285,7 +281,11 @@ ul
 #menu li
 {
     border-bottom: 1px solid #c0c0c0;
-    /*border-right: 1px solid #f0f0f0;*/
+}
+
+#menu li p
+{
+    border-right: 1px solid #c0c0c0;
 }
 
 #menu li.optional
@@ -293,13 +293,16 @@ ul
     display: none;
 }
 
-#menu li:last-child
+#menu .singlecore li:last-child
 {
     border-bottom: 0;
 }
 
 #menu ul ul
 {
+    background-image: url( ../img/div.gif );
+    background-position: 100% 0;
+    background-repeat: repeat-y;
     display: none;
     padding-top: 5px;
     padding-bottom: 10px;
@@ -307,6 +310,7 @@ ul
 
 #menu ul .active ul
 {
+    border-bottom: 1px solid #f0f0f0;
     display: block;
 }
 
@@ -405,7 +409,7 @@ ul
 #content-wrapper
 {
     float: right;
-    width: 799px;
+    width: 80%;
 }
 
 #content
@@ -865,14 +869,14 @@ ul
 #content #query
 {
     background-image: url( ../img/div.gif );
-    background-position: 210px 0;
+    background-position: 22% 0;
     background-repeat: repeat-y;
 }
 
 #content #query #form
 {
     float: left;
-    width: 200px;
+    width: 21%;
 }
 
 #content #query #form label
@@ -960,7 +964,7 @@ ul
 {
     display: none;
     float: right;
-    width: 560px;
+    width: 77%;
 }
 
 #content #query #result #url
@@ -2439,20 +2443,20 @@ ul
 #content #replication
 {
     background-image: url( ../img/div.gif );
-    background-position: 165px 0;
+    background-position: 21% 0;
     background-repeat: repeat-y;
 }
 
 #content #replication #frame
 {
     float: right;
-    width: 600px;
+    width: 78%;
 }
 
 #content #replication #navigation
 {
     float: left;
-    width: 161px;
+    width: 20%;
 }
 
 #content #replication #error
@@ -4158,9 +4158,17 @@ var solr_admin = function( app_config )
 
         if( is_multicore )
         {
+            menu_element
+                .addClass( 'multicore' );
+
             $( '#cores', menu_element )
                 .show();
         }
+        else
+        {
+            menu_element
+                .addClass( 'singlecore' );
+        }
 
         for( var core_name in response.status )
         {