mirror of https://github.com/apache/lucene.git
LUCENE-5199: Improve LuceneTestCase.defaultCodecSupportsDocsWithField to check the actual DocValuesFormat used per-field
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1519685 13f79535-47bb-0310-9956-ffa450edef68
parent b352735c50
commit fdee65fd72
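The change: the zero-argument defaultCodecSupportsDocsWithField() only inspected the name of the default Codec, but the randomized test codec can assign a different DocValuesFormat to every field, so a field could still be written by an older format (Lucene40, Lucene41, Lucene42) that cannot record which documents carry a value. The helper now takes the names of the fields a test depends on and also checks the DocValuesFormat actually used for each of them (via _TestUtil.getDocValuesFormat); the call sites in the hunks below were updated to pass their field names. A minimal usage sketch, assuming the Lucene test framework is on the classpath (the class and field names here are illustrative, not from the commit):

    import org.apache.lucene.index.RandomIndexWriter;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.util.LuceneTestCase;

    public class ExampleDocsWithFieldTest extends LuceneTestCase {
      public void testMissingAware() throws Exception {
        // Skip the test unless the default codec AND the formats actually chosen for
        // "dv1" and "dv2" can record which documents have a value (docsWithField).
        assumeTrue("codec does not support docsWithField",
                   defaultCodecSupportsDocsWithField("dv1", "dv2"));
        Directory dir = newDirectory();
        RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
        // ... index documents where "dv1"/"dv2" are set on some docs and missing on others,
        //     then assert against the reader's getDocsWithField bits ...
        writer.close();
        dir.close();
      }
    }

Calling the helper with no arguments remains valid and behaves as before, since the added per-field loop simply has nothing to iterate over.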
@@ -327,7 +327,7 @@ public class TestMultiDocValues extends LuceneTestCase {
   }
 
   public void testDocsWithField() throws Exception {
-    assumeTrue("codec does not support docsWithField", defaultCodecSupportsDocsWithField());
+    assumeTrue("codec does not support docsWithField", defaultCodecSupportsDocsWithField("numbers"));
     Directory dir = newDirectory();
 
     IndexWriterConfig iwc = newIndexWriterConfig(random(), TEST_VERSION_CURRENT, null);
@@ -635,7 +635,7 @@ public class TestRangeAccumulator extends FacetTestCase {
 
   // LUCENE-5178
   public void testMissingValues() throws Exception {
-    assumeTrue("codec does not support docsWithField", defaultCodecSupportsDocsWithField());
+    assumeTrue("codec does not support docsWithField", defaultCodecSupportsDocsWithField("field"));
     Directory d = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random(), d);
     Document doc = new Document();
@@ -674,7 +674,7 @@ public abstract class BaseDocValuesFormatTestCase extends LuceneTestCase {
     iwriter.close();
 
     SortedDocValues dv = getOnlySegmentReader(ireader).getSortedDocValues("field");
-    if (defaultCodecSupportsDocsWithField()) {
+    if (defaultCodecSupportsDocsWithField("field")) {
       assertEquals(-1, dv.getOrd(0));
       assertEquals(0, dv.getValueCount());
     } else {
@@ -734,7 +734,7 @@ public abstract class BaseDocValuesFormatTestCase extends LuceneTestCase {
     BytesRef scratch = new BytesRef();
     dv.lookupOrd(dv.getOrd(0), scratch);
     assertEquals(new BytesRef("hello world 2"), scratch);
-    if (defaultCodecSupportsDocsWithField()) {
+    if (defaultCodecSupportsDocsWithField("dv")) {
       assertEquals(-1, dv.getOrd(1));
     }
     dv.get(1, scratch);
@@ -1092,7 +1092,8 @@ public abstract class BaseDocValuesFormatTestCase extends LuceneTestCase {
   public void testRandomSortedBytes() throws IOException {
     Directory dir = newDirectory();
     IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
-    if (!defaultCodecSupportsDocsWithField()) {
+    final boolean defaultCodecSupportsDocsWithField = defaultCodecSupportsDocsWithField("field");
+    if (!defaultCodecSupportsDocsWithField) {
       // if the codec doesnt support missing, we expect missing to be mapped to byte[]
       // by the impersonator, but we have to give it a chance to merge them to this
       cfg.setMergePolicy(newLogMergePolicy());
@@ -1121,14 +1122,14 @@ public abstract class BaseDocValuesFormatTestCase extends LuceneTestCase {
       doc.add(newTextField("id", "noValue", Field.Store.YES));
       w.addDocument(doc);
     }
-    if (!defaultCodecSupportsDocsWithField()) {
+    if (!defaultCodecSupportsDocsWithField) {
       BytesRef bytesRef = new BytesRef();
       hash.add(bytesRef); // add empty value for the gaps
     }
     if (rarely()) {
       w.commit();
     }
-    if (!defaultCodecSupportsDocsWithField()) {
+    if (!defaultCodecSupportsDocsWithField) {
       // if the codec doesnt support missing, we expect missing to be mapped to byte[]
       // by the impersonator, but we have to give it a chance to merge them to this
       w.forceMerge(1);
@@ -1253,7 +1254,7 @@ public abstract class BaseDocValuesFormatTestCase extends LuceneTestCase {
   }
 
   private void doTestMissingVsFieldCache(LongProducer longs) throws Exception {
-    assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField());
+    assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField("indexed", "dv"));
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
@@ -2356,7 +2357,7 @@ public abstract class BaseDocValuesFormatTestCase extends LuceneTestCase {
   }
 
   public void testTwoNumbersOneMissing() throws IOException {
-    assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField());
+    assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField("dv1"));
     Directory directory = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
     conf.setMergePolicy(newLogMergePolicy());
@@ -2385,7 +2386,7 @@ public abstract class BaseDocValuesFormatTestCase extends LuceneTestCase {
   }
 
   public void testTwoNumbersOneMissingWithMerging() throws IOException {
-    assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField());
+    assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField("dv1"));
     Directory directory = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
     conf.setMergePolicy(newLogMergePolicy());
@@ -2415,7 +2416,7 @@ public abstract class BaseDocValuesFormatTestCase extends LuceneTestCase {
   }
 
   public void testThreeNumbersOneMissingWithMerging() throws IOException {
-    assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField());
+    assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField("dv1"));
     Directory directory = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
     conf.setMergePolicy(newLogMergePolicy());
@@ -2451,7 +2452,7 @@ public abstract class BaseDocValuesFormatTestCase extends LuceneTestCase {
   }
 
   public void testTwoBytesOneMissing() throws IOException {
-    assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField());
+    assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField("dv1"));
     Directory directory = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
     conf.setMergePolicy(newLogMergePolicy());
@@ -2483,7 +2484,7 @@ public abstract class BaseDocValuesFormatTestCase extends LuceneTestCase {
   }
 
   public void testTwoBytesOneMissingWithMerging() throws IOException {
-    assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField());
+    assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField("dv1"));
     Directory directory = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
     conf.setMergePolicy(newLogMergePolicy());
@@ -2516,7 +2517,7 @@ public abstract class BaseDocValuesFormatTestCase extends LuceneTestCase {
   }
 
   public void testThreeBytesOneMissingWithMerging() throws IOException {
-    assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField());
+    assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField("dv1"));
     Directory directory = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
     conf.setMergePolicy(newLogMergePolicy());
@@ -2813,7 +2814,7 @@ public abstract class BaseDocValuesFormatTestCase extends LuceneTestCase {
 
   /** Tests dv against stored fields with threads (all types + missing) */
   public void testThreads2() throws Exception {
-    assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField());
+    assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField("dvBin", "dvSorted", "dvNum", "dvSortedSet"));
     assumeTrue("Codec does not support SORTED_SET", defaultCodecSupportsSortedSet());
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
@@ -29,6 +29,7 @@ import java.util.logging.Logger;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.codecs.Codec;
+import org.apache.lucene.codecs.DocValuesFormat;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
@@ -1375,11 +1376,19 @@ public abstract class LuceneTestCase extends Assert {
 
   /** Returns true if the codec "supports" docsWithField
    *  (other codecs return MatchAllBits, because you couldnt write missing values before) */
-  public static boolean defaultCodecSupportsDocsWithField() {
+  public static boolean defaultCodecSupportsDocsWithField(String... fields) {
     String name = Codec.getDefault().getName();
     if (name.equals("Lucene40") || name.equals("Lucene41") || name.equals("Lucene42")) {
       return false;
     }
+
+    // check that the actual DocValuesFormat for each field supports docsWithField
+    for (String field : fields) {
+      String format = _TestUtil.getDocValuesFormat(field);
+      if (format.equals("Lucene40") || format.equals("Lucene41") || format.equals("Lucene42")) {
+        return false;
+      }
+    }
     return true;
   }
 
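The per-field loop added above matters because a Codec can choose a different DocValuesFormat for each field, so the default Codec's name alone does not say how a particular field is written. A small illustration of such a mismatch, assuming the Lucene 4.x per-field codec API (a Lucene45Codec subclass overriding getDocValuesFormatForField) and the registered format name "Lucene42"; the class and field names are hypothetical, and outside the test framework the old formats are read-only, so this sketches the situation the randomized test codec can create rather than something you would write in production:

    import org.apache.lucene.codecs.DocValuesFormat;
    import org.apache.lucene.codecs.lucene45.Lucene45Codec;

    // Hypothetical codec: its default name is modern, but the "numbers" field is mapped
    // to an older DocValuesFormat that cannot record which documents have a value.
    public class MixedDocValuesCodec extends Lucene45Codec {
      @Override
      public DocValuesFormat getDocValuesFormatForField(String field) {
        if ("numbers".equals(field)) {
          return DocValuesFormat.forName("Lucene42"); // pre-docsWithField format
        }
        return super.getDocValuesFormatForField(field);
      }
    }

With such a codec installed as the default, Codec.getDefault().getName() is not "Lucene40"/"Lucene41"/"Lucene42", so the old zero-argument check would have reported support; the new defaultCodecSupportsDocsWithField("numbers") can return false because it asks which format the "numbers" field really gets.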