valuesRef = new HoldsOneThing<>();
- Uninvert u = new Uninvert() {
+ Uninvert u = new Uninvert(parser instanceof PointParser) {
private long minValue;
private long currentValue;
private GrowableWriter values;
@@ -542,7 +678,7 @@ class FieldCacheImpl implements FieldCache {
u.uninvert(reader, key.field, setDocsWithField);
if (setDocsWithField) {
- wrapper.setDocsWithField(reader, key.field, u.docsWithField);
+ wrapper.setDocsWithField(reader, key.field, u.docsWithField, parser);
}
GrowableWriterAndMinValue values = valuesRef.get();
if (values == null) {
@@ -872,7 +1008,7 @@ class FieldCacheImpl implements FieldCache {
public int length() {
return maxDoc;
}
- });
+ }, null);
}
// maybe an int-only impl?
return new BinaryDocValuesImpl(bytes.freeze(true), offsetReader);
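// ---------------------------------------------------------------------------
// Editor's illustration, not part of the patch. The thread through the hunks
// above: the parser passed to FieldCache now decides whether uninversion reads
// the points index (the new PointParser implementations) or the postings (the
// LEGACY_* parsers), and getDocsWithField() gains a parser argument for the
// same reason. A minimal sketch; FieldCache is package-private in lucene/misc,
// so this assumes a class in the org.apache.lucene.uninverting package, and
// the class and method names are hypothetical.
package org.apache.lucene.uninverting;

import java.io.IOException;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.util.Bits;

class PointOrLegacyUninvertSketch {
  /** Uninverts a long field, picking the parser by how the field was indexed. */
  static NumericDocValues longs(LeafReader reader, String field, boolean indexedWithPoints)
      throws IOException {
    FieldCache.Parser parser =
        indexedWithPoints ? FieldCache.LONG_POINT_PARSER : FieldCache.LEGACY_LONG_PARSER;
    return FieldCache.DEFAULT.getNumerics(reader, field, parser, false);
  }

  /** Docs-with-field bits now also take the parser (null for non-numeric fields). */
  static Bits docsWith(LeafReader reader, String field, boolean indexedWithPoints)
      throws IOException {
    return FieldCache.DEFAULT.getDocsWithField(reader, field,
        indexedWithPoints ? FieldCache.LONG_POINT_PARSER : FieldCache.LEGACY_LONG_PARSER);
  }
}
// ---------------------------------------------------------------------------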
diff --git a/lucene/misc/src/java/org/apache/lucene/uninverting/UninvertingReader.java b/lucene/misc/src/java/org/apache/lucene/uninverting/UninvertingReader.java
index 9f96b4f7744..10d1a5b9b3c 100644
--- a/lucene/misc/src/java/org/apache/lucene/uninverting/UninvertingReader.java
+++ b/lucene/misc/src/java/org/apache/lucene/uninverting/UninvertingReader.java
@@ -58,33 +58,69 @@ public class UninvertingReader extends FilterLeafReader {
*/
public static enum Type {
/**
- * Single-valued Integer, (e.g. indexed with {@link org.apache.lucene.document.LegacyIntField})
+ * Single-valued Integer, (e.g. indexed with {@link org.apache.lucene.document.IntPoint})
*
* Fields with this type act as if they were indexed with
* {@link NumericDocValuesField}.
*/
- INTEGER,
+ INTEGER_POINT,
+ /**
+ * Single-valued Long, (e.g. indexed with {@link org.apache.lucene.document.LongPoint})
+ *
+ * Fields with this type act as if they were indexed with
+ * {@link NumericDocValuesField}.
+ */
+ LONG_POINT,
+ /**
+ * Single-valued Float, (e.g. indexed with {@link org.apache.lucene.document.FloatPoint})
+ *
+ * Fields with this type act as if they were indexed with
+ * {@link NumericDocValuesField}.
+ */
+ FLOAT_POINT,
+ /**
+ * Single-valued Double, (e.g. indexed with {@link org.apache.lucene.document.DoublePoint})
+ *
+ * Fields with this type act as if they were indexed with
+ * {@link NumericDocValuesField}.
+ */
+ DOUBLE_POINT,
+ /**
+ * Single-valued Integer, (e.g. indexed with {@link org.apache.lucene.document.LegacyIntField})
+ *
+ * Fields with this type act as if they were indexed with
+ * {@link NumericDocValuesField}.
+ * @deprecated Index with points and use {@link #INTEGER_POINT} instead.
+ */
+ @Deprecated
+ LEGACY_INTEGER,
/**
* Single-valued Long, (e.g. indexed with {@link org.apache.lucene.document.LegacyLongField})
*
* Fields with this type act as if they were indexed with
* {@link NumericDocValuesField}.
+ * @deprecated Index with points and use {@link #LONG_POINT} instead.
*/
- LONG,
+ @Deprecated
+ LEGACY_LONG,
/**
* Single-valued Float, (e.g. indexed with {@link org.apache.lucene.document.LegacyFloatField})
*
* Fields with this type act as if they were indexed with
* {@link NumericDocValuesField}.
+ * @deprecated Index with points and use {@link #FLOAT_POINT} instead.
*/
- FLOAT,
+ @Deprecated
+ LEGACY_FLOAT,
/**
* Single-valued Double, (e.g. indexed with {@link org.apache.lucene.document.LegacyDoubleField})
*
* Fields with this type act as if they were indexed with
* {@link NumericDocValuesField}.
+ * @deprecated Index with points and use {@link #DOUBLE_POINT} instead.
*/
- DOUBLE,
+ @Deprecated
+ LEGACY_DOUBLE,
/**
* Single-valued Binary, (e.g. indexed with {@link StringField})
*
@@ -181,14 +217,29 @@ public class UninvertingReader extends FilterLeafReader {
ArrayList<FieldInfo> filteredInfos = new ArrayList<>();
for (FieldInfo fi : in.getFieldInfos()) {
DocValuesType type = fi.getDocValuesType();
- if (fi.getIndexOptions() != IndexOptions.NONE && fi.getDocValuesType() == DocValuesType.NONE) {
+ if (type == DocValuesType.NONE) {
Type t = mapping.get(fi.name);
if (t != null) {
+ if (t == Type.INTEGER_POINT || t == Type.LONG_POINT || t == Type.FLOAT_POINT || t == Type.DOUBLE_POINT) {
+ // type uses points
+ if (fi.getPointDimensionCount() == 0) {
+ continue;
+ }
+ } else {
+ // type uses inverted index
+ if (fi.getIndexOptions() == IndexOptions.NONE) {
+ continue;
+ }
+ }
switch(t) {
- case INTEGER:
- case LONG:
- case FLOAT:
- case DOUBLE:
+ case INTEGER_POINT:
+ case LONG_POINT:
+ case FLOAT_POINT:
+ case DOUBLE_POINT:
+ case LEGACY_INTEGER:
+ case LEGACY_LONG:
+ case LEGACY_FLOAT:
+ case LEGACY_DOUBLE:
type = DocValuesType.NUMERIC;
break;
case BINARY:
@@ -226,10 +277,14 @@ public class UninvertingReader extends FilterLeafReader {
Type v = getType(field);
if (v != null) {
switch (v) {
- case INTEGER: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.NUMERIC_UTILS_INT_PARSER, true);
- case FLOAT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.NUMERIC_UTILS_FLOAT_PARSER, true);
- case LONG: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.NUMERIC_UTILS_LONG_PARSER, true);
- case DOUBLE: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, true);
+ case INTEGER_POINT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.INT_POINT_PARSER, true);
+ case FLOAT_POINT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.FLOAT_POINT_PARSER, true);
+ case LONG_POINT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LONG_POINT_PARSER, true);
+ case DOUBLE_POINT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.DOUBLE_POINT_PARSER, true);
+ case LEGACY_INTEGER: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_INT_PARSER, true);
+ case LEGACY_FLOAT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_FLOAT_PARSER, true);
+ case LEGACY_LONG: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_LONG_PARSER, true);
+ case LEGACY_DOUBLE: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_DOUBLE_PARSER, true);
}
}
return super.getNumericDocValues(field);
@@ -275,8 +330,20 @@ public class UninvertingReader extends FilterLeafReader {
@Override
public Bits getDocsWithField(String field) throws IOException {
- if (getType(field) != null) {
- return FieldCache.DEFAULT.getDocsWithField(in, field);
+ Type v = getType(field);
+ if (v != null) {
+ switch (v) {
+ case INTEGER_POINT: return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.INT_POINT_PARSER);
+ case FLOAT_POINT: return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.FLOAT_POINT_PARSER);
+ case LONG_POINT: return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.LONG_POINT_PARSER);
+ case DOUBLE_POINT: return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.DOUBLE_POINT_PARSER);
+ case LEGACY_INTEGER: return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.LEGACY_INT_PARSER);
+ case LEGACY_FLOAT: return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.LEGACY_FLOAT_PARSER);
+ case LEGACY_LONG: return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.LEGACY_LONG_PARSER);
+ case LEGACY_DOUBLE: return FieldCache.DEFAULT.getDocsWithField(in, field, FieldCache.LEGACY_DOUBLE_PARSER);
+ default:
+ return FieldCache.DEFAULT.getDocsWithField(in, field, null);
+ }
} else {
return in.getDocsWithField(field);
}
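// ---------------------------------------------------------------------------
// Editor's illustration, not part of the patch. The public entry point for all
// of the above is unchanged: map a field name to a Type and wrap the reader.
// Point-indexed fields use the new *_POINT constants (and must actually carry
// point data, per the getFieldInfos() filtering above); indexes built with the
// deprecated Legacy*Field classes keep working through the LEGACY_* constants.
// Class and method names here are hypothetical.
import java.io.IOException;
import java.util.Collections;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.uninverting.UninvertingReader;
import org.apache.lucene.uninverting.UninvertingReader.Type;

public class WrapPointFieldSketch {
  public static void sortByIntPointField(Directory dir) throws IOException {
    DirectoryReader ir = UninvertingReader.wrap(DirectoryReader.open(dir),
        Collections.singletonMap("value", Type.INTEGER_POINT)); // LEGACY_INTEGER for old indexes
    IndexSearcher searcher = new IndexSearcher(ir);
    Sort sort = new Sort(new SortField("value", SortField.Type.INT));
    searcher.search(new MatchAllDocsQuery(), 10, sort); // sorts on the uninverted point values
    ir.close();
  }
}
// ---------------------------------------------------------------------------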
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java
index 8c1fae7b646..4861cd35ca2 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java
@@ -353,7 +353,7 @@ public class TestDocTermOrds extends LuceneTestCase {
TestUtil.nextInt(random(), 2, 10));
- final NumericDocValues docIDToID = FieldCache.DEFAULT.getNumerics(r, "id", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+ final NumericDocValues docIDToID = FieldCache.DEFAULT.getNumerics(r, "id", FieldCache.LEGACY_INT_PARSER, false);
/*
for(int docID=0;docID<r.maxDoc();docID++) {
System.out.println("docID=" + docID + " id=" + docIDToID[docID]);
}
*/
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java
expectThrows(IllegalStateException.class, () -> {
- FieldCache.DEFAULT.getNumerics(ar, "binary", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+ FieldCache.DEFAULT.getNumerics(ar, "binary", FieldCache.INT_POINT_PARSER, false);
});
BinaryDocValues binary = FieldCache.DEFAULT.getTerms(ar, "binary", true);
@@ -460,12 +442,12 @@ public class TestFieldCache extends LuceneTestCase {
new DocTermOrds(ar, null, "binary");
});
- Bits bits = FieldCache.DEFAULT.getDocsWithField(ar, "binary");
+ Bits bits = FieldCache.DEFAULT.getDocsWithField(ar, "binary", null);
assertTrue(bits.get(0));
// Sorted type: can be retrieved via getTerms(), getTermsIndex(), getDocTermOrds()
expectThrows(IllegalStateException.class, () -> {
- FieldCache.DEFAULT.getNumerics(ar, "sorted", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+ FieldCache.DEFAULT.getNumerics(ar, "sorted", FieldCache.INT_POINT_PARSER, false);
});
expectThrows(IllegalStateException.class, () -> {
@@ -488,11 +470,11 @@ public class TestFieldCache extends LuceneTestCase {
assertEquals(SortedSetDocValues.NO_MORE_ORDS, sortedSet.nextOrd());
assertEquals(1, sortedSet.getValueCount());
- bits = FieldCache.DEFAULT.getDocsWithField(ar, "sorted");
+ bits = FieldCache.DEFAULT.getDocsWithField(ar, "sorted", null);
assertTrue(bits.get(0));
// Numeric type: can be retrieved via getInts() and so on
- NumericDocValues numeric = FieldCache.DEFAULT.getNumerics(ar, "numeric", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+ NumericDocValues numeric = FieldCache.DEFAULT.getNumerics(ar, "numeric", FieldCache.INT_POINT_PARSER, false);
assertEquals(42, numeric.get(0));
expectThrows(IllegalStateException.class, () -> {
@@ -511,12 +493,12 @@ public class TestFieldCache extends LuceneTestCase {
new DocTermOrds(ar, null, "numeric");
});
- bits = FieldCache.DEFAULT.getDocsWithField(ar, "numeric");
+ bits = FieldCache.DEFAULT.getDocsWithField(ar, "numeric", null);
assertTrue(bits.get(0));
// SortedSet type: can be retrieved via getDocTermOrds()
expectThrows(IllegalStateException.class, () -> {
- FieldCache.DEFAULT.getNumerics(ar, "sortedset", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+ FieldCache.DEFAULT.getNumerics(ar, "sortedset", FieldCache.INT_POINT_PARSER, false);
});
expectThrows(IllegalStateException.class, () -> {
@@ -538,7 +520,7 @@ public class TestFieldCache extends LuceneTestCase {
assertEquals(SortedSetDocValues.NO_MORE_ORDS, sortedSet.nextOrd());
assertEquals(2, sortedSet.getValueCount());
- bits = FieldCache.DEFAULT.getDocsWithField(ar, "sortedset");
+ bits = FieldCache.DEFAULT.getDocsWithField(ar, "sortedset", null);
assertTrue(bits.get(0));
ir.close();
@@ -559,16 +541,16 @@ public class TestFieldCache extends LuceneTestCase {
cache.purgeAllCaches();
assertEquals(0, cache.getCacheEntries().length);
- NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.NUMERIC_UTILS_INT_PARSER, true);
+ NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.INT_POINT_PARSER, true);
assertEquals(0, ints.get(0));
- NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.NUMERIC_UTILS_LONG_PARSER, true);
+ NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LONG_POINT_PARSER, true);
assertEquals(0, longs.get(0));
- NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.NUMERIC_UTILS_FLOAT_PARSER, true);
+ NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.FLOAT_POINT_PARSER, true);
assertEquals(0, floats.get(0));
- NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, true);
+ NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.DOUBLE_POINT_PARSER, true);
assertEquals(0, doubles.get(0));
BinaryDocValues binaries = cache.getTerms(ar, "bogusterms", true);
@@ -584,7 +566,7 @@ public class TestFieldCache extends LuceneTestCase {
sortedSet.setDocument(0);
assertEquals(SortedSetDocValues.NO_MORE_ORDS, sortedSet.nextOrd());
- Bits bits = cache.getDocsWithField(ar, "bogusbits");
+ Bits bits = cache.getDocsWithField(ar, "bogusbits", null);
assertFalse(bits.get(0));
// check that we cached nothing
@@ -617,16 +599,16 @@ public class TestFieldCache extends LuceneTestCase {
cache.purgeAllCaches();
assertEquals(0, cache.getCacheEntries().length);
- NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.NUMERIC_UTILS_INT_PARSER, true);
+ NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.INT_POINT_PARSER, true);
assertEquals(0, ints.get(0));
- NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.NUMERIC_UTILS_LONG_PARSER, true);
+ NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LONG_POINT_PARSER, true);
assertEquals(0, longs.get(0));
- NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.NUMERIC_UTILS_FLOAT_PARSER, true);
+ NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.FLOAT_POINT_PARSER, true);
assertEquals(0, floats.get(0));
- NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, true);
+ NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.DOUBLE_POINT_PARSER, true);
assertEquals(0, doubles.get(0));
BinaryDocValues binaries = cache.getTerms(ar, "bogusterms", true);
@@ -642,7 +624,7 @@ public class TestFieldCache extends LuceneTestCase {
sortedSet.setDocument(0);
assertEquals(SortedSetDocValues.NO_MORE_ORDS, sortedSet.nextOrd());
- Bits bits = cache.getDocsWithField(ar, "bogusbits");
+ Bits bits = cache.getDocsWithField(ar, "bogusbits", null);
assertFalse(bits.get(0));
// check that we cached nothing
@@ -658,8 +640,10 @@ public class TestFieldCache extends LuceneTestCase {
cfg.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg);
Document doc = new Document();
- LegacyLongField field = new LegacyLongField("f", 0L, Store.YES);
+ LongPoint field = new LongPoint("f", 0L);
+ StoredField field2 = new StoredField("f", 0L);
doc.add(field);
+ doc.add(field2);
final long[] values = new long[TestUtil.nextInt(random(), 1, 10)];
for (int i = 0; i < values.length; ++i) {
final long v;
@@ -683,12 +667,13 @@ public class TestFieldCache extends LuceneTestCase {
iw.addDocument(new Document());
} else {
field.setLongValue(v);
+ field2.setLongValue(v);
iw.addDocument(doc);
}
}
iw.forceMerge(1);
final DirectoryReader reader = iw.getReader();
- final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.NUMERIC_UTILS_LONG_PARSER, false);
+ final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.LONG_POINT_PARSER, false);
for (int i = 0; i < values.length; ++i) {
assertEquals(values[i], longs.get(i));
}
@@ -704,7 +689,7 @@ public class TestFieldCache extends LuceneTestCase {
cfg.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg);
Document doc = new Document();
- LegacyIntField field = new LegacyIntField("f", 0, Store.YES);
+ IntPoint field = new IntPoint("f", 0);
doc.add(field);
final int[] values = new int[TestUtil.nextInt(random(), 1, 10)];
for (int i = 0; i < values.length; ++i) {
@@ -734,7 +719,7 @@ public class TestFieldCache extends LuceneTestCase {
}
iw.forceMerge(1);
final DirectoryReader reader = iw.getReader();
- final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+ final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.INT_POINT_PARSER, false);
for (int i = 0; i < values.length; ++i) {
assertEquals(values[i], ints.get(i));
}
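// ---------------------------------------------------------------------------
// Editor's illustration, not part of the patch. The recurring migration in the
// test hunks above: Legacy*Field(..., Store.YES) carried both indexed and
// stored data, while points are index-only, so tests that read values back via
// searcher.doc(...) add a parallel StoredField under the same name. A sketch
// of that pattern (hypothetical helper):
import org.apache.lucene.document.Document;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.StoredField;

class PointPlusStoredFieldSketch {
  static Document numericDoc(String field, int value) {
    Document doc = new Document();
    doc.add(new IntPoint(field, value));    // indexed as a point: searchable/uninvertible
    doc.add(new StoredField(field, value)); // stored copy: retrievable via searcher.doc()
    return doc;
  }
}
// ---------------------------------------------------------------------------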
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java
index a85731ff4d0..0d5584e4544 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java
@@ -18,8 +18,7 @@ package org.apache.lucene.uninverting;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.LegacyIntField;
+import org.apache.lucene.document.IntPoint;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
@@ -42,14 +41,14 @@ public class TestFieldCacheReopen extends LuceneTestCase {
setMergePolicy(newLogMergePolicy(10))
);
Document doc = new Document();
- doc.add(new LegacyIntField("number", 17, Field.Store.NO));
+ doc.add(new IntPoint("number", 17));
writer.addDocument(doc);
writer.commit();
// Open reader1
DirectoryReader r = DirectoryReader.open(dir);
LeafReader r1 = getOnlySegmentReader(r);
- final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(r1, "number", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+ final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(r1, "number", FieldCache.INT_POINT_PARSER, false);
assertEquals(17, ints.get(0));
// Add new segment
@@ -61,7 +60,7 @@ public class TestFieldCacheReopen extends LuceneTestCase {
assertNotNull(r2);
r.close();
LeafReader sub0 = r2.leaves().get(0).reader();
- final NumericDocValues ints2 = FieldCache.DEFAULT.getNumerics(sub0, "number", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+ final NumericDocValues ints2 = FieldCache.DEFAULT.getNumerics(sub0, "number", FieldCache.INT_POINT_PARSER, false);
r2.close();
assertTrue(ints == ints2);
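// ---------------------------------------------------------------------------
// Editor's illustration, not part of the patch. The reopen test above leans on
// field-cache entries being keyed per segment reader: a reopen that carries a
// segment over unchanged also carries over its cached values, hence the
// ints == ints2 identity assertion. A condensed sketch (package-private
// FieldCache again assumed accessible; names hypothetical):
package org.apache.lucene.uninverting;

import java.io.IOException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.NumericDocValues;

class PerSegmentCacheReuseSketch {
  static boolean firstSegmentEntryReused(DirectoryReader before, DirectoryReader after,
      String field) throws IOException {
    NumericDocValues a = FieldCache.DEFAULT.getNumerics(
        before.leaves().get(0).reader(), field, FieldCache.INT_POINT_PARSER, false);
    NumericDocValues b = FieldCache.DEFAULT.getNumerics(
        after.leaves().get(0).reader(), field, FieldCache.INT_POINT_PARSER, false);
    return a == b; // true when the first segment survived the reopen unchanged
  }
}
// ---------------------------------------------------------------------------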
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSanityChecker.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSanityChecker.java
index f7dc0489ca7..f5c62e291c3 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSanityChecker.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSanityChecker.java
@@ -94,11 +94,11 @@ public class TestFieldCacheSanityChecker extends LuceneTestCase {
FieldCache cache = FieldCache.DEFAULT;
cache.purgeAllCaches();
- cache.getNumerics(readerA, "theDouble", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false);
- cache.getNumerics(readerAclone, "theDouble", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false);
- cache.getNumerics(readerB, "theDouble", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false);
+ cache.getNumerics(readerA, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, false);
+ cache.getNumerics(readerAclone, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, false);
+ cache.getNumerics(readerB, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, false);
- cache.getNumerics(readerX, "theInt", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+ cache.getNumerics(readerX, "theInt", FieldCache.LEGACY_INT_PARSER, false);
// // //
@@ -117,7 +117,7 @@ public class TestFieldCacheSanityChecker extends LuceneTestCase {
FieldCache cache = FieldCache.DEFAULT;
cache.purgeAllCaches();
- cache.getNumerics(readerX, "theInt", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+ cache.getNumerics(readerX, "theInt", FieldCache.LEGACY_INT_PARSER, false);
cache.getTerms(readerX, "theInt", false);
// // //
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java
index 717d36424e5..f46bdde0c93 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java
@@ -23,11 +23,16 @@ import java.util.Map;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.LegacyDoubleField;
import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FloatPoint;
+import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LegacyFloatField;
import org.apache.lucene.document.LegacyIntField;
import org.apache.lucene.document.LegacyLongField;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
@@ -449,19 +454,22 @@ public class TestFieldCacheSort extends LuceneTestCase {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
- doc.add(new LegacyIntField("value", 300000, Field.Store.YES));
+ doc.add(new IntPoint("value", 300000));
+ doc.add(new StoredField("value", 300000));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyIntField("value", -1, Field.Store.YES));
+ doc.add(new IntPoint("value", -1));
+ doc.add(new StoredField("value", -1));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyIntField("value", 4, Field.Store.YES));
+ doc.add(new IntPoint("value", 4));
+ doc.add(new StoredField("value", 4));
writer.addDocument(doc);
IndexReader ir = UninvertingReader.wrap(writer.getReader(),
- Collections.singletonMap("value", Type.INTEGER));
+ Collections.singletonMap("value", Type.INTEGER_POINT));
writer.close();
- IndexSearcher searcher = newSearcher(ir);
+ IndexSearcher searcher = newSearcher(ir, false);
Sort sort = new Sort(new SortField("value", SortField.Type.INT));
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
@@ -482,16 +490,18 @@ public class TestFieldCacheSort extends LuceneTestCase {
Document doc = new Document();
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyIntField("value", -1, Field.Store.YES));
+ doc.add(new IntPoint("value", -1));
+ doc.add(new StoredField("value", -1));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyIntField("value", 4, Field.Store.YES));
+ doc.add(new IntPoint("value", 4));
+ doc.add(new StoredField("value", 4));
writer.addDocument(doc);
IndexReader ir = UninvertingReader.wrap(writer.getReader(),
- Collections.singletonMap("value", Type.INTEGER));
+ Collections.singletonMap("value", Type.INTEGER_POINT));
writer.close();
- IndexSearcher searcher = newSearcher(ir);
+ IndexSearcher searcher = newSearcher(ir, false);
Sort sort = new Sort(new SortField("value", SortField.Type.INT));
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
@@ -512,16 +522,18 @@ public class TestFieldCacheSort extends LuceneTestCase {
Document doc = new Document();
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyIntField("value", -1, Field.Store.YES));
+ doc.add(new IntPoint("value", -1));
+ doc.add(new StoredField("value", -1));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyIntField("value", 4, Field.Store.YES));
+ doc.add(new IntPoint("value", 4));
+ doc.add(new StoredField("value", 4));
writer.addDocument(doc);
IndexReader ir = UninvertingReader.wrap(writer.getReader(),
- Collections.singletonMap("value", Type.INTEGER));
+ Collections.singletonMap("value", Type.INTEGER_POINT));
writer.close();
- IndexSearcher searcher = newSearcher(ir);
+ IndexSearcher searcher = newSearcher(ir, false);
SortField sortField = new SortField("value", SortField.Type.INT);
sortField.setMissingValue(Integer.MAX_VALUE);
Sort sort = new Sort(sortField);
@@ -539,6 +551,40 @@ public class TestFieldCacheSort extends LuceneTestCase {
/** Tests sorting on type int in reverse */
public void testIntReverse() throws IOException {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ doc.add(new IntPoint("value", 300000));
+ doc.add(new StoredField("value", 300000));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new IntPoint("value", -1));
+ doc.add(new StoredField("value", -1));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new IntPoint("value", 4));
+ doc.add(new StoredField("value", 4));
+ writer.addDocument(doc);
+ IndexReader ir = UninvertingReader.wrap(writer.getReader(),
+ Collections.singletonMap("value", Type.INTEGER_POINT));
+ writer.close();
+
+ IndexSearcher searcher = newSearcher(ir, false);
+ Sort sort = new Sort(new SortField("value", SortField.Type.INT, true));
+
+ TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+ assertEquals(3, td.totalHits);
+ // reverse numeric order
+ assertEquals("300000", searcher.doc(td.scoreDocs[0].doc).get("value"));
+ assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+ assertEquals("-1", searcher.doc(td.scoreDocs[2].doc).get("value"));
+ TestUtil.checkReader(ir);
+ ir.close();
+ dir.close();
+ }
+
+ /** Tests sorting on type legacy int */
+ public void testLegacyInt() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
@@ -551,7 +597,100 @@ public class TestFieldCacheSort extends LuceneTestCase {
doc.add(new LegacyIntField("value", 4, Field.Store.YES));
writer.addDocument(doc);
IndexReader ir = UninvertingReader.wrap(writer.getReader(),
- Collections.singletonMap("value", Type.INTEGER));
+ Collections.singletonMap("value", Type.LEGACY_INTEGER));
+ writer.close();
+
+ IndexSearcher searcher = newSearcher(ir);
+ Sort sort = new Sort(new SortField("value", SortField.Type.INT));
+
+ TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+ assertEquals(3, td.totalHits);
+ // numeric order
+ assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+ assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+ assertEquals("300000", searcher.doc(td.scoreDocs[2].doc).get("value"));
+ TestUtil.checkReader(ir);
+ ir.close();
+ dir.close();
+ }
+
+ /** Tests sorting on type legacy int with a missing value */
+ public void testLegacyIntMissing() throws IOException {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyIntField("value", -1, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyIntField("value", 4, Field.Store.YES));
+ writer.addDocument(doc);
+ IndexReader ir = UninvertingReader.wrap(writer.getReader(),
+ Collections.singletonMap("value", Type.LEGACY_INTEGER));
+ writer.close();
+
+ IndexSearcher searcher = newSearcher(ir);
+ Sort sort = new Sort(new SortField("value", SortField.Type.INT));
+
+ TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+ assertEquals(3, td.totalHits);
+ // null is treated as 0
+ assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+ assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));
+ assertEquals("4", searcher.doc(td.scoreDocs[2].doc).get("value"));
+ TestUtil.checkReader(ir);
+ ir.close();
+ dir.close();
+ }
+
+ /** Tests sorting on type legacy int, specifying the missing value should be treated as Integer.MAX_VALUE */
+ public void testLegacyIntMissingLast() throws IOException {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyIntField("value", -1, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyIntField("value", 4, Field.Store.YES));
+ writer.addDocument(doc);
+ IndexReader ir = UninvertingReader.wrap(writer.getReader(),
+ Collections.singletonMap("value", Type.LEGACY_INTEGER));
+ writer.close();
+
+ IndexSearcher searcher = newSearcher(ir);
+ SortField sortField = new SortField("value", SortField.Type.INT);
+ sortField.setMissingValue(Integer.MAX_VALUE);
+ Sort sort = new Sort(sortField);
+
+ TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+ assertEquals(3, td.totalHits);
+ // null is treated as Integer.MAX_VALUE
+ assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+ assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+ assertNull(searcher.doc(td.scoreDocs[2].doc).get("value"));
+ TestUtil.checkReader(ir);
+ ir.close();
+ dir.close();
+ }
+
+ /** Tests sorting on type legacy int in reverse */
+ public void testLegacyIntReverse() throws IOException {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ doc.add(new LegacyIntField("value", 300000, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyIntField("value", -1, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyIntField("value", 4, Field.Store.YES));
+ writer.addDocument(doc);
+ IndexReader ir = UninvertingReader.wrap(writer.getReader(),
+ Collections.singletonMap("value", Type.LEGACY_INTEGER));
writer.close();
IndexSearcher searcher = newSearcher(ir);
@@ -573,19 +712,22 @@ public class TestFieldCacheSort extends LuceneTestCase {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
- doc.add(new LegacyLongField("value", 3000000000L, Field.Store.YES));
+ doc.add(new LongPoint("value", 3000000000L));
+ doc.add(new StoredField("value", 3000000000L));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyLongField("value", -1, Field.Store.YES));
+ doc.add(new LongPoint("value", -1));
+ doc.add(new StoredField("value", -1));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyLongField("value", 4, Field.Store.YES));
+ doc.add(new LongPoint("value", 4));
+ doc.add(new StoredField("value", 4));
writer.addDocument(doc);
IndexReader ir = UninvertingReader.wrap(writer.getReader(),
- Collections.singletonMap("value", Type.LONG));
+ Collections.singletonMap("value", Type.LONG_POINT));
writer.close();
- IndexSearcher searcher = newSearcher(ir);
+ IndexSearcher searcher = newSearcher(ir, false);
Sort sort = new Sort(new SortField("value", SortField.Type.LONG));
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
@@ -606,16 +748,18 @@ public class TestFieldCacheSort extends LuceneTestCase {
Document doc = new Document();
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyLongField("value", -1, Field.Store.YES));
+ doc.add(new LongPoint("value", -1));
+ doc.add(new StoredField("value", -1));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyLongField("value", 4, Field.Store.YES));
+ doc.add(new LongPoint("value", 4));
+ doc.add(new StoredField("value", 4));
writer.addDocument(doc);
IndexReader ir = UninvertingReader.wrap(writer.getReader(),
- Collections.singletonMap("value", Type.LONG));
+ Collections.singletonMap("value", Type.LONG_POINT));
writer.close();
- IndexSearcher searcher = newSearcher(ir);
+ IndexSearcher searcher = newSearcher(ir, false);
Sort sort = new Sort(new SortField("value", SortField.Type.LONG));
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
@@ -636,16 +780,18 @@ public class TestFieldCacheSort extends LuceneTestCase {
Document doc = new Document();
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyLongField("value", -1, Field.Store.YES));
+ doc.add(new LongPoint("value", -1));
+ doc.add(new StoredField("value", -1));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyLongField("value", 4, Field.Store.YES));
+ doc.add(new LongPoint("value", 4));
+ doc.add(new StoredField("value", 4));
writer.addDocument(doc);
IndexReader ir = UninvertingReader.wrap(writer.getReader(),
- Collections.singletonMap("value", Type.LONG));
+ Collections.singletonMap("value", Type.LONG_POINT));
writer.close();
- IndexSearcher searcher = newSearcher(ir);
+ IndexSearcher searcher = newSearcher(ir, false);
SortField sortField = new SortField("value", SortField.Type.LONG);
sortField.setMissingValue(Long.MAX_VALUE);
Sort sort = new Sort(sortField);
@@ -663,6 +809,40 @@ public class TestFieldCacheSort extends LuceneTestCase {
/** Tests sorting on type long in reverse */
public void testLongReverse() throws IOException {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ doc.add(new LongPoint("value", 3000000000L));
+ doc.add(new StoredField("value", 3000000000L));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LongPoint("value", -1));
+ doc.add(new StoredField("value", -1));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LongPoint("value", 4));
+ doc.add(new StoredField("value", 4));
+ writer.addDocument(doc);
+ IndexReader ir = UninvertingReader.wrap(writer.getReader(),
+ Collections.singletonMap("value", Type.LONG_POINT));
+ writer.close();
+
+ IndexSearcher searcher = newSearcher(ir, false);
+ Sort sort = new Sort(new SortField("value", SortField.Type.LONG, true));
+
+ TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+ assertEquals(3, td.totalHits);
+ // reverse numeric order
+ assertEquals("3000000000", searcher.doc(td.scoreDocs[0].doc).get("value"));
+ assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+ assertEquals("-1", searcher.doc(td.scoreDocs[2].doc).get("value"));
+ TestUtil.checkReader(ir);
+ ir.close();
+ dir.close();
+ }
+
+ /** Tests sorting on type legacy long */
+ public void testLegacyLong() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
@@ -675,7 +855,100 @@ public class TestFieldCacheSort extends LuceneTestCase {
doc.add(new LegacyLongField("value", 4, Field.Store.YES));
writer.addDocument(doc);
IndexReader ir = UninvertingReader.wrap(writer.getReader(),
- Collections.singletonMap("value", Type.LONG));
+ Collections.singletonMap("value", Type.LEGACY_LONG));
+ writer.close();
+
+ IndexSearcher searcher = newSearcher(ir);
+ Sort sort = new Sort(new SortField("value", SortField.Type.LONG));
+
+ TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+ assertEquals(3, td.totalHits);
+ // numeric order
+ assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+ assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+ assertEquals("3000000000", searcher.doc(td.scoreDocs[2].doc).get("value"));
+ TestUtil.checkReader(ir);
+ ir.close();
+ dir.close();
+ }
+
+ /** Tests sorting on type legacy long with a missing value */
+ public void testLegacyLongMissing() throws IOException {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyLongField("value", -1, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyLongField("value", 4, Field.Store.YES));
+ writer.addDocument(doc);
+ IndexReader ir = UninvertingReader.wrap(writer.getReader(),
+ Collections.singletonMap("value", Type.LEGACY_LONG));
+ writer.close();
+
+ IndexSearcher searcher = newSearcher(ir);
+ Sort sort = new Sort(new SortField("value", SortField.Type.LONG));
+
+ TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+ assertEquals(3, td.totalHits);
+ // null is treated as 0
+ assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+ assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));
+ assertEquals("4", searcher.doc(td.scoreDocs[2].doc).get("value"));
+ TestUtil.checkReader(ir);
+ ir.close();
+ dir.close();
+ }
+
+ /** Tests sorting on type legacy long, specifying the missing value should be treated as Long.MAX_VALUE */
+ public void testLegacyLongMissingLast() throws IOException {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyLongField("value", -1, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyLongField("value", 4, Field.Store.YES));
+ writer.addDocument(doc);
+ IndexReader ir = UninvertingReader.wrap(writer.getReader(),
+ Collections.singletonMap("value", Type.LEGACY_LONG));
+ writer.close();
+
+ IndexSearcher searcher = newSearcher(ir);
+ SortField sortField = new SortField("value", SortField.Type.LONG);
+ sortField.setMissingValue(Long.MAX_VALUE);
+ Sort sort = new Sort(sortField);
+
+ TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+ assertEquals(3, td.totalHits);
+ // null is treated as Long.MAX_VALUE
+ assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+ assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
+ assertNull(searcher.doc(td.scoreDocs[2].doc).get("value"));
+ TestUtil.checkReader(ir);
+ ir.close();
+ dir.close();
+ }
+
+ /** Tests sorting on type legacy long in reverse */
+ public void testLegacyLongReverse() throws IOException {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ doc.add(new LegacyLongField("value", 3000000000L, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyLongField("value", -1, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyLongField("value", 4, Field.Store.YES));
+ writer.addDocument(doc);
+ IndexReader ir = UninvertingReader.wrap(writer.getReader(),
+ Collections.singletonMap("value", Type.LEGACY_LONG));
writer.close();
IndexSearcher searcher = newSearcher(ir);
@@ -697,19 +970,22 @@ public class TestFieldCacheSort extends LuceneTestCase {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
- doc.add(new LegacyFloatField("value", 30.1f, Field.Store.YES));
+ doc.add(new FloatPoint("value", 30.1f));
+ doc.add(new StoredField("value", 30.1f));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyFloatField("value", -1.3f, Field.Store.YES));
+ doc.add(new FloatPoint("value", -1.3f));
+ doc.add(new StoredField("value", -1.3f));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES));
+ doc.add(new FloatPoint("value", 4.2f));
+ doc.add(new StoredField("value", 4.2f));
writer.addDocument(doc);
IndexReader ir = UninvertingReader.wrap(writer.getReader(),
- Collections.singletonMap("value", Type.FLOAT));
+ Collections.singletonMap("value", Type.FLOAT_POINT));
writer.close();
- IndexSearcher searcher = newSearcher(ir);
+ IndexSearcher searcher = newSearcher(ir, false);
Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT));
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
@@ -730,16 +1006,18 @@ public class TestFieldCacheSort extends LuceneTestCase {
Document doc = new Document();
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyFloatField("value", -1.3f, Field.Store.YES));
+ doc.add(new FloatPoint("value", -1.3f));
+ doc.add(new StoredField("value", -1.3f));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES));
+ doc.add(new FloatPoint("value", 4.2f));
+ doc.add(new StoredField("value", 4.2f));
writer.addDocument(doc);
IndexReader ir = UninvertingReader.wrap(writer.getReader(),
- Collections.singletonMap("value", Type.FLOAT));
+ Collections.singletonMap("value", Type.FLOAT_POINT));
writer.close();
- IndexSearcher searcher = newSearcher(ir);
+ IndexSearcher searcher = newSearcher(ir, false);
Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT));
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
@@ -760,16 +1038,18 @@ public class TestFieldCacheSort extends LuceneTestCase {
Document doc = new Document();
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyFloatField("value", -1.3f, Field.Store.YES));
+ doc.add(new FloatPoint("value", -1.3f));
+ doc.add(new StoredField("value", -1.3f));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES));
+ doc.add(new FloatPoint("value", 4.2f));
+ doc.add(new StoredField("value", 4.2f));
writer.addDocument(doc);
IndexReader ir = UninvertingReader.wrap(writer.getReader(),
- Collections.singletonMap("value", Type.FLOAT));
+ Collections.singletonMap("value", Type.FLOAT_POINT));
writer.close();
- IndexSearcher searcher = newSearcher(ir);
+ IndexSearcher searcher = newSearcher(ir, false);
SortField sortField = new SortField("value", SortField.Type.FLOAT);
sortField.setMissingValue(Float.MAX_VALUE);
Sort sort = new Sort(sortField);
@@ -787,6 +1067,40 @@ public class TestFieldCacheSort extends LuceneTestCase {
/** Tests sorting on type float in reverse */
public void testFloatReverse() throws IOException {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ doc.add(new FloatPoint("value", 30.1f));
+ doc.add(new StoredField("value", 30.1f));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new FloatPoint("value", -1.3f));
+ doc.add(new StoredField("value", -1.3f));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new FloatPoint("value", 4.2f));
+ doc.add(new StoredField("value", 4.2f));
+ writer.addDocument(doc);
+ IndexReader ir = UninvertingReader.wrap(writer.getReader(),
+ Collections.singletonMap("value", Type.FLOAT_POINT));
+ writer.close();
+
+ IndexSearcher searcher = newSearcher(ir, false);
+ Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT, true));
+
+ TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+ assertEquals(3, td.totalHits);
+ // reverse numeric order
+ assertEquals("30.1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+ assertEquals("4.2", searcher.doc(td.scoreDocs[1].doc).get("value"));
+ assertEquals("-1.3", searcher.doc(td.scoreDocs[2].doc).get("value"));
+ TestUtil.checkReader(ir);
+ ir.close();
+ dir.close();
+ }
+
+ /** Tests sorting on type legacy float */
+ public void testLegacyFloat() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
@@ -799,7 +1113,100 @@ public class TestFieldCacheSort extends LuceneTestCase {
doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES));
writer.addDocument(doc);
IndexReader ir = UninvertingReader.wrap(writer.getReader(),
- Collections.singletonMap("value", Type.FLOAT));
+ Collections.singletonMap("value", Type.LEGACY_FLOAT));
+ writer.close();
+
+ IndexSearcher searcher = newSearcher(ir);
+ Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT));
+
+ TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+ assertEquals(3, td.totalHits);
+ // numeric order
+ assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
+ assertEquals("4.2", searcher.doc(td.scoreDocs[1].doc).get("value"));
+ assertEquals("30.1", searcher.doc(td.scoreDocs[2].doc).get("value"));
+ TestUtil.checkReader(ir);
+ ir.close();
+ dir.close();
+ }
+
+ /** Tests sorting on type legacy float with a missing value */
+ public void testLegacyFloatMissing() throws IOException {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyFloatField("value", -1.3f, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES));
+ writer.addDocument(doc);
+ IndexReader ir = UninvertingReader.wrap(writer.getReader(),
+ Collections.singletonMap("value", Type.LEGACY_FLOAT));
+ writer.close();
+
+ IndexSearcher searcher = newSearcher(ir);
+ Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT));
+
+ TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+ assertEquals(3, td.totalHits);
+ // null is treated as 0
+ assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
+ assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));
+ assertEquals("4.2", searcher.doc(td.scoreDocs[2].doc).get("value"));
+ TestUtil.checkReader(ir);
+ ir.close();
+ dir.close();
+ }
+
+ /** Tests sorting on type legacy float, specifying the missing value should be treated as Float.MAX_VALUE */
+ public void testLegacyFloatMissingLast() throws IOException {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyFloatField("value", -1.3f, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES));
+ writer.addDocument(doc);
+ IndexReader ir = UninvertingReader.wrap(writer.getReader(),
+ Collections.singletonMap("value", Type.LEGACY_FLOAT));
+ writer.close();
+
+ IndexSearcher searcher = newSearcher(ir);
+ SortField sortField = new SortField("value", SortField.Type.FLOAT);
+ sortField.setMissingValue(Float.MAX_VALUE);
+ Sort sort = new Sort(sortField);
+
+ TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+ assertEquals(3, td.totalHits);
+ // null is treated as Float.MAX_VALUE
+ assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
+ assertEquals("4.2", searcher.doc(td.scoreDocs[1].doc).get("value"));
+ assertNull(searcher.doc(td.scoreDocs[2].doc).get("value"));
+ TestUtil.checkReader(ir);
+ ir.close();
+ dir.close();
+ }
+
+ /** Tests sorting on type legacy float in reverse */
+ public void testLegacyFloatReverse() throws IOException {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ doc.add(new LegacyFloatField("value", 30.1f, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyFloatField("value", -1.3f, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES));
+ writer.addDocument(doc);
+ IndexReader ir = UninvertingReader.wrap(writer.getReader(),
+ Collections.singletonMap("value", Type.LEGACY_FLOAT));
writer.close();
IndexSearcher searcher = newSearcher(ir);
@@ -821,22 +1228,26 @@ public class TestFieldCacheSort extends LuceneTestCase {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
- doc.add(new LegacyDoubleField("value", 30.1, Field.Store.YES));
+ doc.add(new DoublePoint("value", 30.1));
+ doc.add(new StoredField("value", 30.1));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyDoubleField("value", -1.3, Field.Store.YES));
+ doc.add(new DoublePoint("value", -1.3));
+ doc.add(new StoredField("value", -1.3));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyDoubleField("value", 4.2333333333333, Field.Store.YES));
+ doc.add(new DoublePoint("value", 4.2333333333333));
+ doc.add(new StoredField("value", 4.2333333333333));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES));
+ doc.add(new DoublePoint("value", 4.2333333333332));
+ doc.add(new StoredField("value", 4.2333333333332));
writer.addDocument(doc);
IndexReader ir = UninvertingReader.wrap(writer.getReader(),
- Collections.singletonMap("value", Type.DOUBLE));
+ Collections.singletonMap("value", Type.DOUBLE_POINT));
writer.close();
- IndexSearcher searcher = newSearcher(ir);
+ IndexSearcher searcher = newSearcher(ir, false);
Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE));
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
@@ -856,17 +1267,19 @@ public class TestFieldCacheSort extends LuceneTestCase {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
- doc.add(new LegacyDoubleField("value", +0d, Field.Store.YES));
+ doc.add(new DoublePoint("value", +0d));
+ doc.add(new StoredField("value", +0d));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyDoubleField("value", -0d, Field.Store.YES));
+ doc.add(new DoublePoint("value", -0d));
+ doc.add(new StoredField("value", -0d));
writer.addDocument(doc);
doc = new Document();
IndexReader ir = UninvertingReader.wrap(writer.getReader(),
- Collections.singletonMap("value", Type.DOUBLE));
+ Collections.singletonMap("value", Type.DOUBLE_POINT));
writer.close();
- IndexSearcher searcher = newSearcher(ir);
+ IndexSearcher searcher = newSearcher(ir, false);
Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE));
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
@@ -891,19 +1304,22 @@ public class TestFieldCacheSort extends LuceneTestCase {
Document doc = new Document();
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyDoubleField("value", -1.3, Field.Store.YES));
+ doc.add(new DoublePoint("value", -1.3));
+ doc.add(new StoredField("value", -1.3));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyDoubleField("value", 4.2333333333333, Field.Store.YES));
+ doc.add(new DoublePoint("value", 4.2333333333333));
+ doc.add(new StoredField("value", 4.2333333333333));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES));
+ doc.add(new DoublePoint("value", 4.2333333333332));
+ doc.add(new StoredField("value", 4.2333333333332));
writer.addDocument(doc);
IndexReader ir = UninvertingReader.wrap(writer.getReader(),
- Collections.singletonMap("value", Type.DOUBLE));
+ Collections.singletonMap("value", Type.DOUBLE_POINT));
writer.close();
- IndexSearcher searcher = newSearcher(ir);
+ IndexSearcher searcher = newSearcher(ir, false);
Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE));
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
@@ -925,19 +1341,22 @@ public class TestFieldCacheSort extends LuceneTestCase {
Document doc = new Document();
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyDoubleField("value", -1.3, Field.Store.YES));
+ doc.add(new DoublePoint("value", -1.3));
+ doc.add(new StoredField("value", -1.3));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyDoubleField("value", 4.2333333333333, Field.Store.YES));
+ doc.add(new DoublePoint("value", 4.2333333333333));
+ doc.add(new StoredField("value", 4.2333333333333));
writer.addDocument(doc);
doc = new Document();
- doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES));
+ doc.add(new DoublePoint("value", 4.2333333333332));
+ doc.add(new StoredField("value", 4.2333333333332));
writer.addDocument(doc);
IndexReader ir = UninvertingReader.wrap(writer.getReader(),
- Collections.singletonMap("value", Type.DOUBLE));
+ Collections.singletonMap("value", Type.DOUBLE_POINT));
writer.close();
- IndexSearcher searcher = newSearcher(ir);
+ IndexSearcher searcher = newSearcher(ir, false);
SortField sortField = new SortField("value", SortField.Type.DOUBLE);
sortField.setMissingValue(Double.MAX_VALUE);
Sort sort = new Sort(sortField);
@@ -956,6 +1375,45 @@ public class TestFieldCacheSort extends LuceneTestCase {
/** Tests sorting on type double in reverse */
public void testDoubleReverse() throws IOException {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ doc.add(new DoublePoint("value", 30.1));
+ doc.add(new StoredField("value", 30.1));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new DoublePoint("value", -1.3));
+ doc.add(new StoredField("value", -1.3));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new DoublePoint("value", 4.2333333333333));
+ doc.add(new StoredField("value", 4.2333333333333));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new DoublePoint("value", 4.2333333333332));
+ doc.add(new StoredField("value", 4.2333333333332));
+ writer.addDocument(doc);
+ IndexReader ir = UninvertingReader.wrap(writer.getReader(),
+ Collections.singletonMap("value", Type.DOUBLE_POINT));
+ writer.close();
+
+ IndexSearcher searcher = newSearcher(ir, false);
+ Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE, true));
+
+ TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+ assertEquals(4, td.totalHits);
+ // reverse numeric order
+ assertEquals("30.1", searcher.doc(td.scoreDocs[0].doc).get("value"));
+ assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[1].doc).get("value"));
+ assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[2].doc).get("value"));
+ assertEquals("-1.3", searcher.doc(td.scoreDocs[3].doc).get("value"));
+ TestUtil.checkReader(ir);
+ ir.close();
+ dir.close();
+ }
+
+ /** Tests sorting on type legacy double */
+ public void testLegacyDouble() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
@@ -971,7 +1429,145 @@ public class TestFieldCacheSort extends LuceneTestCase {
doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES));
writer.addDocument(doc);
IndexReader ir = UninvertingReader.wrap(writer.getReader(),
- Collections.singletonMap("value", Type.DOUBLE));
+ Collections.singletonMap("value", Type.LEGACY_DOUBLE));
+ writer.close();
+
+ IndexSearcher searcher = newSearcher(ir);
+ Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE));
+
+ TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+ assertEquals(4, td.totalHits);
+ // numeric order
+ assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
+ assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[1].doc).get("value"));
+ assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[2].doc).get("value"));
+ assertEquals("30.1", searcher.doc(td.scoreDocs[3].doc).get("value"));
+ TestUtil.checkReader(ir);
+ ir.close();
+ dir.close();
+ }
+
+ /** Tests sorting on type legacy double with +/- zero */
+ public void testLegacyDoubleSignedZero() throws IOException {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ doc.add(new LegacyDoubleField("value", +0d, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyDoubleField("value", -0d, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ IndexReader ir = UninvertingReader.wrap(writer.getReader(),
+ Collections.singletonMap("value", Type.LEGACY_DOUBLE));
+ writer.close();
+
+ IndexSearcher searcher = newSearcher(ir);
+ Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE));
+
+ TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+ assertEquals(2, td.totalHits);
+ // numeric order
+ double v0 = searcher.doc(td.scoreDocs[0].doc).getField("value").numericValue().doubleValue();
+ double v1 = searcher.doc(td.scoreDocs[1].doc).getField("value").numericValue().doubleValue();
+ assertEquals(0, v0, 0d);
+ assertEquals(0, v1, 0d);
+ // check sign bits
+ assertEquals(1, Double.doubleToLongBits(v0) >>> 63);
+ assertEquals(0, Double.doubleToLongBits(v1) >>> 63);
+ TestUtil.checkReader(ir);
+ ir.close();
+ dir.close();
+ }
+
+ /** Tests sorting on type legacy double with a missing value */
+ public void testLegacyDoubleMissing() throws IOException {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyDoubleField("value", -1.3, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyDoubleField("value", 4.2333333333333, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES));
+ writer.addDocument(doc);
+ IndexReader ir = UninvertingReader.wrap(writer.getReader(),
+ Collections.singletonMap("value", Type.LEGACY_DOUBLE));
+ writer.close();
+
+ IndexSearcher searcher = newSearcher(ir);
+ Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE));
+
+ TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+ assertEquals(4, td.totalHits);
+ // null is treated as 0
+ assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
+ assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));
+ assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[2].doc).get("value"));
+ assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[3].doc).get("value"));
+ TestUtil.checkReader(ir);
+ ir.close();
+ dir.close();
+ }
+
+ /** Tests sorting on type legacy double, specifying the missing value should be treated as Double.MAX_VALUE */
+ public void testLegacyDoubleMissingLast() throws IOException {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyDoubleField("value", -1.3, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyDoubleField("value", 4.2333333333333, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES));
+ writer.addDocument(doc);
+ IndexReader ir = UninvertingReader.wrap(writer.getReader(),
+ Collections.singletonMap("value", Type.LEGACY_DOUBLE));
+ writer.close();
+
+ IndexSearcher searcher = newSearcher(ir);
+ SortField sortField = new SortField("value", SortField.Type.DOUBLE);
+ sortField.setMissingValue(Double.MAX_VALUE);
+ Sort sort = new Sort(sortField);
+
+ TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+ assertEquals(4, td.totalHits);
+ // null treated as Double.MAX_VALUE
+ assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
+ assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[1].doc).get("value"));
+ assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[2].doc).get("value"));
+ assertNull(searcher.doc(td.scoreDocs[3].doc).get("value"));
+ TestUtil.checkReader(ir);
+ ir.close();
+ dir.close();
+ }
+
+ /** Tests sorting on type legacy double in reverse */
+ public void testLegacyDoubleReverse() throws IOException {
+ Directory dir = newDirectory();
+ RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ doc.add(new LegacyDoubleField("value", 30.1, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyDoubleField("value", -1.3, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyDoubleField("value", 4.2333333333333, Field.Store.YES));
+ writer.addDocument(doc);
+ doc = new Document();
+ doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES));
+ writer.addDocument(doc);
+ IndexReader ir = UninvertingReader.wrap(writer.getReader(),
+ Collections.singletonMap("value", Type.LEGACY_DOUBLE));
writer.close();
IndexSearcher searcher = newSearcher(ir);
@@ -1062,7 +1658,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
}
IndexReader r = UninvertingReader.wrap(DirectoryReader.open(w),
- Collections.singletonMap("id", Type.INTEGER));
+ Collections.singletonMap("id", Type.LEGACY_INTEGER));
w.close();
Query q = new TermQuery(new Term("body", "text"));
IndexSearcher s = newSearcher(r);
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java
index 0b6292d3c65..f3bd455e691 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java
@@ -30,7 +30,8 @@ import java.util.Set;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
-import org.apache.lucene.document.LegacyIntField;
+import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexReader;
@@ -118,7 +119,8 @@ public class TestFieldCacheSortRandom extends LuceneTestCase {
docValues.add(null);
}
- doc.add(new LegacyIntField("id", numDocs, Field.Store.YES));
+ doc.add(new IntPoint("id", numDocs));
+ doc.add(new StoredField("id", numDocs));
writer.addDocument(doc);
numDocs++;
@@ -130,7 +132,7 @@ public class TestFieldCacheSortRandom extends LuceneTestCase {
Map<String,Type> mapping = new HashMap<>();
mapping.put("stringdv", Type.SORTED);
- mapping.put("id", Type.INTEGER);
+ mapping.put("id", Type.INTEGER_POINT);
final IndexReader r = UninvertingReader.wrap(writer.getReader(), mapping);
writer.close();
if (VERBOSE) {
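
Taken together, these hunks show the indexing-side migration the patch expects: the single LegacyIntField becomes an IntPoint (for the indexed values) plus a StoredField (points store nothing), and the uninverting map moves to the point-aware type. A minimal sketch of the pattern, with the field name and writer as illustrative stand-ins rather than code from the patch:

    // index an int both as a 1D point and as a stored value
    Document doc = new Document();
    doc.add(new IntPoint("id", 42));
    doc.add(new StoredField("id", 42));
    writer.addDocument(doc);

    // uninvert the point values into per-document numerics at search time
    Map<String,Type> mapping = new HashMap<>();
    mapping.put("id", Type.INTEGER_POINT);
    IndexReader reader = UninvertingReader.wrap(writer.getReader(), mapping);
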
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java
index 1b1452289cc..23b7d0c7a91 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java
@@ -458,8 +458,8 @@ public class TestFieldCacheVsDocValues extends LuceneTestCase {
DirectoryReader ir = DirectoryReader.open(dir);
for (LeafReaderContext context : ir.leaves()) {
LeafReader r = context.reader();
- Bits expected = FieldCache.DEFAULT.getDocsWithField(r, "indexed");
- Bits actual = FieldCache.DEFAULT.getDocsWithField(r, "dv");
+ Bits expected = FieldCache.DEFAULT.getDocsWithField(r, "indexed", null);
+ Bits actual = FieldCache.DEFAULT.getDocsWithField(r, "dv", null);
assertEquals(expected, actual);
}
ir.close();
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java
index 9b05ee11201..e716419de7c 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheWithThreads.java
@@ -42,6 +42,7 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
+// TODO: what happened to this test... it's not actually uninverting?
public class TestFieldCacheWithThreads extends LuceneTestCase {
public void test() throws Exception {
@@ -83,7 +84,7 @@ public class TestFieldCacheWithThreads extends LuceneTestCase {
public void run() {
try {
//NumericDocValues ndv = ar.getNumericDocValues("number");
- NumericDocValues ndv = FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_LONG_PARSER, false);
+ NumericDocValues ndv = FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.LONG_POINT_PARSER, false);
//BinaryDocValues bdv = ar.getBinaryDocValues("bytes");
BinaryDocValues bdv = FieldCache.DEFAULT.getTerms(ar, "bytes", false);
SortedDocValues sdv = FieldCache.DEFAULT.getTermsIndex(ar, "sorted");
@@ -93,16 +94,16 @@ public class TestFieldCacheWithThreads extends LuceneTestCase {
int docID = threadRandom.nextInt(numDocs);
switch(threadRandom.nextInt(4)) {
case 0:
- assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_INT_PARSER, false).get(docID));
+ assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.INT_POINT_PARSER, false).get(docID));
break;
case 1:
- assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_LONG_PARSER, false).get(docID));
+ assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.LONG_POINT_PARSER, false).get(docID));
break;
case 2:
- assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_FLOAT_PARSER, false).get(docID));
+ assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.FLOAT_POINT_PARSER, false).get(docID));
break;
case 3:
- assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false).get(docID));
+ assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.DOUBLE_POINT_PARSER, false).get(docID));
break;
}
BytesRef term = bdv.get(docID);
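
The parser substitutions above are mechanical: FieldCache.INT_POINT_PARSER, LONG_POINT_PARSER, FLOAT_POINT_PARSER and DOUBLE_POINT_PARSER take over from the NUMERIC_UTILS_* parsers once the field is indexed with points, while the old parsers survive under LEGACY_* names (exercised by the new TestLegacyFieldCache below). A sketch of a single lookup, assuming a point-indexed "number" field and a LeafReader ar:

    // numeric parsers expose values as raw long bits through NumericDocValues
    NumericDocValues ndv = FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.LONG_POINT_PARSER, false);
    long value = ndv.get(docID);
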
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java
new file mode 100644
index 00000000000..c4ef1c4f4b7
--- /dev/null
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestLegacyFieldCache.java
@@ -0,0 +1,498 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.uninverting;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+
+import java.util.concurrent.CyclicBarrier;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.BinaryDocValuesField;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.LegacyDoubleField;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.LegacyFloatField;
+import org.apache.lucene.document.LegacyIntField;
+import org.apache.lucene.document.LegacyLongField;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedDocValuesField;
+import org.apache.lucene.document.SortedSetDocValuesField;
+import org.apache.lucene.document.StoredField;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.SlowCompositeReaderWrapper;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.LegacyNumericUtils;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+/** random assortment of tests against legacy numerics */
+public class TestLegacyFieldCache extends LuceneTestCase {
+ private static LeafReader reader;
+ private static int NUM_DOCS;
+ private static Directory directory;
+
+ @BeforeClass
+ public static void beforeClass() throws Exception {
+ NUM_DOCS = atLeast(500);
+ directory = newDirectory();
+ RandomIndexWriter writer= new RandomIndexWriter(random(), directory, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
+ long theLong = Long.MAX_VALUE;
+ double theDouble = Double.MAX_VALUE;
+ int theInt = Integer.MAX_VALUE;
+ float theFloat = Float.MAX_VALUE;
+ if (VERBOSE) {
+ System.out.println("TEST: setUp");
+ }
+ for (int i = 0; i < NUM_DOCS; i++){
+ Document doc = new Document();
+ doc.add(new LegacyLongField("theLong", theLong--, Field.Store.NO));
+ doc.add(new LegacyDoubleField("theDouble", theDouble--, Field.Store.NO));
+ doc.add(new LegacyIntField("theInt", theInt--, Field.Store.NO));
+ doc.add(new LegacyFloatField("theFloat", theFloat--, Field.Store.NO));
+ if (i%2 == 0) {
+ doc.add(new LegacyIntField("sparse", i, Field.Store.NO));
+ }
+
+ if (i%2 == 0) {
+ doc.add(new LegacyIntField("numInt", i, Field.Store.NO));
+ }
+ writer.addDocument(doc);
+ }
+ IndexReader r = writer.getReader();
+ reader = SlowCompositeReaderWrapper.wrap(r);
+ TestUtil.checkReader(reader);
+ writer.close();
+ }
+
+ @AfterClass
+ public static void afterClass() throws Exception {
+ reader.close();
+ reader = null;
+ directory.close();
+ directory = null;
+ }
+
+ public void testInfoStream() throws Exception {
+ try {
+ FieldCache cache = FieldCache.DEFAULT;
+ ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
+ cache.setInfoStream(new PrintStream(bos, false, IOUtils.UTF_8));
+ cache.getNumerics(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, false);
+ cache.getNumerics(reader, "theDouble", new FieldCache.Parser() {
+ @Override
+ public TermsEnum termsEnum(Terms terms) throws IOException {
+ return LegacyNumericUtils.filterPrefixCodedLongs(terms.iterator());
+ }
+ @Override
+ public long parseValue(BytesRef term) {
+ int val = (int) LegacyNumericUtils.prefixCodedToLong(term);
+ if (val<0) val ^= 0x7fffffff;
+ return val;
+ }
+ }, false);
+ assertTrue(bos.toString(IOUtils.UTF_8).indexOf("WARNING") != -1);
+ } finally {
+ FieldCache.DEFAULT.setInfoStream(null);
+ FieldCache.DEFAULT.purgeAllCaches();
+ }
+ }
+
+ public void test() throws IOException {
+ FieldCache cache = FieldCache.DEFAULT;
+ NumericDocValues doubles = cache.getNumerics(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, random().nextBoolean());
+ assertSame("Second request to cache return same array", doubles, cache.getNumerics(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, random().nextBoolean()));
+ for (int i = 0; i < NUM_DOCS; i++) {
+ assertEquals(Double.doubleToLongBits(Double.MAX_VALUE - i), doubles.get(i));
+ }
+
+ NumericDocValues longs = cache.getNumerics(reader, "theLong", FieldCache.LEGACY_LONG_PARSER, random().nextBoolean());
+ assertSame("Second request to cache return same array", longs, cache.getNumerics(reader, "theLong", FieldCache.LEGACY_LONG_PARSER, random().nextBoolean()));
+ for (int i = 0; i < NUM_DOCS; i++) {
+ assertEquals(Long.MAX_VALUE - i, longs.get(i));
+ }
+
+ NumericDocValues ints = cache.getNumerics(reader, "theInt", FieldCache.LEGACY_INT_PARSER, random().nextBoolean());
+ assertSame("Second request to cache return same array", ints, cache.getNumerics(reader, "theInt", FieldCache.LEGACY_INT_PARSER, random().nextBoolean()));
+ for (int i = 0; i < NUM_DOCS; i++) {
+ assertEquals(Integer.MAX_VALUE - i, ints.get(i));
+ }
+
+ NumericDocValues floats = cache.getNumerics(reader, "theFloat", FieldCache.LEGACY_FLOAT_PARSER, random().nextBoolean());
+ assertSame("Second request to cache return same array", floats, cache.getNumerics(reader, "theFloat", FieldCache.LEGACY_FLOAT_PARSER, random().nextBoolean()));
+ for (int i = 0; i < NUM_DOCS; i++) {
+ assertEquals(Float.floatToIntBits(Float.MAX_VALUE - i), floats.get(i));
+ }
+
+ Bits docsWithField = cache.getDocsWithField(reader, "theLong", null);
+ assertSame("Second request to cache return same array", docsWithField, cache.getDocsWithField(reader, "theLong", null));
+ assertTrue("docsWithField(theLong) must be class Bits.MatchAllBits", docsWithField instanceof Bits.MatchAllBits);
+ assertTrue("docsWithField(theLong) Size: " + docsWithField.length() + " is not: " + NUM_DOCS, docsWithField.length() == NUM_DOCS);
+ for (int i = 0; i < docsWithField.length(); i++) {
+ assertTrue(docsWithField.get(i));
+ }
+
+ docsWithField = cache.getDocsWithField(reader, "sparse", null);
+ assertSame("Second request to cache return same array", docsWithField, cache.getDocsWithField(reader, "sparse", null));
+ assertFalse("docsWithField(sparse) must not be class Bits.MatchAllBits", docsWithField instanceof Bits.MatchAllBits);
+ assertTrue("docsWithField(sparse) Size: " + docsWithField.length() + " is not: " + NUM_DOCS, docsWithField.length() == NUM_DOCS);
+ for (int i = 0; i < docsWithField.length(); i++) {
+ assertEquals(i%2 == 0, docsWithField.get(i));
+ }
+
+ FieldCache.DEFAULT.purgeByCacheKey(reader.getCoreCacheKey());
+ }
+
+ public void testEmptyIndex() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriter writer= new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setMaxBufferedDocs(500));
+ writer.close();
+ IndexReader r = DirectoryReader.open(dir);
+ LeafReader reader = SlowCompositeReaderWrapper.wrap(r);
+ TestUtil.checkReader(reader);
+ FieldCache.DEFAULT.getTerms(reader, "foobar", true);
+ FieldCache.DEFAULT.getTermsIndex(reader, "foobar");
+ FieldCache.DEFAULT.purgeByCacheKey(reader.getCoreCacheKey());
+ r.close();
+ dir.close();
+ }
+
+ public void testDocsWithField() throws Exception {
+ FieldCache cache = FieldCache.DEFAULT;
+ cache.purgeAllCaches();
+ assertEquals(0, cache.getCacheEntries().length);
+ cache.getNumerics(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER, true);
+
+ // The double[] takes one slot, and docsWithField should also
+ // have been populated:
+ assertEquals(2, cache.getCacheEntries().length);
+ Bits bits = cache.getDocsWithField(reader, "theDouble", FieldCache.LEGACY_DOUBLE_PARSER);
+
+ // No new entries should appear:
+ assertEquals(2, cache.getCacheEntries().length);
+ assertTrue(bits instanceof Bits.MatchAllBits);
+
+ NumericDocValues ints = cache.getNumerics(reader, "sparse", FieldCache.LEGACY_INT_PARSER, true);
+ assertEquals(4, cache.getCacheEntries().length);
+ Bits docsWithField = cache.getDocsWithField(reader, "sparse", FieldCache.LEGACY_INT_PARSER);
+ assertEquals(4, cache.getCacheEntries().length);
+ for (int i = 0; i < docsWithField.length(); i++) {
+ if (i%2 == 0) {
+ assertTrue(docsWithField.get(i));
+ assertEquals(i, ints.get(i));
+ } else {
+ assertFalse(docsWithField.get(i));
+ }
+ }
+
+ NumericDocValues numInts = cache.getNumerics(reader, "numInt", FieldCache.LEGACY_INT_PARSER, random().nextBoolean());
+ docsWithField = cache.getDocsWithField(reader, "numInt", FieldCache.LEGACY_INT_PARSER);
+ for (int i = 0; i < docsWithField.length(); i++) {
+ if (i%2 == 0) {
+ assertTrue(docsWithField.get(i));
+ assertEquals(i, numInts.get(i));
+ } else {
+ assertFalse(docsWithField.get(i));
+ }
+ }
+ }
+
+ public void testGetDocsWithFieldThreadSafety() throws Exception {
+ final FieldCache cache = FieldCache.DEFAULT;
+ cache.purgeAllCaches();
+
+ int NUM_THREADS = 3;
+ Thread[] threads = new Thread[NUM_THREADS];
+ final AtomicBoolean failed = new AtomicBoolean();
+ final AtomicInteger iters = new AtomicInteger();
+ final int NUM_ITER = 200 * RANDOM_MULTIPLIER;
+ final CyclicBarrier restart = new CyclicBarrier(NUM_THREADS,
+ new Runnable() {
+ @Override
+ public void run() {
+ cache.purgeAllCaches();
+ iters.incrementAndGet();
+ }
+ });
+ for(int threadIDX=0;threadIDX<NUM_THREADS;threadIDX++) {
+ threads[threadIDX] = new Thread() {
+ @Override
+ public void run() {
+ try {
+ while(!failed.get()) {
+ final int op = random().nextInt(3);
+ if (op == 0) {
+ // Purge all caches & resume, once all threads get here:
+ restart.await();
+ if (iters.get() >= NUM_ITER) {
+ break;
+ }
+ } else if (op == 1) {
+ Bits docsWithField = cache.getDocsWithField(reader, "sparse", null);
+ for (int i = 0; i < docsWithField.length(); i++) {
+ assertEquals(i%2 == 0, docsWithField.get(i));
+ }
+ } else {
+ NumericDocValues ints = cache.getNumerics(reader, "sparse", FieldCache.LEGACY_INT_PARSER, true);
+ Bits docsWithField = cache.getDocsWithField(reader, "sparse", null);
+ for (int i = 0; i < docsWithField.length(); i++) {
+ if (i%2 == 0) {
+ assertTrue(docsWithField.get(i));
+ assertEquals(i, ints.get(i));
+ } else {
+ assertFalse(docsWithField.get(i));
+ }
+ }
+ }
+ }
+ } catch (Throwable t) {
+ failed.set(true);
+ restart.reset();
+ throw new RuntimeException(t);
+ }
+ }
+ };
+ threads[threadIDX].start();
+ }
+
+ for(int threadIDX=0;threadIDX<NUM_THREADS;threadIDX++) {
+ threads[threadIDX].join();
+ }
+ assertFalse(failed.get());
+ }
+
+ public void testDocValuesIntegration() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriterConfig iwc = newIndexWriterConfig(null);
+ RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
+ Document doc = new Document();
+ doc.add(new BinaryDocValuesField("binary", new BytesRef("binary value")));
+ doc.add(new SortedDocValuesField("sorted", new BytesRef("sorted value")));
+ doc.add(new NumericDocValuesField("numeric", 42));
+ doc.add(new SortedSetDocValuesField("sortedset", new BytesRef("sortedset value 1")));
+ doc.add(new SortedSetDocValuesField("sortedset", new BytesRef("sortedset value 2")));
+ iw.addDocument(doc);
+ DirectoryReader ir = iw.getReader();
+ iw.close();
+ LeafReader ar = getOnlySegmentReader(ir);
+
+ // Binary type: can be retrieved via getTerms()
+ expectThrows(IllegalStateException.class, () -> {
+ FieldCache.DEFAULT.getNumerics(ar, "binary", FieldCache.LEGACY_INT_PARSER, false);
+ });
+
+ // Sorted type: can be retrieved via getTerms(), getTermsIndex(), getDocTermOrds()
+ expectThrows(IllegalStateException.class, () -> {
+ FieldCache.DEFAULT.getNumerics(ar, "sorted", FieldCache.LEGACY_INT_PARSER, false);
+ });
+
+ // Numeric type: can be retrieved via getInts() and so on
+ NumericDocValues numeric = FieldCache.DEFAULT.getNumerics(ar, "numeric", FieldCache.LEGACY_INT_PARSER, false);
+ assertEquals(42, numeric.get(0));
+
+ // SortedSet type: can be retrieved via getDocTermOrds()
+ expectThrows(IllegalStateException.class, () -> {
+ FieldCache.DEFAULT.getNumerics(ar, "sortedset", FieldCache.LEGACY_INT_PARSER, false);
+ });
+
+ ir.close();
+ dir.close();
+ }
+
+ public void testNonexistantFields() throws Exception {
+ Directory dir = newDirectory();
+ RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ iw.addDocument(doc);
+ DirectoryReader ir = iw.getReader();
+ iw.close();
+
+ LeafReader ar = getOnlySegmentReader(ir);
+
+ final FieldCache cache = FieldCache.DEFAULT;
+ cache.purgeAllCaches();
+ assertEquals(0, cache.getCacheEntries().length);
+
+ NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.LEGACY_INT_PARSER, true);
+ assertEquals(0, ints.get(0));
+
+ NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LEGACY_LONG_PARSER, true);
+ assertEquals(0, longs.get(0));
+
+ NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.LEGACY_FLOAT_PARSER, true);
+ assertEquals(0, floats.get(0));
+
+ NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.LEGACY_DOUBLE_PARSER, true);
+ assertEquals(0, doubles.get(0));
+
+ // check that we cached nothing
+ assertEquals(0, cache.getCacheEntries().length);
+ ir.close();
+ dir.close();
+ }
+
+ public void testNonIndexedFields() throws Exception {
+ Directory dir = newDirectory();
+ RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
+ Document doc = new Document();
+ doc.add(new StoredField("bogusbytes", "bogus"));
+ doc.add(new StoredField("bogusshorts", "bogus"));
+ doc.add(new StoredField("bogusints", "bogus"));
+ doc.add(new StoredField("boguslongs", "bogus"));
+ doc.add(new StoredField("bogusfloats", "bogus"));
+ doc.add(new StoredField("bogusdoubles", "bogus"));
+ doc.add(new StoredField("bogusbits", "bogus"));
+ iw.addDocument(doc);
+ DirectoryReader ir = iw.getReader();
+ iw.close();
+
+ LeafReader ar = getOnlySegmentReader(ir);
+
+ final FieldCache cache = FieldCache.DEFAULT;
+ cache.purgeAllCaches();
+ assertEquals(0, cache.getCacheEntries().length);
+
+ NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.LEGACY_INT_PARSER, true);
+ assertEquals(0, ints.get(0));
+
+ NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LEGACY_LONG_PARSER, true);
+ assertEquals(0, longs.get(0));
+
+ NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.LEGACY_FLOAT_PARSER, true);
+ assertEquals(0, floats.get(0));
+
+ NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.LEGACY_DOUBLE_PARSER, true);
+ assertEquals(0, doubles.get(0));
+
+ // check that we cached nothing
+ assertEquals(0, cache.getCacheEntries().length);
+ ir.close();
+ dir.close();
+ }
+
+ // Make sure that the use of GrowableWriter doesn't prevent using the full long range
+ public void testLongFieldCache() throws IOException {
+ Directory dir = newDirectory();
+ IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random()));
+ cfg.setMergePolicy(newLogMergePolicy());
+ RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg);
+ Document doc = new Document();
+ LegacyLongField field = new LegacyLongField("f", 0L, Store.YES);
+ doc.add(field);
+ final long[] values = new long[TestUtil.nextInt(random(), 1, 10)];
+ for (int i = 0; i < values.length; ++i) {
+ final long v;
+ switch (random().nextInt(10)) {
+ case 0:
+ v = Long.MIN_VALUE;
+ break;
+ case 1:
+ v = 0;
+ break;
+ case 2:
+ v = Long.MAX_VALUE;
+ break;
+ default:
+ v = TestUtil.nextLong(random(), -10, 10);
+ break;
+ }
+ values[i] = v;
+ if (v == 0 && random().nextBoolean()) {
+ // missing
+ iw.addDocument(new Document());
+ } else {
+ field.setLongValue(v);
+ iw.addDocument(doc);
+ }
+ }
+ iw.forceMerge(1);
+ final DirectoryReader reader = iw.getReader();
+ final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.LEGACY_LONG_PARSER, false);
+ for (int i = 0; i < values.length; ++i) {
+ assertEquals(values[i], longs.get(i));
+ }
+ reader.close();
+ iw.close();
+ dir.close();
+ }
+
+ // Make sure that the use of GrowableWriter doesn't prevent using the full int range
+ public void testIntFieldCache() throws IOException {
+ Directory dir = newDirectory();
+ IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random()));
+ cfg.setMergePolicy(newLogMergePolicy());
+ RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg);
+ Document doc = new Document();
+ LegacyIntField field = new LegacyIntField("f", 0, Store.YES);
+ doc.add(field);
+ final int[] values = new int[TestUtil.nextInt(random(), 1, 10)];
+ for (int i = 0; i < values.length; ++i) {
+ final int v;
+ switch (random().nextInt(10)) {
+ case 0:
+ v = Integer.MIN_VALUE;
+ break;
+ case 1:
+ v = 0;
+ break;
+ case 2:
+ v = Integer.MAX_VALUE;
+ break;
+ default:
+ v = TestUtil.nextInt(random(), -10, 10);
+ break;
+ }
+ values[i] = v;
+ if (v == 0 && random().nextBoolean()) {
+ // missing
+ iw.addDocument(new Document());
+ } else {
+ field.setIntValue(v);
+ iw.addDocument(doc);
+ }
+ }
+ iw.forceMerge(1);
+ final DirectoryReader reader = iw.getReader();
+ final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlySegmentReader(reader), "f", FieldCache.LEGACY_INT_PARSER, false);
+ for (int i = 0; i < values.length; ++i) {
+ assertEquals(values[i], ints.get(i));
+ }
+ reader.close();
+ iw.close();
+ dir.close();
+ }
+
+}
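
One API change the new test leans on throughout: getDocsWithField now takes a Parser as its third argument. The legacy tests pass null (or a LEGACY_* parser) to keep the terms-based behavior, while a point parser is what directs the cache at point values. Illustrative calls, assuming a LeafReader named reader:

    Bits viaTerms = FieldCache.DEFAULT.getDocsWithField(reader, "theLong", null);
    Bits viaPoints = FieldCache.DEFAULT.getDocsWithField(reader, "number", FieldCache.LONG_POINT_PARSER);
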
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java
index bc85db4b371..a0cddf88c6b 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java
@@ -96,9 +96,9 @@ public class TestNumericTerms32 extends LuceneTestCase {
}
Map<String,Type> map = new HashMap<>();
- map.put("field2", Type.INTEGER);
- map.put("field4", Type.INTEGER);
- map.put("field8", Type.INTEGER);
+ map.put("field2", Type.LEGACY_INTEGER);
+ map.put("field4", Type.LEGACY_INTEGER);
+ map.put("field8", Type.LEGACY_INTEGER);
reader = UninvertingReader.wrap(writer.getReader(), map);
searcher=newSearcher(reader);
writer.close();
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java
index d9fcc92eedd..0724d86feeb 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java
@@ -100,10 +100,10 @@ public class TestNumericTerms64 extends LuceneTestCase {
writer.addDocument(doc);
}
Map<String,Type> map = new HashMap<>();
- map.put("field2", Type.LONG);
- map.put("field4", Type.LONG);
- map.put("field6", Type.LONG);
- map.put("field8", Type.LONG);
+ map.put("field2", Type.LEGACY_LONG);
+ map.put("field4", Type.LEGACY_LONG);
+ map.put("field6", Type.LEGACY_LONG);
+ map.put("field8", Type.LEGACY_LONG);
reader = UninvertingReader.wrap(writer.getReader(), map);
searcher=newSearcher(reader);
writer.close();
diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java
index 99df329284d..0a1cf3d4048 100644
--- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java
+++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java
@@ -363,8 +363,9 @@ public class TestUninvertingReader extends LuceneTestCase {
iw.close();
Map<String,Type> uninvertingMap = new HashMap<>();
- uninvertingMap.put("int", Type.INTEGER);
- uninvertingMap.put("dv", Type.INTEGER);
+ uninvertingMap.put("int", Type.LEGACY_INTEGER);
+ uninvertingMap.put("dv", Type.LEGACY_INTEGER);
+ uninvertingMap.put("dint", Type.INTEGER_POINT);
DirectoryReader ir = UninvertingReader.wrap(DirectoryReader.open(dir),
uninvertingMap);
@@ -376,6 +377,7 @@ public class TestUninvertingReader extends LuceneTestCase {
assertEquals(0, intFInfo.getPointNumBytes());
FieldInfo dintFInfo = leafReader.getFieldInfos().fieldInfo("dint");
+ assertEquals(DocValuesType.NUMERIC, dintFInfo.getDocValuesType());
assertEquals(1, dintFInfo.getPointDimensionCount());
assertEquals(4, dintFInfo.getPointNumBytes());
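
The added assertion documents what wrapping does to FieldInfos: a field mapped as INTEGER_POINT advertises NUMERIC doc values while its real point metadata (one dimension of four bytes) passes through untouched. Roughly, for a wrapped leaf (names as in the test):

    FieldInfo fi = leafReader.getFieldInfos().fieldInfo("dint");
    // the doc values type is synthesized by UninvertingReader; the point metadata is the index's own
    assertEquals(DocValuesType.NUMERIC, fi.getDocValuesType());
    assertEquals(1, fi.getPointDimensionCount());
    assertEquals(Integer.BYTES, fi.getPointNumBytes());
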
diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java
index 529e98bc558..8ccb9afdbe5 100644
--- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java
+++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java
@@ -73,8 +73,8 @@ public abstract class SpatialTestCase extends LuceneTestCase {
super.setUp();
// TODO: change this module to index docvalues instead of uninverting
uninvertMap.clear();
- uninvertMap.put("pointvector__x", Type.DOUBLE);
- uninvertMap.put("pointvector__y", Type.DOUBLE);
+ uninvertMap.put("pointvector__x", Type.LEGACY_DOUBLE);
+ uninvertMap.put("pointvector__y", Type.LEGACY_DOUBLE);
directory = newDirectory();
final Random random = random();
diff --git a/solr/core/src/java/org/apache/solr/schema/EnumField.java b/solr/core/src/java/org/apache/solr/schema/EnumField.java
index e1fb42057be..cbf1d4eb9e8 100644
--- a/solr/core/src/java/org/apache/solr/schema/EnumField.java
+++ b/solr/core/src/java/org/apache/solr/schema/EnumField.java
@@ -194,7 +194,7 @@ public class EnumField extends PrimitiveFieldType {
if (sf.multiValued()) {
return Type.SORTED_SET_INTEGER;
} else {
- return Type.INTEGER;
+ return Type.LEGACY_INTEGER;
}
}
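
EnumField shows the shape of the Solr-side change: multivalued fields still uninvert as SORTED_SET_INTEGER, while single-valued ones now name the deprecated LEGACY_INTEGER explicitly, since enum and trie fields remain encoded with legacy numerics. A hypothetical caller-side check (the field type and schema field are illustrative):

    // a single-valued enum field resolves to the deprecated legacy type
    Type t = enumFieldType.getUninversionType(schemaField);
    assert t == Type.LEGACY_INTEGER;
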
diff --git a/solr/core/src/java/org/apache/solr/schema/TrieField.java b/solr/core/src/java/org/apache/solr/schema/TrieField.java
index 572bf888470..c4899a10604 100644
--- a/solr/core/src/java/org/apache/solr/schema/TrieField.java
+++ b/solr/core/src/java/org/apache/solr/schema/TrieField.java
@@ -203,14 +203,14 @@ public class TrieField extends PrimitiveFieldType {
} else {
switch (type) {
case INTEGER:
- return Type.INTEGER;
+ return Type.LEGACY_INTEGER;
case LONG:
case DATE:
- return Type.LONG;
+ return Type.LEGACY_LONG;
case FLOAT:
- return Type.FLOAT;
+ return Type.LEGACY_FLOAT;
case DOUBLE:
- return Type.DOUBLE;
+ return Type.LEGACY_DOUBLE;
default:
throw new AssertionError();
}