diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index 468cbd627c0..42d161677b5 100644 --- a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -39,25 +39,28 @@ New Features * LUCENE-6825: Add low-level support for block-KD trees (Mike McCandless) -* LUCENE-6852: Add support for dimensionally indexed values to index, - document and codec APIs, including a simple text implementation. - (Mike McCandless) +* LUCENE-6852, LUCENE-6975: Add support for points (dimensionally + indexed values) to index, document and codec APIs, including a + simple text implementation. (Mike McCandless) -* LUCENE-6861: Create Lucene60Codec, supporting dimensional values. +* LUCENE-6861: Create Lucene60Codec, supporting points. (Mike McCandless) * LUCENE-6879: Allow to define custom CharTokenizer instances without subclassing using Java 8 lambdas or method references. (Uwe Schindler) -* LUCENE-6881: Cutover all BKD implementations to dimensional values +* LUCENE-6881: Cutover all BKD implementations to points (Mike McCandless) * LUCENE-6837: Add N-best output support to JapaneseTokenizer. (Hiroharu Konno via Christian Moen) -* LUCENE-6962: Add per-dimension min/max to dimensional values +* LUCENE-6962: Add per-dimension min/max to points (Mike McCandless) +* LUCENE-6975: Add ExactPointQuery, to match a single N-dimensional + point (Robert Muir, Mike McCandless) + API Changes * LUCENE-6067: Accountable.getChildResources has a default @@ -82,18 +85,18 @@ API Changes McCandless) * LUCENE-6917: Deprecate and rename NumericXXX classes to - LegacyNumericXXX in favor of dimensional values (Mike McCandless) + LegacyNumericXXX in favor of points (Mike McCandless) * LUCENE-6947: SortField.missingValue is now protected. You can read its value using the new SortField.getMissingValue getter. 
(Adrien Grand) Optimizations -* LUCENE-6891: Use prefix coding when writing dimensional values in +* LUCENE-6891: Use prefix coding when writing points in each leaf block in the default codec, to reduce the index size (Mike McCandless) -* LUCENE-6901: Optimize dimensional values indexing: use faster +* LUCENE-6901: Optimize points indexing: use faster IntroSorter instead of InPlaceMergeSorter, and specialize 1D merging to merge sort the already sorted segments instead of re-indexing (Mike McCandless) diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene50/Lucene50Codec.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene50/Lucene50Codec.java index faf46d03105..95796745f3d 100644 --- a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene50/Lucene50Codec.java +++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene50/Lucene50Codec.java @@ -21,7 +21,7 @@ import java.util.Objects; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.CompoundFormat; -import org.apache.lucene.codecs.DimensionalFormat; +import org.apache.lucene.codecs.PointFormat; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.FilterCodec; @@ -154,8 +154,8 @@ public class Lucene50Codec extends Codec { } @Override - public final DimensionalFormat dimensionalFormat() { - return DimensionalFormat.EMPTY; + public final PointFormat pointFormat() { + return PointFormat.EMPTY; } private final PostingsFormat defaultFormat = PostingsFormat.forName("Lucene50"); diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene53/Lucene53Codec.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene53/Lucene53Codec.java index dfd0f223af9..1ec140640f0 100644 --- a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene53/Lucene53Codec.java +++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene53/Lucene53Codec.java @@ -21,7 +21,7 @@ import java.util.Objects; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.CompoundFormat; -import org.apache.lucene.codecs.DimensionalFormat; +import org.apache.lucene.codecs.PointFormat; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.FilterCodec; @@ -160,8 +160,8 @@ public class Lucene53Codec extends Codec { } @Override - public final DimensionalFormat dimensionalFormat() { - return DimensionalFormat.EMPTY; + public final PointFormat pointFormat() { + return PointFormat.EMPTY; } private final PostingsFormat defaultFormat = PostingsFormat.forName("Lucene50"); diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54Codec.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54Codec.java index bb129ac2c08..4ca25219545 100644 --- a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54Codec.java +++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene54/Lucene54Codec.java @@ -21,7 +21,7 @@ import java.util.Objects; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.CompoundFormat; -import org.apache.lucene.codecs.DimensionalFormat; +import org.apache.lucene.codecs.PointFormat; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.FilterCodec; @@ -160,8 +160,8 @@ public class Lucene54Codec extends Codec { } @Override - 
public final DimensionalFormat dimensionalFormat() { - return DimensionalFormat.EMPTY; + public final PointFormat pointFormat() { + return PointFormat.EMPTY; } private final PostingsFormat defaultFormat = PostingsFormat.forName("Lucene50"); diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextBKDReader.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextBKDReader.java index 6e073d60a59..f07c4a3afaf 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextBKDReader.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextBKDReader.java @@ -20,16 +20,16 @@ package org.apache.lucene.codecs.simpletext; import java.io.IOException; import java.nio.charset.StandardCharsets; -import org.apache.lucene.index.DimensionalValues.IntersectVisitor; +import org.apache.lucene.index.PointValues.IntersectVisitor; import org.apache.lucene.store.IndexInput; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.StringHelper; import org.apache.lucene.util.bkd.BKDReader; -import static org.apache.lucene.codecs.simpletext.SimpleTextDimensionalWriter.BLOCK_COUNT; -import static org.apache.lucene.codecs.simpletext.SimpleTextDimensionalWriter.BLOCK_DOC_ID; -import static org.apache.lucene.codecs.simpletext.SimpleTextDimensionalWriter.BLOCK_VALUE; +import static org.apache.lucene.codecs.simpletext.SimpleTextPointWriter.BLOCK_COUNT; +import static org.apache.lucene.codecs.simpletext.SimpleTextPointWriter.BLOCK_DOC_ID; +import static org.apache.lucene.codecs.simpletext.SimpleTextPointWriter.BLOCK_VALUE; class SimpleTextBKDReader extends BKDReader { diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextCodec.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextCodec.java index f8285c16ec1..89cd859b6d9 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextCodec.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextCodec.java @@ -19,7 +19,7 @@ package org.apache.lucene.codecs.simpletext; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.CompoundFormat; -import org.apache.lucene.codecs.DimensionalFormat; +import org.apache.lucene.codecs.PointFormat; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.LiveDocsFormat; @@ -45,7 +45,7 @@ public final class SimpleTextCodec extends Codec { private final LiveDocsFormat liveDocs = new SimpleTextLiveDocsFormat(); private final DocValuesFormat dvFormat = new SimpleTextDocValuesFormat(); private final CompoundFormat compoundFormat = new SimpleTextCompoundFormat(); - private final DimensionalFormat dimensionalFormat = new SimpleTextDimensionalFormat(); + private final PointFormat pointFormat = new SimpleTextPointFormat(); public SimpleTextCodec() { super("SimpleText"); @@ -97,7 +97,7 @@ public final class SimpleTextCodec extends Codec { } @Override - public DimensionalFormat dimensionalFormat() { - return dimensionalFormat; + public PointFormat pointFormat() { + return pointFormat; } } diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldInfosFormat.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldInfosFormat.java index dc68f7244c3..109966a9673 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldInfosFormat.java +++ 
b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldInfosFormat.java @@ -232,11 +232,11 @@ public class SimpleTextFieldInfosFormat extends FieldInfosFormat { } SimpleTextUtil.write(out, DIM_COUNT); - SimpleTextUtil.write(out, Integer.toString(fi.getDimensionCount()), scratch); + SimpleTextUtil.write(out, Integer.toString(fi.getPointDimensionCount()), scratch); SimpleTextUtil.writeNewline(out); SimpleTextUtil.write(out, DIM_NUM_BYTES); - SimpleTextUtil.write(out, Integer.toString(fi.getDimensionNumBytes()), scratch); + SimpleTextUtil.write(out, Integer.toString(fi.getPointNumBytes()), scratch); SimpleTextUtil.writeNewline(out); } SimpleTextUtil.writeChecksum(out, scratch); diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDimensionalFormat.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPointFormat.java similarity index 65% rename from lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDimensionalFormat.java rename to lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPointFormat.java index 56e7579808f..089ba4f6f5a 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDimensionalFormat.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPointFormat.java @@ -19,9 +19,9 @@ package org.apache.lucene.codecs.simpletext; import java.io.IOException; -import org.apache.lucene.codecs.DimensionalFormat; -import org.apache.lucene.codecs.DimensionalReader; -import org.apache.lucene.codecs.DimensionalWriter; +import org.apache.lucene.codecs.PointFormat; +import org.apache.lucene.codecs.PointReader; +import org.apache.lucene.codecs.PointWriter; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; @@ -33,21 +33,21 @@ import org.apache.lucene.index.SegmentWriteState; * any text editor, and even edit it to alter your index. 
* * @lucene.experimental */ -public final class SimpleTextDimensionalFormat extends DimensionalFormat { +public final class SimpleTextPointFormat extends PointFormat { @Override - public DimensionalWriter fieldsWriter(SegmentWriteState state) throws IOException { - return new SimpleTextDimensionalWriter(state); + public PointWriter fieldsWriter(SegmentWriteState state) throws IOException { + return new SimpleTextPointWriter(state); } @Override - public DimensionalReader fieldsReader(SegmentReadState state) throws IOException { - return new SimpleTextDimensionalReader(state); + public PointReader fieldsReader(SegmentReadState state) throws IOException { + return new SimpleTextPointReader(state); } - /** Extension of dimensional data file */ - static final String DIMENSIONAL_EXTENSION = "dim"; + /** Extension of points data file */ + static final String POINT_EXTENSION = "dim"; - /** Extension of dimensional index file */ - static final String DIMENSIONAL_INDEX_EXTENSION = "dii"; + /** Extension of points index file */ + static final String POINT_INDEX_EXTENSION = "dii"; } diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDimensionalReader.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPointReader.java similarity index 72% rename from lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDimensionalReader.java rename to lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPointReader.java index 800e174a497..222805e8aba 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDimensionalReader.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPointReader.java @@ -22,7 +22,7 @@ import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; -import org.apache.lucene.codecs.DimensionalReader; +import org.apache.lucene.codecs.PointReader; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.IndexFileNames; @@ -36,32 +36,32 @@ import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.StringHelper; import org.apache.lucene.util.bkd.BKDReader; -import static org.apache.lucene.codecs.simpletext.SimpleTextDimensionalWriter.BLOCK_FP; -import static org.apache.lucene.codecs.simpletext.SimpleTextDimensionalWriter.BYTES_PER_DIM; -import static org.apache.lucene.codecs.simpletext.SimpleTextDimensionalWriter.FIELD_COUNT; -import static org.apache.lucene.codecs.simpletext.SimpleTextDimensionalWriter.FIELD_FP; -import static org.apache.lucene.codecs.simpletext.SimpleTextDimensionalWriter.FIELD_FP_NAME; -import static org.apache.lucene.codecs.simpletext.SimpleTextDimensionalWriter.INDEX_COUNT; -import static org.apache.lucene.codecs.simpletext.SimpleTextDimensionalWriter.MAX_LEAF_POINTS; -import static org.apache.lucene.codecs.simpletext.SimpleTextDimensionalWriter.MAX_VALUE; -import static org.apache.lucene.codecs.simpletext.SimpleTextDimensionalWriter.MIN_VALUE; -import static org.apache.lucene.codecs.simpletext.SimpleTextDimensionalWriter.NUM_DIMS; -import static org.apache.lucene.codecs.simpletext.SimpleTextDimensionalWriter.SPLIT_COUNT; -import static org.apache.lucene.codecs.simpletext.SimpleTextDimensionalWriter.SPLIT_DIM; -import static org.apache.lucene.codecs.simpletext.SimpleTextDimensionalWriter.SPLIT_VALUE; +import static org.apache.lucene.codecs.simpletext.SimpleTextPointWriter.BLOCK_FP; +import static 
org.apache.lucene.codecs.simpletext.SimpleTextPointWriter.BYTES_PER_DIM; +import static org.apache.lucene.codecs.simpletext.SimpleTextPointWriter.FIELD_COUNT; +import static org.apache.lucene.codecs.simpletext.SimpleTextPointWriter.FIELD_FP; +import static org.apache.lucene.codecs.simpletext.SimpleTextPointWriter.FIELD_FP_NAME; +import static org.apache.lucene.codecs.simpletext.SimpleTextPointWriter.INDEX_COUNT; +import static org.apache.lucene.codecs.simpletext.SimpleTextPointWriter.MAX_LEAF_POINTS; +import static org.apache.lucene.codecs.simpletext.SimpleTextPointWriter.MAX_VALUE; +import static org.apache.lucene.codecs.simpletext.SimpleTextPointWriter.MIN_VALUE; +import static org.apache.lucene.codecs.simpletext.SimpleTextPointWriter.NUM_DIMS; +import static org.apache.lucene.codecs.simpletext.SimpleTextPointWriter.SPLIT_COUNT; +import static org.apache.lucene.codecs.simpletext.SimpleTextPointWriter.SPLIT_DIM; +import static org.apache.lucene.codecs.simpletext.SimpleTextPointWriter.SPLIT_VALUE; -class SimpleTextDimensionalReader extends DimensionalReader { +class SimpleTextPointReader extends PointReader { private final IndexInput dataIn; final SegmentReadState readState; final Map readers = new HashMap<>(); final BytesRefBuilder scratch = new BytesRefBuilder(); - public SimpleTextDimensionalReader(SegmentReadState readState) throws IOException { + public SimpleTextPointReader(SegmentReadState readState) throws IOException { // Initialize readers now: - String fileName = IndexFileNames.segmentFileName(readState.segmentInfo.name, readState.segmentSuffix, SimpleTextDimensionalFormat.DIMENSIONAL_EXTENSION); + String fileName = IndexFileNames.segmentFileName(readState.segmentInfo.name, readState.segmentSuffix, SimpleTextPointFormat.POINT_EXTENSION); dataIn = readState.directory.openInput(fileName, IOContext.DEFAULT); - String indexFileName = IndexFileNames.segmentFileName(readState.segmentInfo.name, readState.segmentSuffix, SimpleTextDimensionalFormat.DIMENSIONAL_INDEX_EXTENSION); + String indexFileName = IndexFileNames.segmentFileName(readState.segmentInfo.name, readState.segmentSuffix, SimpleTextPointFormat.POINT_INDEX_EXTENSION); try (ChecksumIndexInput in = readState.directory.openChecksumInput(indexFileName, IOContext.DEFAULT)) { readLine(in); int count = parseInt(FIELD_COUNT); @@ -78,7 +78,7 @@ class SimpleTextDimensionalReader extends DimensionalReader { } private BKDReader initReader(long fp) throws IOException { - // NOTE: matches what writeIndex does in SimpleTextDimensionalWriter + // NOTE: matches what writeIndex does in SimpleTextPointWriter dataIn.seek(fp); readLine(dataIn); int numDims = parseInt(NUM_DIMS); @@ -151,8 +151,8 @@ class SimpleTextDimensionalReader extends DimensionalReader { if (fieldInfo == null) { throw new IllegalArgumentException("field=\"" + fieldName + "\" is unrecognized"); } - if (fieldInfo.getDimensionCount() == 0) { - throw new IllegalArgumentException("field=\"" + fieldName + "\" did not index dimensional values"); + if (fieldInfo.getPointDimensionCount() == 0) { + throw new IllegalArgumentException("field=\"" + fieldName + "\" did not index points"); } return readers.get(fieldName); } @@ -162,8 +162,8 @@ class SimpleTextDimensionalReader extends DimensionalReader { public void intersect(String fieldName, IntersectVisitor visitor) throws IOException { BKDReader bkdReader = getBKDReader(fieldName); if (bkdReader == null) { - // Schema ghost corner case! 
This field did index dimensional values in the past, but - // now all docs having this dimensional field were deleted in this segment: + // Schema ghost corner case! This field did index points in the past, but + // now all docs having this field were deleted in this segment: return; } bkdReader.intersect(visitor); @@ -203,15 +203,15 @@ class SimpleTextDimensionalReader extends DimensionalReader { @Override public String toString() { - return "SimpleTextDimensionalReader(segment=" + readState.segmentInfo.name + " maxDoc=" + readState.segmentInfo.maxDoc() + ")"; + return "SimpleTextPointReader(segment=" + readState.segmentInfo.name + " maxDoc=" + readState.segmentInfo.maxDoc() + ")"; } @Override public byte[] getMinPackedValue(String fieldName) { BKDReader bkdReader = getBKDReader(fieldName); if (bkdReader == null) { - // Schema ghost corner case! This field did index dimensional values in the past, but - // now all docs having this dimensional field were deleted in this segment: + // Schema ghost corner case! This field did index points in the past, but + // now all docs having this field were deleted in this segment: return null; } return bkdReader.getMinPackedValue(); @@ -221,8 +221,8 @@ class SimpleTextDimensionalReader extends DimensionalReader { public byte[] getMaxPackedValue(String fieldName) { BKDReader bkdReader = getBKDReader(fieldName); if (bkdReader == null) { - // Schema ghost corner case! This field did index dimensional values in the past, but - // now all docs having this dimensional field were deleted in this segment: + // Schema ghost corner case! This field did index points in the past, but + // now all docs having this field were deleted in this segment: return null; } return bkdReader.getMaxPackedValue(); @@ -232,8 +232,8 @@ class SimpleTextDimensionalReader extends DimensionalReader { public int getNumDimensions(String fieldName) { BKDReader bkdReader = getBKDReader(fieldName); if (bkdReader == null) { - // Schema ghost corner case! This field did index dimensional values in the past, but - // now all docs having this dimensional field were deleted in this segment: + // Schema ghost corner case! This field did index points in the past, but + // now all docs having this field were deleted in this segment: return 0; } return bkdReader.getNumDimensions(); @@ -243,8 +243,8 @@ class SimpleTextDimensionalReader extends DimensionalReader { public int getBytesPerDimension(String fieldName) { BKDReader bkdReader = getBKDReader(fieldName); if (bkdReader == null) { - // Schema ghost corner case! This field did index dimensional values in the past, but - // now all docs having this dimensional field were deleted in this segment: + // Schema ghost corner case! 
This field did index points in the past, but + // now all docs having this field were deleted in this segment: return 0; } return bkdReader.getBytesPerDimension(); diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDimensionalWriter.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPointWriter.java similarity index 87% rename from lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDimensionalWriter.java rename to lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPointWriter.java index 268fddcf182..fc533da39a9 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDimensionalWriter.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPointWriter.java @@ -21,10 +21,10 @@ import java.io.IOException; import java.util.HashMap; import java.util.Map; -import org.apache.lucene.codecs.DimensionalReader; -import org.apache.lucene.codecs.DimensionalWriter; -import org.apache.lucene.index.DimensionalValues.IntersectVisitor; -import org.apache.lucene.index.DimensionalValues.Relation; +import org.apache.lucene.codecs.PointReader; +import org.apache.lucene.codecs.PointWriter; +import org.apache.lucene.index.PointValues.IntersectVisitor; +import org.apache.lucene.index.PointValues.Relation; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.SegmentWriteState; @@ -33,7 +33,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.bkd.BKDWriter; -class SimpleTextDimensionalWriter extends DimensionalWriter { +class SimpleTextPointWriter extends PointWriter { final static BytesRef NUM_DIMS = new BytesRef("num dims "); final static BytesRef BYTES_PER_DIM = new BytesRef("bytes per dim "); @@ -57,20 +57,20 @@ class SimpleTextDimensionalWriter extends DimensionalWriter { final SegmentWriteState writeState; final Map indexFPs = new HashMap<>(); - public SimpleTextDimensionalWriter(SegmentWriteState writeState) throws IOException { - String fileName = IndexFileNames.segmentFileName(writeState.segmentInfo.name, writeState.segmentSuffix, SimpleTextDimensionalFormat.DIMENSIONAL_EXTENSION); + public SimpleTextPointWriter(SegmentWriteState writeState) throws IOException { + String fileName = IndexFileNames.segmentFileName(writeState.segmentInfo.name, writeState.segmentSuffix, SimpleTextPointFormat.POINT_EXTENSION); dataOut = writeState.directory.createOutput(fileName, writeState.context); this.writeState = writeState; } @Override - public void writeField(FieldInfo fieldInfo, DimensionalReader values) throws IOException { + public void writeField(FieldInfo fieldInfo, PointReader values) throws IOException { // We use the normal BKDWriter, but subclass to customize how it writes the index and blocks to disk: BKDWriter writer = new BKDWriter(writeState.directory, writeState.segmentInfo.name, - fieldInfo.getDimensionCount(), - fieldInfo.getDimensionNumBytes(), + fieldInfo.getPointDimensionCount(), + fieldInfo.getPointNumBytes(), BKDWriter.DEFAULT_MAX_POINTS_IN_LEAF_NODE, BKDWriter.DEFAULT_MAX_MB_SORT_IN_HEAP) { @@ -108,8 +108,8 @@ class SimpleTextDimensionalWriter extends DimensionalWriter { newline(out); } - assert (splitPackedValues.length % (1 + fieldInfo.getDimensionNumBytes())) == 0; - int count = splitPackedValues.length / (1 + fieldInfo.getDimensionNumBytes()); + assert (splitPackedValues.length % (1 + fieldInfo.getPointNumBytes())) == 0; + 
int count = splitPackedValues.length / (1 + fieldInfo.getPointNumBytes()); assert count == leafBlockFPs.length; write(out, SPLIT_COUNT); @@ -118,10 +118,10 @@ class SimpleTextDimensionalWriter extends DimensionalWriter { for(int i=0;i 0) { indexFPs.put(fieldInfo.name, writer.finish(dataOut)); } @@ -204,7 +204,7 @@ class SimpleTextDimensionalWriter extends DimensionalWriter { dataOut = null; // Write index file - String fileName = IndexFileNames.segmentFileName(writeState.segmentInfo.name, writeState.segmentSuffix, SimpleTextDimensionalFormat.DIMENSIONAL_INDEX_EXTENSION); + String fileName = IndexFileNames.segmentFileName(writeState.segmentInfo.name, writeState.segmentSuffix, SimpleTextPointFormat.POINT_INDEX_EXTENSION); try (IndexOutput indexOut = writeState.directory.createOutput(fileName, writeState.context)) { int count = indexFPs.size(); write(indexOut, FIELD_COUNT); diff --git a/lucene/core/src/java/org/apache/lucene/analysis/LegacyNumericTokenStream.java b/lucene/core/src/java/org/apache/lucene/analysis/LegacyNumericTokenStream.java index 1a7077d11ed..e7abf1f3f19 100644 --- a/lucene/core/src/java/org/apache/lucene/analysis/LegacyNumericTokenStream.java +++ b/lucene/core/src/java/org/apache/lucene/analysis/LegacyNumericTokenStream.java @@ -23,7 +23,6 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; import org.apache.lucene.analysis.tokenattributes.TypeAttribute; -import org.apache.lucene.index.DimensionalValues; import org.apache.lucene.util.Attribute; import org.apache.lucene.util.AttributeFactory; import org.apache.lucene.util.AttributeImpl; @@ -86,7 +85,7 @@ import org.apache.lucene.util.LegacyNumericUtils; * href="../search/LegacyNumericRangeQuery.html#precisionStepDesc">precisionStep * parameter as well as how numeric fields work under the hood.

* - * @deprecated Please switch to {@link DimensionalValues} instead + * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead * * @since 2.9 */ diff --git a/lucene/core/src/java/org/apache/lucene/codecs/Codec.java b/lucene/core/src/java/org/apache/lucene/codecs/Codec.java index 2c37fbfe957..0613441deb3 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/Codec.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/Codec.java @@ -108,8 +108,8 @@ public abstract class Codec implements NamedSPILoader.NamedSPI { /** Encodes/decodes compound files */ public abstract CompoundFormat compoundFormat(); - /** Encodes/decodes dimensional index */ - public abstract DimensionalFormat dimensionalFormat(); + /** Encodes/decodes points index */ + public abstract PointFormat pointFormat(); /** looks up a codec by name */ public static Codec forName(String name) { diff --git a/lucene/core/src/java/org/apache/lucene/codecs/FilterCodec.java b/lucene/core/src/java/org/apache/lucene/codecs/FilterCodec.java index 3465450eb29..f188d50385a 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/FilterCodec.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/FilterCodec.java @@ -105,7 +105,7 @@ public abstract class FilterCodec extends Codec { } @Override - public DimensionalFormat dimensionalFormat() { - return delegate.dimensionalFormat(); + public PointFormat pointFormat() { + return delegate.pointFormat(); } } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/DimensionalFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/PointFormat.java similarity index 74% rename from lucene/core/src/java/org/apache/lucene/codecs/DimensionalFormat.java rename to lucene/core/src/java/org/apache/lucene/codecs/PointFormat.java index f69b088d5d1..953bc4d2f02 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/DimensionalFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/PointFormat.java @@ -23,19 +23,19 @@ import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; /** - * Encodes/decodes indexed dimensional data. + * Encodes/decodes indexed points. * * @lucene.experimental */ -public abstract class DimensionalFormat { +public abstract class PointFormat { /** - * Creates a new dimensional format. + * Creates a new point format. */ - protected DimensionalFormat() { + protected PointFormat() { } /** Writes a new segment */ - public abstract DimensionalWriter fieldsWriter(SegmentWriteState state) throws IOException; + public abstract PointWriter fieldsWriter(SegmentWriteState state) throws IOException; /** Reads a segment. NOTE: by the time this call * returns, it must hold open any files it will need to @@ -46,18 +46,18 @@ public abstract class DimensionalFormat { * IOExceptions are expected and will automatically cause a retry of the * segment opening logic with the newly revised segments. 
* */ - public abstract DimensionalReader fieldsReader(SegmentReadState state) throws IOException; + public abstract PointReader fieldsReader(SegmentReadState state) throws IOException; - /** A {@code DimensionalFormat} that has nothing indexed */ - public static final DimensionalFormat EMPTY = new DimensionalFormat() { + /** A {@code PointFormat} that has nothing indexed */ + public static final PointFormat EMPTY = new PointFormat() { @Override - public DimensionalWriter fieldsWriter(SegmentWriteState state) { + public PointWriter fieldsWriter(SegmentWriteState state) { throw new UnsupportedOperationException(); } @Override - public DimensionalReader fieldsReader(SegmentReadState state) { - return new DimensionalReader() { + public PointReader fieldsReader(SegmentReadState state) { + return new PointReader() { @Override public void close() { } @@ -73,27 +73,27 @@ public abstract class DimensionalFormat { @Override public void intersect(String fieldName, IntersectVisitor visitor) { - throw new IllegalArgumentException("field=\"" + fieldName + "\" was not indexed with dimensional values"); + throw new IllegalArgumentException("field=\"" + fieldName + "\" was not indexed with points"); } @Override public byte[] getMinPackedValue(String fieldName) { - throw new IllegalArgumentException("field=\"" + fieldName + "\" was not indexed with dimensional values"); + throw new IllegalArgumentException("field=\"" + fieldName + "\" was not indexed with points"); } @Override public byte[] getMaxPackedValue(String fieldName) { - throw new IllegalArgumentException("field=\"" + fieldName + "\" was not indexed with dimensional values"); + throw new IllegalArgumentException("field=\"" + fieldName + "\" was not indexed with points"); } @Override public int getNumDimensions(String fieldName) { - throw new IllegalArgumentException("field=\"" + fieldName + "\" was not indexed with dimensional values"); + throw new IllegalArgumentException("field=\"" + fieldName + "\" was not indexed with points"); } @Override public int getBytesPerDimension(String fieldName) { - throw new IllegalArgumentException("field=\"" + fieldName + "\" was not indexed with dimensional values"); + throw new IllegalArgumentException("field=\"" + fieldName + "\" was not indexed with points"); } }; } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/DimensionalReader.java b/lucene/core/src/java/org/apache/lucene/codecs/PointReader.java similarity index 83% rename from lucene/core/src/java/org/apache/lucene/codecs/DimensionalReader.java rename to lucene/core/src/java/org/apache/lucene/codecs/PointReader.java index 7d6eb3c1d4c..aa2553fb6ac 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/DimensionalReader.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/PointReader.java @@ -20,17 +20,17 @@ package org.apache.lucene.codecs; import java.io.Closeable; import java.io.IOException; -import org.apache.lucene.index.DimensionalValues; +import org.apache.lucene.index.PointValues; import org.apache.lucene.util.Accountable; -/** Abstract API to visit dimensional values. +/** Abstract API to visit point values. * * @lucene.experimental */ -public abstract class DimensionalReader extends DimensionalValues implements Closeable, Accountable { +public abstract class PointReader extends PointValues implements Closeable, Accountable { /** Sole constructor. (For invocation by subclass constructors, typically implicit.) */ - protected DimensionalReader() {} + protected PointReader() {} /** * Checks consistency of this reader. 
@@ -45,7 +45,7 @@ public abstract class DimensionalReader extends DimensionalValues implements Clo * Returns an instance optimized for merging. *

* The default implementation returns {@code this} */ - public DimensionalReader getMergeInstance() throws IOException { + public PointReader getMergeInstance() throws IOException { return this; } } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/DimensionalWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/PointWriter.java similarity index 85% rename from lucene/core/src/java/org/apache/lucene/codecs/DimensionalWriter.java rename to lucene/core/src/java/org/apache/lucene/codecs/PointWriter.java index 32a80ca36b7..8e946018799 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/DimensionalWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/PointWriter.java @@ -23,40 +23,40 @@ import java.io.IOException; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.MergeState; -/** Abstract API to write dimensional values +/** Abstract API to write points * * @lucene.experimental */ -public abstract class DimensionalWriter implements Closeable { +public abstract class PointWriter implements Closeable { /** Sole constructor. (For invocation by subclass * constructors, typically implicit.) */ - protected DimensionalWriter() { + protected PointWriter() { } /** Write all values contained in the provided reader */ - public abstract void writeField(FieldInfo fieldInfo, DimensionalReader values) throws IOException; + public abstract void writeField(FieldInfo fieldInfo, PointReader values) throws IOException; /** Default naive merge implemenation for one field: it just re-indexes all the values * from the incoming segment. The default codec overrides this for 1D fields and uses * a faster but more complex implementation. */ protected void mergeOneField(MergeState mergeState, FieldInfo fieldInfo) throws IOException { writeField(fieldInfo, - new DimensionalReader() { + new PointReader() { @Override public void intersect(String fieldName, IntersectVisitor mergedVisitor) throws IOException { if (fieldName.equals(fieldInfo.name) == false) { throw new IllegalArgumentException("field name must match the field being merged"); } - for (int i=0;i *

 *   <li>Attributes: a key-value map of codec-private attributes.</li>
- *   <li>DimensionCount, DimensionNumBytes: these are non-zero only if the field is
- *       indexed dimensionally, e.g. using {@link DimensionalLongField}</li>
+ *   <li>PointDimensionCount, PointNumBytes: these are non-zero only if the field is
+ *       indexed as points, e.g. using {@link org.apache.lucene.document.LongPoint}</li>
  • * * * @lucene.experimental @@ -149,18 +148,18 @@ public final class Lucene60FieldInfosFormat extends FieldInfosFormat { attributes = lastAttributes; } lastAttributes = attributes; - int dimensionCount = input.readVInt(); - int dimensionNumBytes; - if (dimensionCount != 0) { - dimensionNumBytes = input.readVInt(); + int pointDimensionCount = input.readVInt(); + int pointNumBytes; + if (pointDimensionCount != 0) { + pointNumBytes = input.readVInt(); } else { - dimensionNumBytes = 0; + pointNumBytes = 0; } try { infos[i] = new FieldInfo(name, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, docValuesType, dvGen, attributes, - dimensionCount, dimensionNumBytes); + pointDimensionCount, pointNumBytes); infos[i].checkConsistency(); } catch (IllegalStateException e) { throw new CorruptIndexException("invalid fieldinfo for field: " + name + ", fieldNumber=" + fieldNumber, input, e); @@ -286,10 +285,10 @@ public final class Lucene60FieldInfosFormat extends FieldInfosFormat { output.writeByte(docValuesByte(fi.getDocValuesType())); output.writeLong(fi.getDocValuesGen()); output.writeMapOfStrings(fi.attributes()); - int dimensionCount = fi.getDimensionCount(); - output.writeVInt(dimensionCount); - if (dimensionCount != 0) { - output.writeVInt(fi.getDimensionNumBytes()); + int pointDimensionCount = fi.getPointDimensionCount(); + output.writeVInt(pointDimensionCount); + if (pointDimensionCount != 0) { + output.writeVInt(fi.getPointNumBytes()); } } CodecUtil.writeFooter(output); diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60DimensionalFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60PointFormat.java similarity index 82% rename from lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60DimensionalFormat.java rename to lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60PointFormat.java index 00e967209ca..61ce8fbdc4d 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60DimensionalFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60PointFormat.java @@ -20,14 +20,14 @@ package org.apache.lucene.codecs.lucene60; import java.io.IOException; import org.apache.lucene.codecs.CodecUtil; -import org.apache.lucene.codecs.DimensionalFormat; -import org.apache.lucene.codecs.DimensionalReader; -import org.apache.lucene.codecs.DimensionalWriter; +import org.apache.lucene.codecs.PointFormat; +import org.apache.lucene.codecs.PointReader; +import org.apache.lucene.codecs.PointWriter; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; /** - * Lucene 6.0 dimensional format, which encodes dimensional values in a block KD-tree structure + * Lucene 6.0 point format, which encodes dimensional values in a block KD-tree structure * for fast shape intersection filtering. See this paper for details. * *

    This data structure is written as a series of blocks on disk, with an in-memory perfectly balanced @@ -71,9 +71,9 @@ import org.apache.lucene.index.SegmentWriteState; * @lucene.experimental */ -public final class Lucene60DimensionalFormat extends DimensionalFormat { +public final class Lucene60PointFormat extends PointFormat { - static final String CODEC_NAME = "Lucene60DimensionalFormat"; + static final String CODEC_NAME = "Lucene60PointFormat"; /** * Filename extension for the leaf blocks @@ -92,16 +92,16 @@ public final class Lucene60DimensionalFormat extends DimensionalFormat { static final int INDEX_VERSION_CURRENT = INDEX_VERSION_START; /** Sole constructor */ - public Lucene60DimensionalFormat() { + public Lucene60PointFormat() { } @Override - public DimensionalWriter fieldsWriter(SegmentWriteState state) throws IOException { - return new Lucene60DimensionalWriter(state); + public PointWriter fieldsWriter(SegmentWriteState state) throws IOException { + return new Lucene60PointWriter(state); } @Override - public DimensionalReader fieldsReader(SegmentReadState state) throws IOException { - return new Lucene60DimensionalReader(state); + public PointReader fieldsReader(SegmentReadState state) throws IOException { + return new Lucene60PointReader(state); } } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60DimensionalReader.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60PointReader.java similarity index 77% rename from lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60DimensionalReader.java rename to lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60PointReader.java index c940e17d455..2e2bddbe80f 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60DimensionalReader.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60PointReader.java @@ -28,7 +28,7 @@ import java.util.List; import java.util.Map; import org.apache.lucene.codecs.CodecUtil; -import org.apache.lucene.codecs.DimensionalReader; +import org.apache.lucene.codecs.PointReader; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.SegmentReadState; @@ -39,31 +39,31 @@ import org.apache.lucene.util.Accountables; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.bkd.BKDReader; -/** Reads dimensional values previously written with {@link Lucene60DimensionalWriter} */ -public class Lucene60DimensionalReader extends DimensionalReader implements Closeable { +/** Reads point values previously written with {@link Lucene60PointWriter} */ +public class Lucene60PointReader extends PointReader implements Closeable { final IndexInput dataIn; final SegmentReadState readState; final Map readers = new HashMap<>(); /** Sole constructor */ - public Lucene60DimensionalReader(SegmentReadState readState) throws IOException { + public Lucene60PointReader(SegmentReadState readState) throws IOException { this.readState = readState; String dataFileName = IndexFileNames.segmentFileName(readState.segmentInfo.name, readState.segmentSuffix, - Lucene60DimensionalFormat.DATA_EXTENSION); + Lucene60PointFormat.DATA_EXTENSION); dataIn = readState.directory.openInput(dataFileName, readState.context); String indexFileName = IndexFileNames.segmentFileName(readState.segmentInfo.name, readState.segmentSuffix, - Lucene60DimensionalFormat.INDEX_EXTENSION); + Lucene60PointFormat.INDEX_EXTENSION); boolean success = false; // Read index file try (ChecksumIndexInput 
indexIn = readState.directory.openChecksumInput(indexFileName, readState.context)) { CodecUtil.checkIndexHeader(indexIn, - Lucene60DimensionalFormat.CODEC_NAME, - Lucene60DimensionalFormat.INDEX_VERSION_START, - Lucene60DimensionalFormat.INDEX_VERSION_START, + Lucene60PointFormat.CODEC_NAME, + Lucene60PointFormat.INDEX_VERSION_START, + Lucene60PointFormat.INDEX_VERSION_START, readState.segmentInfo.getId(), readState.segmentSuffix); int count = indexIn.readVInt(); @@ -89,8 +89,8 @@ public class Lucene60DimensionalReader extends DimensionalReader implements Clos if (fieldInfo == null) { throw new IllegalArgumentException("field=\"" + fieldName + "\" is unrecognized"); } - if (fieldInfo.getDimensionCount() == 0) { - throw new IllegalArgumentException("field=\"" + fieldName + "\" did not index dimensional values"); + if (fieldInfo.getPointDimensionCount() == 0) { + throw new IllegalArgumentException("field=\"" + fieldName + "\" did not index point values"); } return readers.get(fieldInfo.number); @@ -101,8 +101,8 @@ public class Lucene60DimensionalReader extends DimensionalReader implements Clos BKDReader bkdReader = getBKDReader(fieldName); if (bkdReader == null) { - // Schema ghost corner case! This field did index dimensional values in the past, but - // now all docs having this dimensional field were deleted in this segment: + // Schema ghost corner case! This field did index points in the past, but + // now all docs having this point field were deleted in this segment: return; } @@ -144,8 +144,8 @@ public class Lucene60DimensionalReader extends DimensionalReader implements Clos public byte[] getMinPackedValue(String fieldName) { BKDReader bkdReader = getBKDReader(fieldName); if (bkdReader == null) { - // Schema ghost corner case! This field did index dimensional values in the past, but - // now all docs having this dimensional field were deleted in this segment: + // Schema ghost corner case! This field did index points in the past, but + // now all docs having this point field were deleted in this segment: return null; } @@ -156,8 +156,8 @@ public class Lucene60DimensionalReader extends DimensionalReader implements Clos public byte[] getMaxPackedValue(String fieldName) { BKDReader bkdReader = getBKDReader(fieldName); if (bkdReader == null) { - // Schema ghost corner case! This field did index dimensional values in the past, but - // now all docs having this dimensional field were deleted in this segment: + // Schema ghost corner case! This field did index points in the past, but + // now all docs having this point field were deleted in this segment: return null; } @@ -168,8 +168,8 @@ public class Lucene60DimensionalReader extends DimensionalReader implements Clos public int getNumDimensions(String fieldName) { BKDReader bkdReader = getBKDReader(fieldName); if (bkdReader == null) { - // Schema ghost corner case! This field did index dimensional values in the past, but - // now all docs having this dimensional field were deleted in this segment: + // Schema ghost corner case! This field did index points in the past, but + // now all docs having this point field were deleted in this segment: return 0; } return bkdReader.getNumDimensions(); @@ -179,8 +179,8 @@ public class Lucene60DimensionalReader extends DimensionalReader implements Clos public int getBytesPerDimension(String fieldName) { BKDReader bkdReader = getBKDReader(fieldName); if (bkdReader == null) { - // Schema ghost corner case! 
This field did index dimensional values in the past, but - // now all docs having this dimensional field were deleted in this segment: + // Schema ghost corner case! This field did index points in the past, but + // now all docs having this point field were deleted in this segment: return 0; } return bkdReader.getBytesPerDimension(); diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60DimensionalWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60PointWriter.java similarity index 79% rename from lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60DimensionalWriter.java rename to lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60PointWriter.java index 58e9aa86acb..318d665ee2e 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60DimensionalWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene60/Lucene60PointWriter.java @@ -25,10 +25,10 @@ import java.util.List; import java.util.Map; import org.apache.lucene.codecs.CodecUtil; -import org.apache.lucene.codecs.DimensionalReader; -import org.apache.lucene.codecs.DimensionalWriter; -import org.apache.lucene.index.DimensionalValues.IntersectVisitor; -import org.apache.lucene.index.DimensionalValues.Relation; +import org.apache.lucene.codecs.PointReader; +import org.apache.lucene.codecs.PointWriter; +import org.apache.lucene.index.PointValues.IntersectVisitor; +import org.apache.lucene.index.PointValues.Relation; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.IndexFileNames; @@ -40,7 +40,7 @@ import org.apache.lucene.util.bkd.BKDReader; import org.apache.lucene.util.bkd.BKDWriter; /** Writes dimensional values */ -public class Lucene60DimensionalWriter extends DimensionalWriter implements Closeable { +public class Lucene60PointWriter extends PointWriter implements Closeable { final IndexOutput dataOut; final Map indexFPs = new HashMap<>(); @@ -50,20 +50,20 @@ public class Lucene60DimensionalWriter extends DimensionalWriter implements Clos private boolean closed; /** Full constructor */ - public Lucene60DimensionalWriter(SegmentWriteState writeState, int maxPointsInLeafNode, double maxMBSortInHeap) throws IOException { - assert writeState.fieldInfos.hasDimensionalValues(); + public Lucene60PointWriter(SegmentWriteState writeState, int maxPointsInLeafNode, double maxMBSortInHeap) throws IOException { + assert writeState.fieldInfos.hasPointValues(); this.writeState = writeState; this.maxPointsInLeafNode = maxPointsInLeafNode; this.maxMBSortInHeap = maxMBSortInHeap; String dataFileName = IndexFileNames.segmentFileName(writeState.segmentInfo.name, writeState.segmentSuffix, - Lucene60DimensionalFormat.DATA_EXTENSION); + Lucene60PointFormat.DATA_EXTENSION); dataOut = writeState.directory.createOutput(dataFileName, writeState.context); boolean success = false; try { CodecUtil.writeIndexHeader(dataOut, - Lucene60DimensionalFormat.CODEC_NAME, - Lucene60DimensionalFormat.DATA_VERSION_CURRENT, + Lucene60PointFormat.CODEC_NAME, + Lucene60PointFormat.DATA_VERSION_CURRENT, writeState.segmentInfo.getId(), writeState.segmentSuffix); success = true; @@ -75,17 +75,17 @@ public class Lucene60DimensionalWriter extends DimensionalWriter implements Clos } /** Uses the defaults values for {@code maxPointsInLeafNode} (1024) and {@code maxMBSortInHeap} (16.0) */ - public Lucene60DimensionalWriter(SegmentWriteState writeState) throws IOException { + public Lucene60PointWriter(SegmentWriteState 
writeState) throws IOException { this(writeState, BKDWriter.DEFAULT_MAX_POINTS_IN_LEAF_NODE, BKDWriter.DEFAULT_MAX_MB_SORT_IN_HEAP); } @Override - public void writeField(FieldInfo fieldInfo, DimensionalReader values) throws IOException { + public void writeField(FieldInfo fieldInfo, PointReader values) throws IOException { try (BKDWriter writer = new BKDWriter(writeState.directory, writeState.segmentInfo.name, - fieldInfo.getDimensionCount(), - fieldInfo.getDimensionNumBytes(), + fieldInfo.getPointDimensionCount(), + fieldInfo.getPointNumBytes(), maxPointsInLeafNode, maxMBSortInHeap)) { @@ -114,8 +114,8 @@ public class Lucene60DimensionalWriter extends DimensionalWriter implements Clos @Override public void merge(MergeState mergeState) throws IOException { - for(DimensionalReader reader : mergeState.dimensionalReaders) { - if (reader instanceof Lucene60DimensionalReader == false) { + for(PointReader reader : mergeState.pointReaders) { + if (reader instanceof Lucene60PointReader == false) { // We can only bulk merge when all to-be-merged segments use our format: super.merge(mergeState); return; @@ -123,25 +123,25 @@ public class Lucene60DimensionalWriter extends DimensionalWriter implements Clos } for (FieldInfo fieldInfo : mergeState.mergeFieldInfos) { - if (fieldInfo.getDimensionCount() != 0) { - if (fieldInfo.getDimensionCount() == 1) { + if (fieldInfo.getPointDimensionCount() != 0) { + if (fieldInfo.getPointDimensionCount() == 1) { //System.out.println("MERGE: field=" + fieldInfo.name); // Optimize the 1D case to use BKDWriter.merge, which does a single merge sort of the // already sorted incoming segments, instead of trying to sort all points again as if // we were simply reindexing them: try (BKDWriter writer = new BKDWriter(writeState.directory, writeState.segmentInfo.name, - fieldInfo.getDimensionCount(), - fieldInfo.getDimensionNumBytes(), + fieldInfo.getPointDimensionCount(), + fieldInfo.getPointNumBytes(), maxPointsInLeafNode, maxMBSortInHeap)) { List bkdReaders = new ArrayList<>(); List docMaps = new ArrayList<>(); List docIDBases = new ArrayList<>(); - for(int i=0;i *

  • - * {@link org.apache.lucene.codecs.lucene60.Lucene60DimensionalFormat Dimensional values}. + * {@link org.apache.lucene.codecs.lucene60.Lucene60PointFormat Point values}. * Optional pair of files, recording dimesionally indexed fields, to enable fast * numeric range filtering and large numeric values like BigInteger and BigDecimal (1D) * and geo shape intersection (2D, 3D). @@ -322,9 +322,9 @@ * Info about what files are live * * - * {@link org.apache.lucene.codecs.lucene60.Lucene60DimensionalFormat Dimensional values} + * {@link org.apache.lucene.codecs.lucene60.Lucene60PointFormat Point values} * .dii, .dim - * Holds dimensionally indexed fields, if any + * Holds indexed points, if any * * * diff --git a/lucene/core/src/java/org/apache/lucene/document/DimensionalBinaryField.java b/lucene/core/src/java/org/apache/lucene/document/BinaryPoint.java similarity index 87% rename from lucene/core/src/java/org/apache/lucene/document/DimensionalBinaryField.java rename to lucene/core/src/java/org/apache/lucene/document/BinaryPoint.java index 718a101554c..a74b17c3e98 100644 --- a/lucene/core/src/java/org/apache/lucene/document/DimensionalBinaryField.java +++ b/lucene/core/src/java/org/apache/lucene/document/BinaryPoint.java @@ -24,7 +24,7 @@ import org.apache.lucene.util.BytesRef; * efficient. Muliple values for the same field in one documents * is allowed. */ -public final class DimensionalBinaryField extends Field { +public final class BinaryPoint extends Field { private static FieldType getType(byte[][] point) { if (point == null) { @@ -89,22 +89,22 @@ public final class DimensionalBinaryField extends Field { return new BytesRef(packed); } - /** General purpose API: creates a new DimensionalField, indexing the + /** General purpose API: creates a new BinaryPoint, indexing the * provided N-dimensional binary point. * * @param name field name * @param point byte[][] value * @throws IllegalArgumentException if the field name or value is null. */ - public DimensionalBinaryField(String name, byte[]... point) { + public BinaryPoint(String name, byte[]... 
point) { super(name, pack(point), getType(point)); } /** Expert API */ - public DimensionalBinaryField(String name, byte[] packedPoint, FieldType type) { + public BinaryPoint(String name, byte[] packedPoint, FieldType type) { super(name, packedPoint, type); - if (packedPoint.length != type.dimensionCount() * type.dimensionNumBytes()) { - throw new IllegalArgumentException("packedPoint is length=" + packedPoint.length + " but type.dimensionCount()=" + type.dimensionCount() + " and type.dimensionNumBytes()=" + type.dimensionNumBytes()); + if (packedPoint.length != type.pointDimensionCount() * type.pointNumBytes()) { + throw new IllegalArgumentException("packedPoint is length=" + packedPoint.length + " but type.pointDimensionCount()=" + type.pointDimensionCount() + " and type.pointNumBytes()=" + type.pointNumBytes()); } } } diff --git a/lucene/core/src/java/org/apache/lucene/document/DateTools.java b/lucene/core/src/java/org/apache/lucene/document/DateTools.java index e8f6622651f..e378ece28f5 100644 --- a/lucene/core/src/java/org/apache/lucene/document/DateTools.java +++ b/lucene/core/src/java/org/apache/lucene/document/DateTools.java @@ -24,7 +24,6 @@ import java.util.Date; import java.util.Locale; import java.util.TimeZone; -import org.apache.lucene.search.DimensionalRangeQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.TermRangeQuery; @@ -39,12 +38,12 @@ import org.apache.lucene.search.TermRangeQuery; * {@link TermRangeQuery} and {@link PrefixQuery} will require more memory and become slower. * *

    - * Another approach is {@link DimensionalLongField}, which indexes the + * Another approach is {@link LongPoint}, which indexes the * values in sorted order. * For indexing a {@link Date} or {@link Calendar}, just get the unix timestamp as * long using {@link Date#getTime} or {@link Calendar#getTimeInMillis} and - * index this as a numeric value with {@link DimensionalLongField} - * and use {@link DimensionalRangeQuery} to query it. + * index this as a numeric value with {@link LongPoint} + * and use {@link org.apache.lucene.search.PointRangeQuery} to query it. */ public class DateTools { diff --git a/lucene/core/src/java/org/apache/lucene/document/DimensionalDoubleField.java b/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java similarity index 94% rename from lucene/core/src/java/org/apache/lucene/document/DimensionalDoubleField.java rename to lucene/core/src/java/org/apache/lucene/document/DoublePoint.java index db93cd42727..a7a63e0c34f 100644 --- a/lucene/core/src/java/org/apache/lucene/document/DimensionalDoubleField.java +++ b/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java @@ -26,7 +26,7 @@ import org.apache.lucene.util.RamUsageEstimator; * efficient. Muliple values for the same field in one documents * is allowed. */ -public final class DimensionalDoubleField extends Field { +public final class DoublePoint extends Field { private static FieldType getType(int numDims) { FieldType type = new FieldType(); @@ -73,14 +73,14 @@ public final class DimensionalDoubleField extends Field { return new BytesRef(packed); } - /** Creates a new DimensionalDoubleField, indexing the + /** Creates a new DoublePoint, indexing the * provided N-dimensional int point. * * @param name field name * @param point double[] value * @throws IllegalArgumentException if the field name or value is null. */ - public DimensionalDoubleField(String name, double... point) { + public DoublePoint(String name, double... point) { super(name, pack(point), getType(point.length)); } } diff --git a/lucene/core/src/java/org/apache/lucene/document/FieldType.java b/lucene/core/src/java/org/apache/lucene/document/FieldType.java index 846f853ab73..c6a137b3823 100644 --- a/lucene/core/src/java/org/apache/lucene/document/FieldType.java +++ b/lucene/core/src/java/org/apache/lucene/document/FieldType.java @@ -18,7 +18,6 @@ package org.apache.lucene.document; */ import org.apache.lucene.analysis.Analyzer; // javadocs -import org.apache.lucene.index.DimensionalValues; // javadocs import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableFieldType; @@ -32,7 +31,7 @@ public class FieldType implements IndexableFieldType { /** Data type of the numeric value * @since 3.2 * - * @deprecated Please switch to {@link DimensionalValues} instead + * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead */ @Deprecated public enum LegacyNumericType { @@ -304,7 +303,7 @@ public class FieldType implements IndexableFieldType { * future modifications. 
* @see #numericType() * - * @deprecated Please switch to {@link DimensionalValues} instead + * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead */ @Deprecated public void setNumericType(LegacyNumericType type) { @@ -320,7 +319,7 @@ public class FieldType implements IndexableFieldType { * The default is null (no numeric type) * @see #setNumericType(org.apache.lucene.document.FieldType.LegacyNumericType) * - * @deprecated Please switch to {@link DimensionalValues} instead + * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead */ @Deprecated public LegacyNumericType numericType() { @@ -335,7 +334,7 @@ public class FieldType implements IndexableFieldType { * future modifications. * @see #numericPrecisionStep() * - * @deprecated Please switch to {@link DimensionalValues} instead + * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead */ @Deprecated public void setNumericPrecisionStep(int precisionStep) { @@ -354,7 +353,7 @@ public class FieldType implements IndexableFieldType { * The default is {@link org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT} * @see #setNumericPrecisionStep(int) * - * @deprecated Please switch to {@link DimensionalValues} instead + * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead */ @Deprecated public int numericPrecisionStep() { @@ -362,22 +361,22 @@ public class FieldType implements IndexableFieldType { } /** - * Enables dimensional indexing. + * Enables points indexing. */ public void setDimensions(int dimensionCount, int dimensionNumBytes) { if (dimensionCount < 0) { - throw new IllegalArgumentException("dimensionCount must be >= 0; got " + dimensionCount); + throw new IllegalArgumentException("pointDimensionCount must be >= 0; got " + dimensionCount); } if (dimensionNumBytes < 0) { - throw new IllegalArgumentException("dimensionNumBytes must be >= 0; got " + dimensionNumBytes); + throw new IllegalArgumentException("pointNumBytes must be >= 0; got " + dimensionNumBytes); } if (dimensionCount == 0) { if (dimensionNumBytes != 0) { - throw new IllegalArgumentException("when dimensionCount is 0 dimensionNumBytes must 0; got " + dimensionNumBytes); + throw new IllegalArgumentException("when pointDimensionCount is 0 pointNumBytes must 0; got " + dimensionNumBytes); } } else if (dimensionNumBytes == 0) { if (dimensionCount != 0) { - throw new IllegalArgumentException("when dimensionNumBytes is 0 dimensionCount must 0; got " + dimensionCount); + throw new IllegalArgumentException("when pointNumBytes is 0 pointDimensionCount must 0; got " + dimensionCount); } } @@ -386,12 +385,12 @@ public class FieldType implements IndexableFieldType { } @Override - public int dimensionCount() { + public int pointDimensionCount() { return dimensionCount; } @Override - public int dimensionNumBytes() { + public int pointNumBytes() { return dimensionNumBytes; } @@ -435,9 +434,9 @@ public class FieldType implements IndexableFieldType { result.append(numericPrecisionStep); } if (dimensionCount != 0) { - result.append(",dimensionCount="); + result.append(",pointDimensionCount="); result.append(dimensionCount); - result.append(",dimensionNumBytes="); + result.append(",pointNumBytes="); result.append(dimensionNumBytes); } } diff --git a/lucene/core/src/java/org/apache/lucene/document/DimensionalFloatField.java b/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java similarity index 94% rename from 
lucene/core/src/java/org/apache/lucene/document/DimensionalFloatField.java rename to lucene/core/src/java/org/apache/lucene/document/FloatPoint.java index 1fd6ea89eea..a023a4a82b5 100644 --- a/lucene/core/src/java/org/apache/lucene/document/DimensionalFloatField.java +++ b/lucene/core/src/java/org/apache/lucene/document/FloatPoint.java @@ -26,7 +26,7 @@ import org.apache.lucene.util.RamUsageEstimator; * efficient. Muliple values for the same field in one documents * is allowed. */ -public final class DimensionalFloatField extends Field { +public final class FloatPoint extends Field { private static FieldType getType(int numDims) { FieldType type = new FieldType(); @@ -73,14 +73,14 @@ public final class DimensionalFloatField extends Field { return new BytesRef(packed); } - /** Creates a new DimensionalFloatField, indexing the + /** Creates a new FloatPoint, indexing the * provided N-dimensional float point. * * @param name field name * @param point int[] value * @throws IllegalArgumentException if the field name or value is null. */ - public DimensionalFloatField(String name, float... point) { + public FloatPoint(String name, float... point) { super(name, pack(point), getType(point.length)); } } diff --git a/lucene/core/src/java/org/apache/lucene/document/DimensionalIntField.java b/lucene/core/src/java/org/apache/lucene/document/IntPoint.java similarity index 94% rename from lucene/core/src/java/org/apache/lucene/document/DimensionalIntField.java rename to lucene/core/src/java/org/apache/lucene/document/IntPoint.java index 83604a947df..28f6a555472 100644 --- a/lucene/core/src/java/org/apache/lucene/document/DimensionalIntField.java +++ b/lucene/core/src/java/org/apache/lucene/document/IntPoint.java @@ -26,7 +26,7 @@ import org.apache.lucene.util.RamUsageEstimator; * efficient. Muliple values for the same field in one documents * is allowed. */ -public final class DimensionalIntField extends Field { +public final class IntPoint extends Field { private static FieldType getType(int numDims) { FieldType type = new FieldType(); @@ -73,14 +73,14 @@ public final class DimensionalIntField extends Field { return new BytesRef(packed); } - /** Creates a new DimensionalIntField, indexing the + /** Creates a new IntPoint, indexing the * provided N-dimensional int point. * * @param name field name * @param point int[] value * @throws IllegalArgumentException if the field name or value is null. */ - public DimensionalIntField(String name, int... point) { + public IntPoint(String name, int... point) { super(name, pack(point), getType(point.length)); } } diff --git a/lucene/core/src/java/org/apache/lucene/document/LegacyDoubleField.java b/lucene/core/src/java/org/apache/lucene/document/LegacyDoubleField.java index ed4b2ef40f1..eaebd61af89 100644 --- a/lucene/core/src/java/org/apache/lucene/document/LegacyDoubleField.java +++ b/lucene/core/src/java/org/apache/lucene/document/LegacyDoubleField.java @@ -105,7 +105,7 @@ import org.apache.lucene.index.IndexOptions; * class is a wrapper around this token stream type for * easier, more intuitive usage.

    * - * @deprecated Please use {@link DimensionalDoubleField} instead + * @deprecated Please use {@link DoublePoint} instead * * @since 2.9 */ diff --git a/lucene/core/src/java/org/apache/lucene/document/LegacyFloatField.java b/lucene/core/src/java/org/apache/lucene/document/LegacyFloatField.java index cda1aa426e7..e6ac0deb4a8 100644 --- a/lucene/core/src/java/org/apache/lucene/document/LegacyFloatField.java +++ b/lucene/core/src/java/org/apache/lucene/document/LegacyFloatField.java @@ -105,7 +105,7 @@ import org.apache.lucene.util.LegacyNumericUtils; * class is a wrapper around this token stream type for * easier, more intuitive usage.

    * - * @deprecated Please use {@link DimensionalFloatField} instead + * @deprecated Please use {@link FloatPoint} instead * * @since 2.9 */ diff --git a/lucene/core/src/java/org/apache/lucene/document/LegacyIntField.java b/lucene/core/src/java/org/apache/lucene/document/LegacyIntField.java index b937f9f4ee9..3ad963bcbff 100644 --- a/lucene/core/src/java/org/apache/lucene/document/LegacyIntField.java +++ b/lucene/core/src/java/org/apache/lucene/document/LegacyIntField.java @@ -105,7 +105,7 @@ import org.apache.lucene.util.LegacyNumericUtils; * class is a wrapper around this token stream type for * easier, more intuitive usage.

    * - * @deprecated Please use {@link DimensionalIntField} instead + * @deprecated Please use {@link IntPoint} instead * * @since 2.9 */ diff --git a/lucene/core/src/java/org/apache/lucene/document/LegacyLongField.java b/lucene/core/src/java/org/apache/lucene/document/LegacyLongField.java index e478766ae60..ce5c9949e1d 100644 --- a/lucene/core/src/java/org/apache/lucene/document/LegacyLongField.java +++ b/lucene/core/src/java/org/apache/lucene/document/LegacyLongField.java @@ -115,7 +115,7 @@ import org.apache.lucene.index.IndexOptions; * class is a wrapper around this token stream type for * easier, more intuitive usage.

    * - * @deprecated Please use {@link DimensionalLongField} instead + * @deprecated Please use {@link LongPoint} instead * * @since 2.9 */ diff --git a/lucene/core/src/java/org/apache/lucene/document/DimensionalLongField.java b/lucene/core/src/java/org/apache/lucene/document/LongPoint.java similarity index 94% rename from lucene/core/src/java/org/apache/lucene/document/DimensionalLongField.java rename to lucene/core/src/java/org/apache/lucene/document/LongPoint.java index 752adef8790..23fddb28f0c 100644 --- a/lucene/core/src/java/org/apache/lucene/document/DimensionalLongField.java +++ b/lucene/core/src/java/org/apache/lucene/document/LongPoint.java @@ -26,7 +26,7 @@ import org.apache.lucene.util.RamUsageEstimator; * efficient. Muliple values for the same field in one documents * is allowed. */ -public final class DimensionalLongField extends Field { +public final class LongPoint extends Field { private static FieldType getType(int numDims) { FieldType type = new FieldType(); @@ -73,14 +73,14 @@ public final class DimensionalLongField extends Field { return new BytesRef(packed); } - /** Creates a new DimensionalLongField, indexing the + /** Creates a new LongPoint, indexing the * provided N-dimensional int point. * * @param name field name * @param point int[] value * @throws IllegalArgumentException if the field name or value is null. */ - public DimensionalLongField(String name, long... point) { + public LongPoint(String name, long... point) { super(name, pack(point), getType(point.length)); } } diff --git a/lucene/core/src/java/org/apache/lucene/document/package-info.java b/lucene/core/src/java/org/apache/lucene/document/package-info.java index 901d57c93ce..f3e8e5bcfae 100644 --- a/lucene/core/src/java/org/apache/lucene/document/package-info.java +++ b/lucene/core/src/java/org/apache/lucene/document/package-info.java @@ -35,8 +35,8 @@ * the process of taking a file and converting it into a Lucene {@link org.apache.lucene.document.Document}. *

    *

    The {@link org.apache.lucene.document.DateTools} is a utility class to make dates and times searchable. {@link - * org.apache.lucene.document.DimensionalIntField}, {@link org.apache.lucene.document.DimensionalLongField}, - * {@link org.apache.lucene.document.DimensionalFloatField} and {@link org.apache.lucene.document.DimensionalDoubleField} enable indexing - * of numeric values (and also dates) for fast range queries using {@link org.apache.lucene.search.DimensionalRangeQuery}

    + * org.apache.lucene.document.IntPoint}, {@link org.apache.lucene.document.LongPoint}, + * {@link org.apache.lucene.document.FloatPoint} and {@link org.apache.lucene.document.DoublePoint} enable indexing + * of numeric values (and also dates) for fast range queries using {@link org.apache.lucene.search.PointRangeQuery}

    */ package org.apache.lucene.document; diff --git a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java index 1571b51d1b8..c3aa49f9a65 100644 --- a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java +++ b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java @@ -33,7 +33,7 @@ import java.util.Locale; import java.util.Map; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.DimensionalReader; +import org.apache.lucene.codecs.PointReader; import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.NormsProducer; import org.apache.lucene.codecs.PostingsFormat; @@ -215,8 +215,8 @@ public final class CheckIndex implements Closeable { /** Status for testing of DocValues (null if DocValues could not be tested). */ public DocValuesStatus docValuesStatus; - /** Status for testing of DimensionalValues (null if DimensionalValues could not be tested). */ - public DimensionalValuesStatus dimensionalValuesStatus; + /** Status for testing of PointValues (null if PointValues could not be tested). */ + public PointsStatus pointsStatus; } /** @@ -358,17 +358,17 @@ public final class CheckIndex implements Closeable { } /** - * Status from testing DimensionalValues + * Status from testing PointValues */ - public static final class DimensionalValuesStatus { + public static final class PointsStatus { - DimensionalValuesStatus() { + PointsStatus() { } - /** Total number of dimensional values points tested. */ + /** Total number of values points tested. */ public long totalValuePoints; - /** Total number of fields with dimensional values. */ + /** Total number of fields with points. */ public int totalValueFields; /** Exception thrown during doc values test (null on success) */ @@ -721,8 +721,8 @@ public final class CheckIndex implements Closeable { // Test Docvalues segInfoStat.docValuesStatus = testDocValues(reader, infoStream, failFast); - // Test DimensionalValues - segInfoStat.dimensionalValuesStatus = testDimensionalValues(reader, infoStream, failFast); + // Test PointValues + segInfoStat.pointsStatus = testPoints(reader, infoStream, failFast); // Rethrow the first exception we encountered // This will cause stats for failed segments to be incremented properly @@ -1681,23 +1681,23 @@ public final class CheckIndex implements Closeable { } /** - * Test the dimensional values index. 
+ * Test the points index * @lucene.experimental */ - public static Status.DimensionalValuesStatus testDimensionalValues(CodecReader reader, PrintStream infoStream, boolean failFast) throws IOException { + public static Status.PointsStatus testPoints(CodecReader reader, PrintStream infoStream, boolean failFast) throws IOException { FieldInfos fieldInfos = reader.getFieldInfos(); - Status.DimensionalValuesStatus status = new Status.DimensionalValuesStatus(); + Status.PointsStatus status = new Status.PointsStatus(); try { - if (fieldInfos.hasDimensionalValues()) { - DimensionalReader values = reader.getDimensionalReader(); + if (fieldInfos.hasPointValues()) { + PointReader values = reader.getPointReader(); if (values == null) { - throw new RuntimeException("there are fields with dimensional values, but reader.getDimensionalRader() is null"); + throw new RuntimeException("there are fields with points, but reader.getPointReader() is null"); } for (FieldInfo fieldInfo : fieldInfos) { - if (fieldInfo.getDimensionCount() > 0) { + if (fieldInfo.getPointDimensionCount() > 0) { status.totalValueFields++; - int dimCount = fieldInfo.getDimensionCount(); - int bytesPerDim = fieldInfo.getDimensionNumBytes(); + int dimCount = fieldInfo.getPointDimensionCount(); + int bytesPerDim = fieldInfo.getPointNumBytes(); byte[] lastMinPackedValue = new byte[dimCount*bytesPerDim]; BytesRef lastMinPacked = new BytesRef(lastMinPackedValue); byte[] lastMaxPackedValue = new byte[dimCount*bytesPerDim]; @@ -1707,7 +1707,7 @@ public final class CheckIndex implements Closeable { lastMinPacked.length = bytesPerDim; scratch.length = bytesPerDim; values.intersect(fieldInfo.name, - new DimensionalValues.IntersectVisitor() { + new PointValues.IntersectVisitor() { @Override public void visit(int docID) { throw new RuntimeException("codec called IntersectVisitor.visit without a packed value for docID=" + docID); @@ -1737,7 +1737,7 @@ public final class CheckIndex implements Closeable { } @Override - public DimensionalValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { + public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { checkPackedValue("min packed value", minPackedValue, -1); System.arraycopy(minPackedValue, 0, lastMinPackedValue, 0, minPackedValue.length); checkPackedValue("max packed value", maxPackedValue, -1); @@ -1745,7 +1745,7 @@ public final class CheckIndex implements Closeable { // We always pretend the query shape is so complex that it crosses every cell, so // that packedValue is passed for every document - return DimensionalValues.Relation.CELL_CROSSES_QUERY; + return PointValues.Relation.CELL_CROSSES_QUERY; } private void checkPackedValue(String desc, byte[] packedValue, int docID) { diff --git a/lucene/core/src/java/org/apache/lucene/index/CodecReader.java b/lucene/core/src/java/org/apache/lucene/index/CodecReader.java index c879b9ed2bf..a5642e4c759 100644 --- a/lucene/core/src/java/org/apache/lucene/index/CodecReader.java +++ b/lucene/core/src/java/org/apache/lucene/index/CodecReader.java @@ -25,7 +25,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.lucene.codecs.DimensionalReader; +import org.apache.lucene.codecs.PointReader; import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.FieldsProducer; import org.apache.lucene.codecs.NormsProducer; @@ -77,10 +77,10 @@ public abstract class CodecReader extends LeafReader implements Accountable { public abstract FieldsProducer getPostingsReader(); 
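The testPoints checker above drives the whole points index through PointValues.intersect with a visitor that reports CELL_CROSSES_QUERY for every cell, forcing the codec to hand each packed value to the visitor. A minimal sketch of that same visitor pattern, assuming only the PointValues API as it appears in this patch (the PointCounter class name and the per-document counting are illustrative, not part of the change):

import java.io.IOException;

import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PointValues;

public class PointCounter {

  /** Returns how many point values each document carries for the given field,
   *  or null if the reader has no points at all. */
  public static int[] countPoints(LeafReader reader, String field) throws IOException {
    PointValues values = reader.getPointValues();
    if (values == null) {
      return null;
    }
    final int[] counts = new int[reader.maxDoc()];
    values.intersect(field, new PointValues.IntersectVisitor() {
      @Override
      public void visit(int docID) {
        // Called when a whole cell matches; never hit here because compare()
        // below always reports a crossing cell.
        counts[docID]++;
      }

      @Override
      public void visit(int docID, byte[] packedValue) {
        // Leaf values arrive one packed byte[] at a time.
        counts[docID]++;
      }

      @Override
      public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
        // Like CheckIndex: pretend every cell crosses so every value is visited.
        return PointValues.Relation.CELL_CROSSES_QUERY;
      }
    });
    return counts;
  }
}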
/** - * Expert: retrieve underlying DimensionalReader + * Expert: retrieve underlying PointReader * @lucene.internal */ - public abstract DimensionalReader getDimensionalReader(); + public abstract PointReader getPointReader(); @Override public final void document(int docID, StoredFieldVisitor visitor) throws IOException { @@ -322,9 +322,9 @@ public abstract class CodecReader extends LeafReader implements Accountable { ramBytesUsed += getTermVectorsReader().ramBytesUsed(); } - // dimensional values - if (getDimensionalReader() != null) { - ramBytesUsed += getDimensionalReader().ramBytesUsed(); + // points + if (getPointReader() != null) { + ramBytesUsed += getPointReader().ramBytesUsed(); } return ramBytesUsed; @@ -358,9 +358,9 @@ public abstract class CodecReader extends LeafReader implements Accountable { resources.add(Accountables.namedAccountable("term vectors", getTermVectorsReader())); } - // dimensional values - if (getDimensionalReader() != null) { - resources.add(Accountables.namedAccountable("dimensional values", getDimensionalReader())); + // points + if (getPointReader() != null) { + resources.add(Accountables.namedAccountable("points", getPointReader())); } return Collections.unmodifiableList(resources); diff --git a/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java b/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java index 0370008be99..23dc6f66746 100644 --- a/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java +++ b/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java @@ -23,8 +23,8 @@ import java.util.HashMap; import java.util.Map; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.codecs.DimensionalFormat; -import org.apache.lucene.codecs.DimensionalWriter; +import org.apache.lucene.codecs.PointFormat; +import org.apache.lucene.codecs.PointWriter; import org.apache.lucene.codecs.DocValuesConsumer; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.NormsConsumer; @@ -93,7 +93,7 @@ final class DefaultIndexingChain extends DocConsumer { int maxDoc = state.segmentInfo.maxDoc(); writeNorms(state); writeDocValues(state); - writeDimensionalValues(state); + writePoints(state); // it's possible all docs hit non-aborting exceptions... initStoredFieldsWriter(); @@ -121,33 +121,33 @@ final class DefaultIndexingChain extends DocConsumer { docWriter.codec.fieldInfosFormat().write(state.directory, state.segmentInfo, "", state.fieldInfos, IOContext.DEFAULT); } - /** Writes all buffered dimensional values. */ - private void writeDimensionalValues(SegmentWriteState state) throws IOException { - DimensionalWriter dimensionalWriter = null; + /** Writes all buffered points. 
*/ + private void writePoints(SegmentWriteState state) throws IOException { + PointWriter pointWriter = null; boolean success = false; try { for (int i=0;i attributes, int dimensionCount, int dimensionNumBytes) { + long dvGen, Map attributes, int pointDimensionCount, int pointNumBytes) { this.name = Objects.requireNonNull(name); this.number = number; this.docValuesType = Objects.requireNonNull(docValues, "DocValuesType cannot be null (field: \"" + name + "\")"); @@ -78,8 +76,8 @@ public final class FieldInfo { } this.dvGen = dvGen; this.attributes = Objects.requireNonNull(attributes); - this.dimensionCount = dimensionCount; - this.dimensionNumBytes = dimensionNumBytes; + this.pointDimensionCount = pointDimensionCount; + this.pointNumBytes = pointNumBytes; assert checkConsistency(); } @@ -105,20 +103,20 @@ public final class FieldInfo { } } - if (dimensionCount < 0) { - throw new IllegalStateException("dimensionCount must be >= 0; got " + dimensionCount); + if (pointDimensionCount < 0) { + throw new IllegalStateException("pointDimensionCount must be >= 0; got " + pointDimensionCount); } - if (dimensionNumBytes < 0) { - throw new IllegalStateException("dimensionNumBytes must be >= 0; got " + dimensionNumBytes); + if (pointNumBytes < 0) { + throw new IllegalStateException("pointNumBytes must be >= 0; got " + pointNumBytes); } - if (dimensionCount != 0 && dimensionNumBytes == 0) { - throw new IllegalStateException("dimensionNumBytes must be > 0 when dimensionCount=" + dimensionCount); + if (pointDimensionCount != 0 && pointNumBytes == 0) { + throw new IllegalStateException("pointNumBytes must be > 0 when pointDimensionCount=" + pointDimensionCount); } - if (dimensionNumBytes != 0 && dimensionCount == 0) { - throw new IllegalStateException("dimensionCount must be > 0 when dimensionNumBytes=" + dimensionNumBytes); + if (pointNumBytes != 0 && pointDimensionCount == 0) { + throw new IllegalStateException("pointDimensionCount must be > 0 when pointNumBytes=" + pointNumBytes); } if (dvGen != -1 && docValuesType == DocValuesType.NONE) { @@ -144,9 +142,9 @@ public final class FieldInfo { } } - if (this.dimensionCount == 0 && dimensionCount != 0) { - this.dimensionCount = dimensionCount; - this.dimensionNumBytes = dimensionNumBytes; + if (this.pointDimensionCount == 0 && dimensionCount != 0) { + this.pointDimensionCount = dimensionCount; + this.pointNumBytes = dimensionNumBytes; } if (this.indexOptions != IndexOptions.NONE) { // if updated field data is not for indexing, leave the updates out @@ -165,40 +163,40 @@ public final class FieldInfo { assert checkConsistency(); } - /** Record that this field is indexed dimensionally, with the + /** Record that this field is indexed with points, with the * specified number of dimensions and bytes per dimension. 
*/ - public void setDimensions(int count, int numBytes) { + public void setPointDimensions(int count, int numBytes) { if (count <= 0) { - throw new IllegalArgumentException("dimension count must be >= 0; got " + count + " for field=\"" + name + "\""); + throw new IllegalArgumentException("point dimension count must be >= 0; got " + count + " for field=\"" + name + "\""); } - if (count > DimensionalValues.MAX_DIMENSIONS) { - throw new IllegalArgumentException("dimension count must be < DimensionalValues.MAX_DIMENSIONS (= " + DimensionalValues.MAX_DIMENSIONS + "); got " + count + " for field=\"" + name + "\""); + if (count > PointValues.MAX_DIMENSIONS) { + throw new IllegalArgumentException("point dimension count must be < PointValues.MAX_DIMENSIONS (= " + PointValues.MAX_DIMENSIONS + "); got " + count + " for field=\"" + name + "\""); } if (numBytes <= 0) { - throw new IllegalArgumentException("dimension numBytes must be >= 0; got " + numBytes + " for field=\"" + name + "\""); + throw new IllegalArgumentException("point numBytes must be >= 0; got " + numBytes + " for field=\"" + name + "\""); } - if (numBytes > DimensionalValues.MAX_NUM_BYTES) { - throw new IllegalArgumentException("dimension numBytes must be <= DimensionalValues.MAX_NUM_BYTES (= " + DimensionalValues.MAX_NUM_BYTES + "); got " + numBytes + " for field=\"" + name + "\""); + if (numBytes > PointValues.MAX_NUM_BYTES) { + throw new IllegalArgumentException("point numBytes must be <= PointValues.MAX_NUM_BYTES (= " + PointValues.MAX_NUM_BYTES + "); got " + numBytes + " for field=\"" + name + "\""); } - if (dimensionCount != 0 && dimensionCount != count) { - throw new IllegalArgumentException("cannot change dimension count from " + dimensionCount + " to " + count + " for field=\"" + name + "\""); + if (pointDimensionCount != 0 && pointDimensionCount != count) { + throw new IllegalArgumentException("cannot change point dimension count from " + pointDimensionCount + " to " + count + " for field=\"" + name + "\""); } - if (dimensionNumBytes != 0 && dimensionNumBytes != numBytes) { - throw new IllegalArgumentException("cannot change dimension numBytes from " + dimensionNumBytes + " to " + numBytes + " for field=\"" + name + "\""); + if (pointNumBytes != 0 && pointNumBytes != numBytes) { + throw new IllegalArgumentException("cannot change point numBytes from " + pointNumBytes + " to " + numBytes + " for field=\"" + name + "\""); } - dimensionCount = count; - dimensionNumBytes = numBytes; + pointDimensionCount = count; + pointNumBytes = numBytes; } - /** Return dimension count */ - public int getDimensionCount() { - return dimensionCount; + /** Return point dimension count */ + public int getPointDimensionCount() { + return pointDimensionCount; } /** Return number of bytes per dimension */ - public int getDimensionNumBytes() { - return dimensionNumBytes; + public int getPointNumBytes() { + return pointNumBytes; } void setDocValuesType(DocValuesType type) { diff --git a/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java b/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java index 86da86578a7..93f35e7af49 100644 --- a/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java +++ b/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java @@ -39,7 +39,7 @@ public class FieldInfos implements Iterable { private final boolean hasVectors; private final boolean hasNorms; private final boolean hasDocValues; - private final boolean hasDimensionalValues; + private final boolean hasPointValues; // used only by fieldInfo(int) 
private final FieldInfo[] byNumberTable; // contiguous @@ -59,7 +59,7 @@ public class FieldInfos implements Iterable { boolean hasFreq = false; boolean hasNorms = false; boolean hasDocValues = false; - boolean hasDimensionalValues = false; + boolean hasPointValues = false; TreeMap byNumber = new TreeMap<>(); for (FieldInfo info : infos) { @@ -82,7 +82,7 @@ public class FieldInfos implements Iterable { hasNorms |= info.hasNorms(); hasDocValues |= info.getDocValuesType() != DocValuesType.NONE; hasPayloads |= info.hasPayloads(); - hasDimensionalValues |= (info.getDimensionCount() != 0); + hasPointValues |= (info.getPointDimensionCount() != 0); } this.hasVectors = hasVectors; @@ -92,7 +92,7 @@ public class FieldInfos implements Iterable { this.hasFreq = hasFreq; this.hasNorms = hasNorms; this.hasDocValues = hasDocValues; - this.hasDimensionalValues = hasDimensionalValues; + this.hasPointValues = hasPointValues; this.values = Collections.unmodifiableCollection(byNumber.values()); Integer max = byNumber.isEmpty() ? null : Collections.max(byNumber.keySet()); @@ -147,9 +147,9 @@ public class FieldInfos implements Iterable { return hasDocValues; } - /** Returns true if any fields have DimensionalValues */ - public boolean hasDimensionalValues() { - return hasDimensionalValues; + /** Returns true if any fields have PointValues */ + public boolean hasPointValues() { + return hasPointValues; } /** Returns the number of fields */ @@ -249,10 +249,10 @@ public class FieldInfos implements Iterable { FieldDimensions dims = dimensions.get(fieldName); if (dims != null) { if (dims.dimensionCount != dimensionCount) { - throw new IllegalArgumentException("cannot change dimension count from " + dims.dimensionCount + " to " + dimensionCount + " for field=\"" + fieldName + "\""); + throw new IllegalArgumentException("cannot change point dimension count from " + dims.dimensionCount + " to " + dimensionCount + " for field=\"" + fieldName + "\""); } if (dims.dimensionNumBytes != dimensionNumBytes) { - throw new IllegalArgumentException("cannot change dimension numBytes from " + dims.dimensionNumBytes + " to " + dimensionNumBytes + " for field=\"" + fieldName + "\""); + throw new IllegalArgumentException("cannot change point numBytes from " + dims.dimensionNumBytes + " to " + dimensionNumBytes + " for field=\"" + fieldName + "\""); } } else { dimensions.put(fieldName, new FieldDimensions(dimensionCount, dimensionNumBytes)); @@ -302,10 +302,10 @@ public class FieldInfos implements Iterable { FieldDimensions dim = dimensions.get(name); if (dim != null) { if (dim.dimensionCount != dimensionCount) { - throw new IllegalArgumentException("cannot change dimension count from " + dim.dimensionCount + " to " + dimensionCount + " for field=\"" + name + "\""); + throw new IllegalArgumentException("cannot change point dimension count from " + dim.dimensionCount + " to " + dimensionCount + " for field=\"" + name + "\""); } if (dim.dimensionNumBytes != dimensionNumBytes) { - throw new IllegalArgumentException("cannot change dimension numBytes from " + dim.dimensionNumBytes + " to " + dimensionNumBytes + " for field=\"" + name + "\""); + throw new IllegalArgumentException("cannot change point numBytes from " + dim.dimensionNumBytes + " to " + dimensionNumBytes + " for field=\"" + name + "\""); } } } @@ -337,11 +337,11 @@ public class FieldInfos implements Iterable { } synchronized void setDimensions(int number, String name, int dimensionCount, int dimensionNumBytes) { - if (dimensionNumBytes > DimensionalValues.MAX_NUM_BYTES) { - 
throw new IllegalArgumentException("dimension numBytes must be <= DimensionalValues.MAX_NUM_BYTES (= " + DimensionalValues.MAX_NUM_BYTES + "); got " + dimensionNumBytes + " for field=\"" + name + "\""); + if (dimensionNumBytes > PointValues.MAX_NUM_BYTES) { + throw new IllegalArgumentException("dimension numBytes must be <= PointValues.MAX_NUM_BYTES (= " + PointValues.MAX_NUM_BYTES + "); got " + dimensionNumBytes + " for field=\"" + name + "\""); } - if (dimensionCount > DimensionalValues.MAX_DIMENSIONS) { - throw new IllegalArgumentException("dimensionCount must be <= DimensionalValues.MAX_DIMENSIONS (= " + DimensionalValues.MAX_DIMENSIONS + "); got " + dimensionCount + " for field=\"" + name + "\""); + if (dimensionCount > PointValues.MAX_DIMENSIONS) { + throw new IllegalArgumentException("pointDimensionCount must be <= PointValues.MAX_DIMENSIONS (= " + PointValues.MAX_DIMENSIONS + "); got " + dimensionCount + " for field=\"" + name + "\""); } verifyConsistentDimensions(number, name, dimensionCount, dimensionNumBytes); dimensions.put(name, new FieldDimensions(dimensionCount, dimensionNumBytes)); @@ -432,7 +432,7 @@ public class FieldInfos implements Iterable { return addOrUpdateInternal(fi.name, fi.number, fi.hasVectors(), fi.omitsNorms(), fi.hasPayloads(), fi.getIndexOptions(), fi.getDocValuesType(), - fi.getDimensionCount(), fi.getDimensionNumBytes()); + fi.getPointDimensionCount(), fi.getPointNumBytes()); } public FieldInfo fieldInfo(String fieldName) { diff --git a/lucene/core/src/java/org/apache/lucene/index/FilterCodecReader.java b/lucene/core/src/java/org/apache/lucene/index/FilterCodecReader.java index b55bb88be3a..65275e2f720 100644 --- a/lucene/core/src/java/org/apache/lucene/index/FilterCodecReader.java +++ b/lucene/core/src/java/org/apache/lucene/index/FilterCodecReader.java @@ -19,7 +19,7 @@ package org.apache.lucene.index; import java.util.Objects; -import org.apache.lucene.codecs.DimensionalReader; +import org.apache.lucene.codecs.PointReader; import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.FieldsProducer; import org.apache.lucene.codecs.NormsProducer; @@ -82,13 +82,13 @@ public class FilterCodecReader extends CodecReader { } @Override - public DimensionalReader getDimensionalReader() { - return in.getDimensionalReader(); + public PointReader getPointReader() { + return in.getPointReader(); } @Override - public DimensionalValues getDimensionalValues() { - return in.getDimensionalValues(); + public PointValues getPointValues() { + return in.getPointValues(); } @Override diff --git a/lucene/core/src/java/org/apache/lucene/index/FilterLeafReader.java b/lucene/core/src/java/org/apache/lucene/index/FilterLeafReader.java index 48705321b72..eadeffa7c89 100644 --- a/lucene/core/src/java/org/apache/lucene/index/FilterLeafReader.java +++ b/lucene/core/src/java/org/apache/lucene/index/FilterLeafReader.java @@ -381,8 +381,8 @@ public class FilterLeafReader extends LeafReader { } @Override - public DimensionalValues getDimensionalValues() { - return in.getDimensionalValues(); + public PointValues getPointValues() { + return in.getPointValues(); } @Override diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java index dce5dd7ec70..b05e15a0c70 100644 --- a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java @@ -1018,7 +1018,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, 
Accountable { for(SegmentCommitInfo info : segmentInfos) { FieldInfos fis = readFieldInfos(info); for(FieldInfo fi : fis) { - map.addOrGet(fi.name, fi.number, fi.getDocValuesType(), fi.getDimensionCount(), fi.getDimensionNumBytes()); + map.addOrGet(fi.name, fi.number, fi.getDocValuesType(), fi.getPointDimensionCount(), fi.getPointNumBytes()); } } @@ -2495,7 +2495,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable { FieldInfos fis = readFieldInfos(info); for(FieldInfo fi : fis) { // This will throw exceptions if any of the incoming fields have an illegal schema change: - globalFieldNumberMap.addOrGet(fi.name, fi.number, fi.getDocValuesType(), fi.getDimensionCount(), fi.getDimensionNumBytes()); + globalFieldNumberMap.addOrGet(fi.name, fi.number, fi.getDocValuesType(), fi.getPointDimensionCount(), fi.getPointNumBytes()); } infos.add(copySegmentAsIs(info, newSegName, context)); } diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexableFieldType.java b/lucene/core/src/java/org/apache/lucene/index/IndexableFieldType.java index 34b86e3aa34..cf87b44dc0f 100644 --- a/lucene/core/src/java/org/apache/lucene/index/IndexableFieldType.java +++ b/lucene/core/src/java/org/apache/lucene/index/IndexableFieldType.java @@ -98,12 +98,12 @@ public interface IndexableFieldType { public DocValuesType docValuesType(); /** - * If this is positive, the field is indexed dimensionally. + * If this is positive, the field is indexed as a point. */ - public int dimensionCount(); + public int pointDimensionCount(); /** * The number of bytes in each dimension's values. */ - public int dimensionNumBytes(); + public int pointNumBytes(); } diff --git a/lucene/core/src/java/org/apache/lucene/index/LeafReader.java b/lucene/core/src/java/org/apache/lucene/index/LeafReader.java index 523ee4e3fbe..9baaeb6e9bb 100644 --- a/lucene/core/src/java/org/apache/lucene/index/LeafReader.java +++ b/lucene/core/src/java/org/apache/lucene/index/LeafReader.java @@ -19,7 +19,6 @@ package org.apache.lucene.index; import java.io.IOException; -import org.apache.lucene.codecs.DimensionalReader; import org.apache.lucene.index.IndexReader.ReaderClosedListener; import org.apache.lucene.util.Bits; @@ -301,9 +300,9 @@ public abstract class LeafReader extends IndexReader { */ public abstract Bits getLiveDocs(); - /** Returns the {@link DimensionalReader} used for numeric or - * spatial searches, or null if there are no dimensional fields. */ - public abstract DimensionalValues getDimensionalValues(); + /** Returns the {@link org.apache.lucene.codecs.PointReader} used for numeric or + * spatial searches, or null if there are no point fields. */ + public abstract PointValues getPointValues(); /** * Checks consistency of this reader. 
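Before the merge-side changes below, a small end-to-end sketch of the renamed user-facing API may help: index a one-dimensional LongPoint, then ask the leaf reader for its PointValues and per-field point metadata. This is a hedged illustration built only from classes touched by this patch plus standard index setup; the RAMDirectory/StandardAnalyzer wiring, the "timestamp" field name and its value are assumptions for the example.

import java.io.IOException;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class PointIndexingSketch {

  public static void main(String[] args) throws IOException {
    Directory dir = new RAMDirectory();   // in-memory index, just for the sketch
    try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
      Document doc = new Document();
      doc.add(new LongPoint("timestamp", 1453838400000L));   // 1D long point
      writer.addDocument(doc);
    }

    try (DirectoryReader reader = DirectoryReader.open(dir)) {
      LeafReader leaf = reader.leaves().get(0).reader();
      FieldInfo fi = leaf.getFieldInfos().fieldInfo("timestamp");
      System.out.println("dims=" + fi.getPointDimensionCount()
          + ", bytesPerDim=" + fi.getPointNumBytes());

      PointValues points = leaf.getPointValues();   // null when the segment has no point fields
      System.out.println("segment has points: " + (points != null));
    }
  }
}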
diff --git a/lucene/core/src/java/org/apache/lucene/index/MergeState.java b/lucene/core/src/java/org/apache/lucene/index/MergeState.java index a325b3f0def..09bdfbd61da 100644 --- a/lucene/core/src/java/org/apache/lucene/index/MergeState.java +++ b/lucene/core/src/java/org/apache/lucene/index/MergeState.java @@ -20,7 +20,7 @@ package org.apache.lucene.index; import java.io.IOException; import java.util.List; -import org.apache.lucene.codecs.DimensionalReader; +import org.apache.lucene.codecs.PointReader; import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.FieldsProducer; import org.apache.lucene.codecs.NormsProducer; @@ -66,8 +66,8 @@ public class MergeState { /** Postings to merge */ public final FieldsProducer[] fieldsProducers; - /** Dimensional readers to merge */ - public final DimensionalReader[] dimensionalReaders; + /** Point readers to merge */ + public final PointReader[] pointReaders; /** New docID base per reader. */ public final int[] docBase; @@ -90,7 +90,7 @@ public class MergeState { storedFieldsReaders = new StoredFieldsReader[numReaders]; termVectorsReaders = new TermVectorsReader[numReaders]; docValuesProducers = new DocValuesProducer[numReaders]; - dimensionalReaders = new DimensionalReader[numReaders]; + pointReaders = new PointReader[numReaders]; fieldInfos = new FieldInfos[numReaders]; liveDocs = new Bits[numReaders]; @@ -122,9 +122,9 @@ public class MergeState { } fieldsProducers[i] = reader.getPostingsReader().getMergeInstance(); - dimensionalReaders[i] = reader.getDimensionalReader(); - if (dimensionalReaders[i] != null) { - dimensionalReaders[i] = dimensionalReaders[i].getMergeInstance(); + pointReaders[i] = reader.getPointReader(); + if (pointReaders[i] != null) { + pointReaders[i] = pointReaders[i].getMergeInstance(); } } diff --git a/lucene/core/src/java/org/apache/lucene/index/MultiDimensionalValues.java b/lucene/core/src/java/org/apache/lucene/index/MultiPointValues.java similarity index 90% rename from lucene/core/src/java/org/apache/lucene/index/MultiDimensionalValues.java rename to lucene/core/src/java/org/apache/lucene/index/MultiPointValues.java index 0acfd6ef0d2..12282e7c5ab 100644 --- a/lucene/core/src/java/org/apache/lucene/index/MultiDimensionalValues.java +++ b/lucene/core/src/java/org/apache/lucene/index/MultiPointValues.java @@ -23,30 +23,30 @@ import java.util.List; import org.apache.lucene.util.StringHelper; -class MultiDimensionalValues extends DimensionalValues { +class MultiPointValues extends PointValues { - private final List subs; + private final List subs; private final List docBases; - private MultiDimensionalValues(List subs, List docBases) { + private MultiPointValues(List subs, List docBases) { this.subs = subs; this.docBases = docBases; } - public static DimensionalValues get(IndexReader r) { + public static PointValues get(IndexReader r) { final List leaves = r.leaves(); final int size = leaves.size(); if (size == 0) { return null; } else if (size == 1) { - return leaves.get(0).reader().getDimensionalValues(); + return leaves.get(0).reader().getPointValues(); } - List values = new ArrayList<>(); + List values = new ArrayList<>(); List docBases = new ArrayList<>(); for (int i = 0; i < size; i++) { LeafReaderContext context = leaves.get(i); - DimensionalValues v = context.reader().getDimensionalValues(); + PointValues v = context.reader().getPointValues(); if (v != null) { values.add(v); docBases.add(context.docBase); @@ -57,7 +57,7 @@ class MultiDimensionalValues extends DimensionalValues { return null; 
} - return new MultiDimensionalValues(values, docBases); + return new MultiPointValues(values, docBases); } /** Finds all documents and points matching the provided visitor */ @@ -85,7 +85,7 @@ class MultiDimensionalValues extends DimensionalValues { @Override public String toString() { StringBuilder b = new StringBuilder(); - b.append("MultiDimensionalValues("); + b.append("MultiPointValues("); for(int i=0;i 0) { b.append(", "); diff --git a/lucene/core/src/java/org/apache/lucene/index/ParallelLeafReader.java b/lucene/core/src/java/org/apache/lucene/index/ParallelLeafReader.java index 88bc0e51426..73c605f7fb1 100644 --- a/lucene/core/src/java/org/apache/lucene/index/ParallelLeafReader.java +++ b/lucene/core/src/java/org/apache/lucene/index/ParallelLeafReader.java @@ -315,15 +315,15 @@ public class ParallelLeafReader extends LeafReader { } @Override - public DimensionalValues getDimensionalValues() { - return new DimensionalValues() { + public PointValues getPointValues() { + return new PointValues() { @Override public void intersect(String fieldName, IntersectVisitor visitor) throws IOException { LeafReader reader = fieldToReader.get(fieldName); if (reader == null) { return; } - DimensionalValues dimValues = reader.getDimensionalValues(); + PointValues dimValues = reader.getPointValues(); if (dimValues == null) { return; } @@ -336,7 +336,7 @@ public class ParallelLeafReader extends LeafReader { if (reader == null) { return null; } - DimensionalValues dimValues = reader.getDimensionalValues(); + PointValues dimValues = reader.getPointValues(); if (dimValues == null) { return null; } @@ -349,7 +349,7 @@ public class ParallelLeafReader extends LeafReader { if (reader == null) { return null; } - DimensionalValues dimValues = reader.getDimensionalValues(); + PointValues dimValues = reader.getPointValues(); if (dimValues == null) { return null; } @@ -362,7 +362,7 @@ public class ParallelLeafReader extends LeafReader { if (reader == null) { return 0; } - DimensionalValues dimValues = reader.getDimensionalValues(); + PointValues dimValues = reader.getPointValues(); if (dimValues == null) { return 0; } @@ -375,7 +375,7 @@ public class ParallelLeafReader extends LeafReader { if (reader == null) { return 0; } - DimensionalValues dimValues = reader.getDimensionalValues(); + PointValues dimValues = reader.getPointValues(); if (dimValues == null) { return 0; } diff --git a/lucene/core/src/java/org/apache/lucene/index/DimensionalValues.java b/lucene/core/src/java/org/apache/lucene/index/PointValues.java similarity index 85% rename from lucene/core/src/java/org/apache/lucene/index/DimensionalValues.java rename to lucene/core/src/java/org/apache/lucene/index/PointValues.java index 2f30ab66938..e786bad813d 100644 --- a/lucene/core/src/java/org/apache/lucene/index/DimensionalValues.java +++ b/lucene/core/src/java/org/apache/lucene/index/PointValues.java @@ -2,11 +2,6 @@ package org.apache.lucene.index; import java.io.IOException; -import org.apache.lucene.document.DimensionalBinaryField; -import org.apache.lucene.document.DimensionalDoubleField; -import org.apache.lucene.document.DimensionalFloatField; -import org.apache.lucene.document.DimensionalIntField; -import org.apache.lucene.document.DimensionalLongField; import org.apache.lucene.util.bkd.BKDWriter; /* @@ -26,12 +21,12 @@ import org.apache.lucene.util.bkd.BKDWriter; * limitations under the License. 
*/ -/** Allows recursively visiting dimensional values indexed with {@link DimensionalIntField}, - * {@link DimensionalFloatField}, {@link DimensionalLongField}, {@link DimensionalDoubleField} - * or {@link DimensionalBinaryField}. +/** Allows recursively visiting point values indexed with {@link org.apache.lucene.document.IntPoint}, + * {@link org.apache.lucene.document.FloatPoint}, {@link org.apache.lucene.document.LongPoint}, {@link org.apache.lucene.document.DoublePoint} + * or {@link org.apache.lucene.document.BinaryPoint}. * * @lucene.experimental */ -public abstract class DimensionalValues { +public abstract class PointValues { /** Maximum number of bytes for each dimension */ public static final int MAX_NUM_BYTES = 16; @@ -40,7 +35,7 @@ public abstract class DimensionalValues { public static final int MAX_DIMENSIONS = BKDWriter.MAX_DIMS; /** Default constructor */ - protected DimensionalValues() { + protected PointValues() { } /** Used by {@link #intersect} to check how each recursive cell corresponds to the query. */ diff --git a/lucene/core/src/java/org/apache/lucene/index/DimensionalValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/PointValuesWriter.java similarity index 85% rename from lucene/core/src/java/org/apache/lucene/index/DimensionalValuesWriter.java rename to lucene/core/src/java/org/apache/lucene/index/PointValuesWriter.java index d3bfd3081e8..e05f270977f 100644 --- a/lucene/core/src/java/org/apache/lucene/index/DimensionalValuesWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/PointValuesWriter.java @@ -19,8 +19,8 @@ package org.apache.lucene.index; import java.io.IOException; -import org.apache.lucene.codecs.DimensionalReader; -import org.apache.lucene.codecs.DimensionalWriter; +import org.apache.lucene.codecs.PointReader; +import org.apache.lucene.codecs.PointWriter; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ByteBlockPool; import org.apache.lucene.util.BytesRef; @@ -28,7 +28,7 @@ import org.apache.lucene.util.Counter; import org.apache.lucene.util.RamUsageEstimator; /** Buffers up pending byte[][] value(s) per doc, then flushes when segment flushes. 
*/ -class DimensionalValuesWriter { +class PointValuesWriter { private final FieldInfo fieldInfo; private final ByteBlockPool bytes; private final Counter iwBytesUsed; @@ -36,21 +36,21 @@ class DimensionalValuesWriter { private int numDocs; private final byte[] packedValue; - public DimensionalValuesWriter(DocumentsWriterPerThread docWriter, FieldInfo fieldInfo) { + public PointValuesWriter(DocumentsWriterPerThread docWriter, FieldInfo fieldInfo) { this.fieldInfo = fieldInfo; this.iwBytesUsed = docWriter.bytesUsed; this.bytes = new ByteBlockPool(docWriter.byteBlockAllocator); docIDs = new int[16]; iwBytesUsed.addAndGet(16 * RamUsageEstimator.NUM_BYTES_INT); - packedValue = new byte[fieldInfo.getDimensionCount() * fieldInfo.getDimensionNumBytes()]; + packedValue = new byte[fieldInfo.getPointDimensionCount() * fieldInfo.getPointNumBytes()]; } public void addPackedValue(int docID, BytesRef value) { if (value == null) { - throw new IllegalArgumentException("field=" + fieldInfo.name + ": dimensional value cannot be null"); + throw new IllegalArgumentException("field=" + fieldInfo.name + ": point value cannot be null"); } - if (value.length != fieldInfo.getDimensionCount() * fieldInfo.getDimensionNumBytes()) { - throw new IllegalArgumentException("field=" + fieldInfo.name + ": this field's value has length=" + value.length + " but should be " + (fieldInfo.getDimensionCount() * fieldInfo.getDimensionNumBytes())); + if (value.length != fieldInfo.getPointDimensionCount() * fieldInfo.getPointNumBytes()) { + throw new IllegalArgumentException("field=" + fieldInfo.name + ": this field's value has length=" + value.length + " but should be " + (fieldInfo.getPointDimensionCount() * fieldInfo.getPointNumBytes())); } if (docIDs.length == numDocs) { docIDs = ArrayUtil.grow(docIDs, numDocs+1); @@ -61,10 +61,10 @@ class DimensionalValuesWriter { numDocs++; } - public void flush(SegmentWriteState state, DimensionalWriter writer) throws IOException { + public void flush(SegmentWriteState state, PointWriter writer) throws IOException { writer.writeField(fieldInfo, - new DimensionalReader() { + new PointReader() { @Override public void intersect(String fieldName, IntersectVisitor visitor) throws IOException { if (fieldName.equals(fieldInfo.name) == false) { diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java b/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java index 1b3a12e35b5..414c70732f1 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java +++ b/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java @@ -24,7 +24,7 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.DimensionalReader; +import org.apache.lucene.codecs.PointReader; import org.apache.lucene.codecs.FieldsProducer; import org.apache.lucene.codecs.NormsProducer; import org.apache.lucene.codecs.PostingsFormat; @@ -54,7 +54,7 @@ final class SegmentCoreReaders { final StoredFieldsReader fieldsReaderOrig; final TermVectorsReader termVectorsReaderOrig; - final DimensionalReader dimensionalReader; + final PointReader pointReader; final Directory cfsReader; /** * fieldinfos for this core: means gen=-1. 
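PointValuesWriter above rejects any packed value whose length differs from pointDimensionCount * pointNumBytes, the same invariant the expert BinaryPoint constructor enforces earlier in this patch. A hedged sketch of building such a packed value by hand, assuming only FieldType.setDimensions and the expert BinaryPoint constructor shown in this change (the 2 x 4-byte layout and the "cell" field name are illustrative):

import org.apache.lucene.document.BinaryPoint;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;

public class PackedPointSketch {

  public static void main(String[] args) {
    // 2 dimensions x 4 bytes per dimension: the packed value must be exactly 8 bytes.
    FieldType type = new FieldType();
    type.setDimensions(2, 4);
    type.freeze();

    byte[] packed = new byte[2 * 4];   // dimension 0 in bytes 0-3, dimension 1 in bytes 4-7
    Field field = new BinaryPoint("cell", packed, type);   // expert ctor checks the length

    System.out.println(field.fieldType().pointDimensionCount() + " dims, "
        + field.fieldType().pointNumBytes() + " bytes per dim");
  }
}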
@@ -124,10 +124,10 @@ final class SegmentCoreReaders { termVectorsReaderOrig = null; } - if (coreFieldInfos.hasDimensionalValues()) { - dimensionalReader = codec.dimensionalFormat().fieldsReader(segmentReadState); + if (coreFieldInfos.hasPointValues()) { + pointReader = codec.pointFormat().fieldsReader(segmentReadState); } else { - dimensionalReader = null; + pointReader = null; } success = true; } finally { @@ -157,7 +157,7 @@ final class SegmentCoreReaders { Throwable th = null; try { IOUtils.close(termVectorsLocal, fieldsReaderLocal, fields, termVectorsReaderOrig, fieldsReaderOrig, - cfsReader, normsProducer, dimensionalReader); + cfsReader, normsProducer, pointReader); } catch (Throwable throwable) { th = throwable; } finally { diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java b/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java index 16fe667cbf1..d80646f41a2 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java +++ b/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java @@ -21,7 +21,7 @@ import java.io.IOException; import java.util.List; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.DimensionalWriter; +import org.apache.lucene.codecs.PointWriter; import org.apache.lucene.codecs.DocValuesConsumer; import org.apache.lucene.codecs.FieldsConsumer; import org.apache.lucene.codecs.NormsConsumer; @@ -113,12 +113,12 @@ final class SegmentMerger { if (mergeState.infoStream.isEnabled("SM")) { t0 = System.nanoTime(); } - if (mergeState.mergeFieldInfos.hasDimensionalValues()) { - mergeDimensionalValues(segmentWriteState); + if (mergeState.mergeFieldInfos.hasPointValues()) { + mergePoints(segmentWriteState); } if (mergeState.infoStream.isEnabled("SM")) { long t1 = System.nanoTime(); - mergeState.infoStream.message("SM", ((t1-t0)/1000000) + " msec to merge dimensional values [" + numMerged + " docs]"); + mergeState.infoStream.message("SM", ((t1-t0)/1000000) + " msec to merge points [" + numMerged + " docs]"); } if (mergeState.mergeFieldInfos.hasNorms()) { @@ -163,8 +163,8 @@ final class SegmentMerger { } } - private void mergeDimensionalValues(SegmentWriteState segmentWriteState) throws IOException { - try (DimensionalWriter writer = codec.dimensionalFormat().fieldsWriter(segmentWriteState)) { + private void mergePoints(SegmentWriteState segmentWriteState) throws IOException { + try (PointWriter writer = codec.pointFormat().fieldsWriter(segmentWriteState)) { writer.merge(mergeState); } } diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java b/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java index 57a4b9abd2c..20f87fd91f2 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java +++ b/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java @@ -21,7 +21,7 @@ import java.io.IOException; import java.util.Collections; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.DimensionalReader; +import org.apache.lucene.codecs.PointReader; import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.FieldsProducer; @@ -218,9 +218,9 @@ public final class SegmentReader extends CodecReader { } @Override - public DimensionalValues getDimensionalValues() { + public PointValues getPointValues() { ensureOpen(); - return core.dimensionalReader; + return core.pointReader; } @Override @@ -242,9 +242,9 @@ public final class SegmentReader extends CodecReader { } 
@Override - public DimensionalReader getDimensionalReader() { + public PointReader getPointReader() { ensureOpen(); - return core.dimensionalReader; + return core.pointReader; } @Override diff --git a/lucene/core/src/java/org/apache/lucene/index/SlowCodecReaderWrapper.java b/lucene/core/src/java/org/apache/lucene/index/SlowCodecReaderWrapper.java index ba236d57ae9..c925efb7688 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SlowCodecReaderWrapper.java +++ b/lucene/core/src/java/org/apache/lucene/index/SlowCodecReaderWrapper.java @@ -20,7 +20,7 @@ package org.apache.lucene.index; import java.io.IOException; import java.util.Iterator; -import org.apache.lucene.codecs.DimensionalReader; +import org.apache.lucene.codecs.PointReader; import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.FieldsProducer; import org.apache.lucene.codecs.NormsProducer; @@ -92,13 +92,13 @@ public final class SlowCodecReaderWrapper { } @Override - public DimensionalValues getDimensionalValues() { - return reader.getDimensionalValues(); + public PointValues getPointValues() { + return reader.getPointValues(); } @Override - public DimensionalReader getDimensionalReader() { - return dimensionalValuesToReader(reader.getDimensionalValues()); + public PointReader getPointReader() { + return pointValuesToReader(reader.getPointValues()); } @Override @@ -129,11 +129,11 @@ public final class SlowCodecReaderWrapper { } } - private static DimensionalReader dimensionalValuesToReader(DimensionalValues values) { + private static PointReader pointValuesToReader(PointValues values) { if (values == null) { return null; } - return new DimensionalReader() { + return new PointReader() { @Override public void intersect(String fieldName, IntersectVisitor visitor) throws IOException { values.intersect(fieldName, visitor); diff --git a/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java b/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java index ee0048233cf..0f6cadf35e4 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java +++ b/lucene/core/src/java/org/apache/lucene/index/SlowCompositeReaderWrapper.java @@ -234,9 +234,9 @@ public final class SlowCompositeReaderWrapper extends LeafReader { } @Override - public DimensionalValues getDimensionalValues() { + public PointValues getPointValues() { ensureOpen(); - return MultiDimensionalValues.get(in); + return MultiPointValues.get(in); } @Override diff --git a/lucene/core/src/java/org/apache/lucene/search/ExactPointQuery.java b/lucene/core/src/java/org/apache/lucene/search/ExactPointQuery.java new file mode 100644 index 00000000000..21d7357e5ea --- /dev/null +++ b/lucene/core/src/java/org/apache/lucene/search/ExactPointQuery.java @@ -0,0 +1,153 @@ +package org.apache.lucene.search; + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.io.IOException; +import java.util.Arrays; +import java.util.Objects; + +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.PointValues.IntersectVisitor; +import org.apache.lucene.index.PointValues.Relation; +import org.apache.lucene.index.PointValues; +import org.apache.lucene.util.DocIdSetBuilder; +import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.RamUsageEstimator; +import org.apache.lucene.util.StringHelper; + +/** Searches for single points in fields previously indexed using points + * e.g. {@link org.apache.lucene.document.LongPoint}. */ + +public class ExactPointQuery extends Query { + final String field; + final int numDims; + final byte[][] point; + final int bytesPerDim; + + public ExactPointQuery(String field, byte[][] point) { + this.field = field; + if (point == null) { + throw new IllegalArgumentException("point must not be null"); + } + this.point = point; + this.numDims = point.length; + + int bytesPerDim = -1; + for(byte[] value : point) { + if (value == null) { + throw new IllegalArgumentException("point's dimensional values must not be null"); + } + if (bytesPerDim == -1) { + bytesPerDim = value.length; + } else if (value.length != bytesPerDim) { + throw new IllegalArgumentException("all dimensions must have same bytes length, but saw " + bytesPerDim + " and " + value.length); + } + } + this.bytesPerDim = bytesPerDim; + } + + /** Use in the 1D case when you indexed 1D int values using {@link org.apache.lucene.document.IntPoint} */ + public static ExactPointQuery new1DIntExact(String field, int value) { + return new ExactPointQuery(field, pack(value)); + } + + /** Use in the 1D case when you indexed 1D long values using {@link org.apache.lucene.document.LongPoint} */ + public static ExactPointQuery new1DLongExact(String field, long value) { + return new ExactPointQuery(field, pack(value)); + } + + /** Use in the 1D case when you indexed 1D float values using {@link org.apache.lucene.document.FloatPoint} */ + public static ExactPointQuery new1DFloatExact(String field, float value) { + return new ExactPointQuery(field, pack(value)); + } + + /** Use in the 1D case when you indexed 1D double values using {@link org.apache.lucene.document.DoublePoint} */ + public static ExactPointQuery new1DDoubleExact(String field, double value) { + return new ExactPointQuery(field, pack(value)); + } + + private static byte[][] pack(long value) { + byte[][] result = new byte[][] {new byte[RamUsageEstimator.NUM_BYTES_LONG]}; + NumericUtils.longToBytes(value, result[0], 0); + return result; + } + + private static byte[][] pack(double value) { + byte[][] result = new byte[][] {new byte[RamUsageEstimator.NUM_BYTES_LONG]}; + NumericUtils.longToBytesDirect(NumericUtils.doubleToSortableLong(value), result[0], 0); + return result; + } + + private static byte[][] pack(int value) { + byte[][] result = new byte[][] {new byte[RamUsageEstimator.NUM_BYTES_INT]}; + NumericUtils.intToBytes(value, result[0], 0); + return result; + } + + private static byte[][] pack(float value) { + byte[][] result = new byte[][] {new byte[RamUsageEstimator.NUM_BYTES_INT]}; + NumericUtils.intToBytesDirect(NumericUtils.floatToSortableInt(value), result[0], 0); + return result; + } + + @Override + public Query 
rewrite(IndexReader reader) throws IOException { + boolean[] inclusive = new boolean[] {true}; + return new PointRangeQuery(field, point, inclusive, point, inclusive); + } + + @Override + public int hashCode() { + int hash = super.hashCode(); + hash += Arrays.hashCode(point)^0x14fa55fb; + hash += numDims^0x14fa55fb; + hash += Objects.hashCode(bytesPerDim); + return hash; + } + + @Override + public boolean equals(Object other) { + if (super.equals(other)) { + final ExactPointQuery q = (ExactPointQuery) other; + return q.numDims == numDims && + q.bytesPerDim == bytesPerDim && + Arrays.equals(point, q.point); + } + + return false; + } + + @Override + public String toString(String field) { + final StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()); + sb.append(':'); + if (this.field.equals(field) == false) { + sb.append("field="); + sb.append(this.field); + sb.append(':'); + } + + return sb.append(" point=") + .append(Arrays.toString(point)) + .toString(); + } +} diff --git a/lucene/core/src/java/org/apache/lucene/search/LegacyNumericRangeQuery.java b/lucene/core/src/java/org/apache/lucene/search/LegacyNumericRangeQuery.java index be1bf68d6dd..cebfd14e482 100644 --- a/lucene/core/src/java/org/apache/lucene/search/LegacyNumericRangeQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/LegacyNumericRangeQuery.java @@ -157,7 +157,7 @@ import org.apache.lucene.index.Term; // for javadocs * precision step). This query type was developed for a geographic portal, where the performance for * e.g. bounding boxes or exact date/time stamps is important.
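(Not part of the patch: a minimal, self-contained sketch of how the new ExactPointQuery above is meant to be used against a field indexed with the LongPoint class this change introduces. The field name and value are illustrative.)

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.LongPoint;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.search.ExactPointQuery;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.TopDocs;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.RAMDirectory;

    public class ExactPointQueryExample {
      public static void main(String[] args) throws Exception {
        Directory dir = new RAMDirectory();
        // No analyzed text fields in this example, so no analyzer is required:
        try (IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null))) {
          Document doc = new Document();
          doc.add(new LongPoint("timestamp", 1453746153000L)); // indexed as a 1D, 8-byte point
          w.addDocument(doc);
        }
        try (DirectoryReader r = DirectoryReader.open(dir)) {
          IndexSearcher searcher = new IndexSearcher(r);
          // ExactPointQuery rewrites to a PointRangeQuery whose lower and upper bounds are both the point:
          TopDocs hits = searcher.search(ExactPointQuery.new1DLongExact("timestamp", 1453746153000L), 10);
          System.out.println("hits=" + hits.totalHits);
        }
      }
    }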
    * - * @deprecated Please use {@link DimensionalRangeQuery} instead + * @deprecated Please use {@link PointRangeQuery} instead * * @since 2.9 **/ diff --git a/lucene/core/src/java/org/apache/lucene/search/DimensionalRangeQuery.java b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java similarity index 79% rename from lucene/core/src/java/org/apache/lucene/search/DimensionalRangeQuery.java rename to lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java index 864a65e8ab9..714ba0dfd38 100644 --- a/lucene/core/src/java/org/apache/lucene/search/DimensionalRangeQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java @@ -21,14 +21,9 @@ import java.io.IOException; import java.util.Arrays; import java.util.Objects; -import org.apache.lucene.document.DimensionalBinaryField; -import org.apache.lucene.document.DimensionalDoubleField; -import org.apache.lucene.document.DimensionalFloatField; -import org.apache.lucene.document.DimensionalIntField; -import org.apache.lucene.document.DimensionalLongField; -import org.apache.lucene.index.DimensionalValues; -import org.apache.lucene.index.DimensionalValues.IntersectVisitor; -import org.apache.lucene.index.DimensionalValues.Relation; +import org.apache.lucene.index.PointValues; +import org.apache.lucene.index.PointValues.IntersectVisitor; +import org.apache.lucene.index.PointValues.Relation; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; @@ -37,11 +32,11 @@ import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.StringHelper; -/** Searches for ranges in fields previously indexed using dimensional - * fields, e.g. {@link DimensionalLongField}. In a 1D field this is +/** Searches for ranges in fields previously indexed using points e.g. + * {@link org.apache.lucene.document.LongPoint}. In a 1D field this is * a simple range query; in a multi-dimensional field it's a box shape. 
*/ -public class DimensionalRangeQuery extends Query { +public class PointRangeQuery extends Query { final String field; final int numDims; final byte[][] lowerPoint; @@ -51,9 +46,9 @@ public class DimensionalRangeQuery extends Query { // This is null only in the "fully open range" case final Integer bytesPerDim; - public DimensionalRangeQuery(String field, - byte[][] lowerPoint, boolean[] lowerInclusive, - byte[][] upperPoint, boolean[] upperInclusive) { + public PointRangeQuery(String field, + byte[][] lowerPoint, boolean[] lowerInclusive, + byte[][] upperPoint, boolean[] upperInclusive) { this.field = field; if (lowerPoint == null) { throw new IllegalArgumentException("lowerPoint must not be null"); @@ -96,29 +91,29 @@ public class DimensionalRangeQuery extends Query { } } - /** Use in the 1D case when you indexed 1D int values using {@link DimensionalIntField} */ - public static DimensionalRangeQuery new1DIntRange(String field, Integer lowerValue, boolean lowerInclusive, Integer upperValue, boolean upperInclusive) { - return new DimensionalRangeQuery(field, pack(lowerValue), new boolean[] {lowerInclusive}, pack(upperValue), new boolean[] {upperInclusive}); + /** Use in the 1D case when you indexed 1D int values using {@link org.apache.lucene.document.IntPoint} */ + public static PointRangeQuery new1DIntRange(String field, Integer lowerValue, boolean lowerInclusive, Integer upperValue, boolean upperInclusive) { + return new PointRangeQuery(field, pack(lowerValue), new boolean[] {lowerInclusive}, pack(upperValue), new boolean[] {upperInclusive}); } - /** Use in the 1D case when you indexed 1D long values using {@link DimensionalLongField} */ - public static DimensionalRangeQuery new1DLongRange(String field, Long lowerValue, boolean lowerInclusive, Long upperValue, boolean upperInclusive) { - return new DimensionalRangeQuery(field, pack(lowerValue), new boolean[] {lowerInclusive}, pack(upperValue), new boolean[] {upperInclusive}); + /** Use in the 1D case when you indexed 1D long values using {@link org.apache.lucene.document.LongPoint} */ + public static PointRangeQuery new1DLongRange(String field, Long lowerValue, boolean lowerInclusive, Long upperValue, boolean upperInclusive) { + return new PointRangeQuery(field, pack(lowerValue), new boolean[] {lowerInclusive}, pack(upperValue), new boolean[] {upperInclusive}); } - /** Use in the 1D case when you indexed 1D float values using {@link DimensionalFloatField} */ - public static DimensionalRangeQuery new1DFloatRange(String field, Float lowerValue, boolean lowerInclusive, Float upperValue, boolean upperInclusive) { - return new DimensionalRangeQuery(field, pack(lowerValue), new boolean[] {lowerInclusive}, pack(upperValue), new boolean[] {upperInclusive}); + /** Use in the 1D case when you indexed 1D float values using {@link org.apache.lucene.document.FloatPoint} */ + public static PointRangeQuery new1DFloatRange(String field, Float lowerValue, boolean lowerInclusive, Float upperValue, boolean upperInclusive) { + return new PointRangeQuery(field, pack(lowerValue), new boolean[] {lowerInclusive}, pack(upperValue), new boolean[] {upperInclusive}); } - /** Use in the 1D case when you indexed 1D double values using {@link DimensionalDoubleField} */ - public static DimensionalRangeQuery new1DDoubleRange(String field, Double lowerValue, boolean lowerInclusive, Double upperValue, boolean upperInclusive) { - return new DimensionalRangeQuery(field, pack(lowerValue), new boolean[] {lowerInclusive}, pack(upperValue), new boolean[] {upperInclusive}); 
+ /** Use in the 1D case when you indexed 1D double values using {@link org.apache.lucene.document.DoublePoint} */ + public static PointRangeQuery new1DDoubleRange(String field, Double lowerValue, boolean lowerInclusive, Double upperValue, boolean upperInclusive) { + return new PointRangeQuery(field, pack(lowerValue), new boolean[] {lowerInclusive}, pack(upperValue), new boolean[] {upperInclusive}); } - /** Use in the 1D case when you indexed binary values using {@link DimensionalBinaryField} */ - public static DimensionalRangeQuery new1DBinaryRange(String field, byte[] lowerValue, boolean lowerInclusive, byte[] upperValue, boolean upperInclusive) { - return new DimensionalRangeQuery(field, new byte[][] {lowerValue}, new boolean[] {lowerInclusive}, new byte[][] {upperValue}, new boolean[] {upperInclusive}); + /** Use in the 1D case when you indexed binary values using {@link org.apache.lucene.document.BinaryPoint} */ + public static PointRangeQuery new1DBinaryRange(String field, byte[] lowerValue, boolean lowerInclusive, byte[] upperValue, boolean upperInclusive) { + return new PointRangeQuery(field, new byte[][] {lowerValue}, new boolean[] {lowerInclusive}, new byte[][] {upperValue}, new boolean[] {upperInclusive}); } private static byte[][] pack(Long value) { @@ -172,9 +167,9 @@ public class DimensionalRangeQuery extends Query { @Override public Scorer scorer(LeafReaderContext context) throws IOException { LeafReader reader = context.reader(); - DimensionalValues values = reader.getDimensionalValues(); + PointValues values = reader.getPointValues(); if (values == null) { - // No docs in this segment indexed any field dimensionally + // No docs in this segment indexed any points return null; } FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field); @@ -182,13 +177,13 @@ public class DimensionalRangeQuery extends Query { // No docs in this segment indexed this field at all return null; } - if (fieldInfo.getDimensionCount() != numDims) { - throw new IllegalArgumentException("field=\"" + field + "\" was indexed with numDims=" + fieldInfo.getDimensionCount() + " but this query has numDims=" + numDims); + if (fieldInfo.getPointDimensionCount() != numDims) { + throw new IllegalArgumentException("field=\"" + field + "\" was indexed with numDims=" + fieldInfo.getPointDimensionCount() + " but this query has numDims=" + numDims); } - if (bytesPerDim != null && bytesPerDim.intValue() != fieldInfo.getDimensionNumBytes()) { - throw new IllegalArgumentException("field=\"" + field + "\" was indexed with bytesPerDim=" + fieldInfo.getDimensionNumBytes() + " but this query has bytesPerDim=" + bytesPerDim); + if (bytesPerDim != null && bytesPerDim.intValue() != fieldInfo.getPointNumBytes()) { + throw new IllegalArgumentException("field=\"" + field + "\" was indexed with bytesPerDim=" + fieldInfo.getPointNumBytes() + " but this query has bytesPerDim=" + bytesPerDim); } - int bytesPerDim = fieldInfo.getDimensionNumBytes(); + int bytesPerDim = fieldInfo.getPointNumBytes(); byte[] packedLowerIncl = new byte[numDims * bytesPerDim]; byte[] packedUpperIncl = new byte[numDims * bytesPerDim]; @@ -320,7 +315,7 @@ public class DimensionalRangeQuery extends Query { @Override public boolean equals(Object other) { if (super.equals(other)) { - final DimensionalRangeQuery q = (DimensionalRangeQuery) other; + final PointRangeQuery q = (PointRangeQuery) other; return q.numDims == numDims && q.bytesPerDim == bytesPerDim && Arrays.equals(lowerPoint, q.lowerPoint) && diff --git 
a/lucene/core/src/java/org/apache/lucene/search/Query.java b/lucene/core/src/java/org/apache/lucene/search/Query.java index 09db16a9b1e..9646ff746ee 100644 --- a/lucene/core/src/java/org/apache/lucene/search/Query.java +++ b/lucene/core/src/java/org/apache/lucene/search/Query.java @@ -33,7 +33,7 @@ import org.apache.lucene.index.IndexReader;
<li> {@link FuzzyQuery}
<li> {@link RegexpQuery}
<li> {@link TermRangeQuery} -
<li> {@link DimensionalRangeQuery} +
<li> {@link PointRangeQuery}
<li> {@link ConstantScoreQuery}
<li> {@link DisjunctionMaxQuery}
<li> {@link MatchAllDocsQuery} diff --git a/lucene/core/src/java/org/apache/lucene/search/UsageTrackingQueryCachingPolicy.java b/lucene/core/src/java/org/apache/lucene/search/UsageTrackingQueryCachingPolicy.java index 3c5e997497a..24f165272a8 100644 --- a/lucene/core/src/java/org/apache/lucene/search/UsageTrackingQueryCachingPolicy.java +++ b/lucene/core/src/java/org/apache/lucene/search/UsageTrackingQueryCachingPolicy.java @@ -43,7 +43,7 @@ public final class UsageTrackingQueryCachingPolicy implements QueryCachingPolicy // DocIdSet in the first place return query instanceof MultiTermQuery || query instanceof MultiTermQueryConstantScoreWrapper || - query instanceof DimensionalRangeQuery; + query instanceof PointRangeQuery; } static boolean isCheap(Query query) { diff --git a/lucene/core/src/java/org/apache/lucene/search/package-info.java b/lucene/core/src/java/org/apache/lucene/search/package-info.java index efaeca6ea33..27f7d551346 100644 --- a/lucene/core/src/java/org/apache/lucene/search/package-info.java +++ b/lucene/core/src/java/org/apache/lucene/search/package-info.java @@ -160,22 +160,22 @@ * and an upper * {@link org.apache.lucene.index.Term Term} * according to {@link org.apache.lucene.util.BytesRef#compareTo BytesRef.compareTo()}. It is not intended - * for numerical ranges; use {@link org.apache.lucene.search.DimensionalRangeQuery DimensionalRangeQuery} instead. + * for numerical ranges; use {@link org.apache.lucene.search.PointRangeQuery PointRangeQuery} instead. * * For example, one could find all documents * that have terms beginning with the letters a through c. * *

- * {@link org.apache.lucene.search.DimensionalRangeQuery DimensionalRangeQuery} + * {@link org.apache.lucene.search.PointRangeQuery PointRangeQuery} * * * The - * {@link org.apache.lucene.search.DimensionalRangeQuery DimensionalRangeQuery} + * {@link org.apache.lucene.search.PointRangeQuery PointRangeQuery} * matches all documents that occur in a numeric range. - * For DimensionalRangeQuery to work, you must index the values - * using a one of the numeric fields ({@link org.apache.lucene.document.DimensionalIntField DimensionalIntField}, - * {@link org.apache.lucene.document.DimensionalLongField DimensionalLongField}, {@link org.apache.lucene.document.DimensionalFloatField DimensionalFloatField}, - * or {@link org.apache.lucene.document.DimensionalDoubleField DimensionalDoubleField}). + * For PointRangeQuery to work, you must index the values + * using a one of the numeric fields ({@link org.apache.lucene.document.IntPoint IntPoint}, + * {@link org.apache.lucene.document.LongPoint LongPoint}, {@link org.apache.lucene.document.FloatPoint FloatPoint}, + * or {@link org.apache.lucene.document.DoublePoint DoublePoint}). * *
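(Not part of the patch: an illustrative sketch of the round trip this paragraph describes, indexing a value with one of the new point field classes and searching it with a PointRangeQuery factory method. Field names are made up, null bounds mean the range is open on that end, and the usual org.apache.lucene.document/index/search/store imports are assumed.)

    static void indexPrice(IndexWriter writer, long price) throws IOException {
      Document doc = new Document();
      doc.add(new LongPoint("price", price)); // stored in the index as a 1D, 8-byte point
      writer.addDocument(doc);
    }

    static int countPricesAtLeast(Directory dir, long minPrice) throws IOException {
      try (DirectoryReader reader = DirectoryReader.open(dir)) {
        IndexSearcher searcher = new IndexSearcher(reader);
        // Lower bound inclusive; the null upper bound leaves that end open:
        Query q = PointRangeQuery.new1DLongRange("price", minPrice, true, null, false);
        return searcher.search(q, 10).totalHits;
      }
    }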

    * {@link org.apache.lucene.search.PrefixQuery PrefixQuery}, diff --git a/lucene/core/src/java/org/apache/lucene/util/LegacyNumericUtils.java b/lucene/core/src/java/org/apache/lucene/util/LegacyNumericUtils.java index e3670827748..1cec17f2826 100644 --- a/lucene/core/src/java/org/apache/lucene/util/LegacyNumericUtils.java +++ b/lucene/core/src/java/org/apache/lucene/util/LegacyNumericUtils.java @@ -19,7 +19,6 @@ package org.apache.lucene.util; import java.io.IOException; -import org.apache.lucene.index.DimensionalValues; import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.FilteredTermsEnum; import org.apache.lucene.index.Terms; @@ -60,7 +59,7 @@ import org.apache.lucene.index.TermsEnum; * * @lucene.internal * - * @deprecated Please use {@link DimensionalValues} instead. + * @deprecated Please use {@link org.apache.lucene.index.PointValues} instead. * * @since 2.9, API changed non backwards-compliant in 4.0 */ diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java index a6854498f61..81b9814c64d 100644 --- a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java +++ b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.util.Arrays; import org.apache.lucene.codecs.CodecUtil; -import org.apache.lucene.index.DimensionalValues.IntersectVisitor; -import org.apache.lucene.index.DimensionalValues.Relation; +import org.apache.lucene.index.PointValues.IntersectVisitor; +import org.apache.lucene.index.PointValues.Relation; import org.apache.lucene.store.IndexInput; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; diff --git a/lucene/core/src/test/org/apache/lucene/document/TestFieldType.java b/lucene/core/src/test/org/apache/lucene/document/TestFieldType.java index 96eb42eb447..00b0966b3d1 100644 --- a/lucene/core/src/test/org/apache/lucene/document/TestFieldType.java +++ b/lucene/core/src/test/org/apache/lucene/document/TestFieldType.java @@ -89,7 +89,7 @@ public class TestFieldType extends LuceneTestCase { if ((method.getModifiers() & Modifier.PUBLIC) != 0 && method.getName().startsWith("set")) { final Class[] parameterTypes = method.getParameterTypes(); final Object[] args = new Object[parameterTypes.length]; - if (method.getName().equals("setDimensions")) { + if (method.getName().equals("setPointDimensions")) { args[0] = 1 + random().nextInt(15); args[1] = 1 + random().nextInt(100); } else { diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java b/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java index c5eef357491..0d25f281a8f 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java @@ -32,11 +32,11 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; import java.util.regex.Pattern; -import org.apache.lucene.document.DimensionalLongField; +import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; -import org.apache.lucene.search.DimensionalRangeQuery; +import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import 
org.apache.lucene.search.ScoreDoc; @@ -681,7 +681,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase { Document newDoc = new Document(); long value = Long.parseLong(oldDoc.get("text").split(" ")[1]); newDoc.add(new NumericDocValuesField("number", value)); - newDoc.add(new DimensionalLongField("number", value)); + newDoc.add(new LongPoint("number", value)); w.addDocument(newDoc); } @@ -737,7 +737,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase { Document newDoc = new Document(); long value = Long.parseLong(oldDoc.get("text").split(" ")[1]); newDoc.add(new NumericDocValuesField("number_" + newSchemaGen, value)); - newDoc.add(new DimensionalLongField("number", value)); + newDoc.add(new LongPoint("number", value)); w.addDocument(newDoc); } } else { @@ -832,7 +832,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase { Document newDoc = new Document(); long value = Long.parseLong(oldDoc.get("text").split(" ")[1]); newDoc.add(new NumericDocValuesField("number", newSchemaGen*value)); - newDoc.add(new DimensionalLongField("number", value)); + newDoc.add(new LongPoint("number", value)); w.addDocument(newDoc); } } else { @@ -1168,7 +1168,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase { checkAllNumberDVs(r); IndexSearcher s = newSearcher(r); testNumericDVSort(s); - testDimensionalRangeQuery(s); + testPointRangeQuery(s); } finally { reindexer.mgr.release(r); } @@ -1190,7 +1190,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase { checkAllNumberDVs(r); IndexSearcher s = newSearcher(r); testNumericDVSort(s); - testDimensionalRangeQuery(s); + testPointRangeQuery(s); } finally { reindexer.mgr.release(r); } @@ -1209,7 +1209,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase { checkAllNumberDVs(r); IndexSearcher s = newSearcher(r); testNumericDVSort(s); - testDimensionalRangeQuery(s); + testPointRangeQuery(s); } finally { reindexer.mgr.release(r); } @@ -1261,7 +1261,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase { checkAllNumberDVs(r); IndexSearcher s = newSearcher(r); testNumericDVSort(s); - testDimensionalRangeQuery(s); + testPointRangeQuery(s); } finally { reindexer.mgr.release(r); } @@ -1340,7 +1340,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase { } } - private static void testDimensionalRangeQuery(IndexSearcher s) throws IOException { + private static void testPointRangeQuery(IndexSearcher s) throws IOException { NumericDocValues numbers = MultiDocValues.getNumericValues(s.getIndexReader(), "number"); for(int i=0;i<100;i++) { // Confirm we can range search by the new indexed (numeric) field: @@ -1352,7 +1352,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase { max = x; } - TopDocs hits = s.search(DimensionalRangeQuery.new1DLongRange("number", min, true, max, true), 100); + TopDocs hits = s.search(PointRangeQuery.new1DLongRange("number", min, true, max, true), 100); for(ScoreDoc scoreDoc : hits.scoreDocs) { long value = Long.parseLong(s.doc(scoreDoc.doc).get("text").split(" ")[1]); assertTrue(value >= min); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexableField.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexableField.java index 0009789d163..28a9eafa051 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexableField.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexableField.java @@ -92,12 +92,12 @@ public class TestIndexableField extends LuceneTestCase { } @Override - public int 
dimensionCount() { + public int pointDimensionCount() { return 0; } @Override - public int dimensionNumBytes() { + public int pointNumBytes() { return 0; } }; diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDimensionalValues.java b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java similarity index 85% rename from lucene/core/src/test/org/apache/lucene/index/TestDimensionalValues.java rename to lucene/core/src/test/org/apache/lucene/index/TestPointValues.java index ec1fa8b4baa..f8185673752 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDimensionalValues.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java @@ -26,20 +26,20 @@ import java.util.List; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.DimensionalFormat; -import org.apache.lucene.codecs.DimensionalReader; -import org.apache.lucene.codecs.DimensionalWriter; +import org.apache.lucene.codecs.PointFormat; +import org.apache.lucene.codecs.PointReader; +import org.apache.lucene.codecs.PointWriter; import org.apache.lucene.codecs.FilterCodec; -import org.apache.lucene.codecs.lucene60.Lucene60DimensionalReader; -import org.apache.lucene.codecs.lucene60.Lucene60DimensionalWriter; -import org.apache.lucene.document.DimensionalBinaryField; -import org.apache.lucene.document.DimensionalIntField; +import org.apache.lucene.codecs.lucene60.Lucene60PointReader; +import org.apache.lucene.codecs.lucene60.Lucene60PointWriter; +import org.apache.lucene.document.BinaryPoint; +import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.StringField; -import org.apache.lucene.index.DimensionalValues.IntersectVisitor; -import org.apache.lucene.index.DimensionalValues.Relation; +import org.apache.lucene.index.PointValues.IntersectVisitor; +import org.apache.lucene.index.PointValues.Relation; import org.apache.lucene.store.Directory; import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.util.Bits; @@ -52,10 +52,10 @@ import org.apache.lucene.util.TestUtil; // TODO: factor out a BaseTestDimensionFormat -public class TestDimensionalValues extends LuceneTestCase { +public class TestPointValues extends LuceneTestCase { public void testBasic() throws Exception { Directory dir = getDirectory(20); - // TODO: randomize codec once others support dimensional format + // TODO: randomize codec once others support points format IndexWriterConfig iwc = newIndexWriterConfig(); iwc.setMergePolicy(newLogMergePolicy()); IndexWriter w = new IndexWriter(dir, iwc); @@ -63,7 +63,7 @@ public class TestDimensionalValues extends LuceneTestCase { for(int i=0;i<20;i++) { Document doc = new Document(); NumericUtils.intToBytes(i, point, 0); - doc.add(new DimensionalBinaryField("dim", point)); + doc.add(new BinaryPoint("dim", point)); w.addDocument(doc); } w.forceMerge(1); @@ -71,7 +71,7 @@ public class TestDimensionalValues extends LuceneTestCase { DirectoryReader r = DirectoryReader.open(dir); LeafReader sub = getOnlySegmentReader(r); - DimensionalValues values = sub.getDimensionalValues(); + PointValues values = sub.getPointValues(); // Simple test: make sure intersect can visit every doc: BitSet seen = new BitSet(); @@ -102,7 +102,7 @@ public class TestDimensionalValues extends LuceneTestCase { for(int i=0;i<20;i++) { Document doc = new Document(); 
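(Not part of the patch: a sketch of the low-level access pattern these tests exercise, walking every indexed point of one field through PointValues.intersect with a visitor that accepts everything. The field name is illustrative and the usual org.apache.lucene.index imports are assumed.)

    static int countPoints(LeafReader leafReader, String field) throws IOException {
      PointValues values = leafReader.getPointValues();
      if (values == null) {
        return 0; // this segment has no points at all
      }
      final int[] count = new int[1];
      values.intersect(field, new PointValues.IntersectVisitor() {
        @Override
        public void visit(int docID) {
          count[0]++; // the whole leaf cell matched, no per-value check was needed
        }
        @Override
        public void visit(int docID, byte[] packedValue) {
          count[0]++; // an individual value was delivered for checking; here we accept it
        }
        @Override
        public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
          // Ask to see every value; returning CELL_INSIDE_QUERY instead would skip the per-value visits.
          return PointValues.Relation.CELL_CROSSES_QUERY;
        }
      });
      return count[0];
    }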
NumericUtils.intToBytes(i, point, 0); - doc.add(new DimensionalBinaryField("dim", point)); + doc.add(new BinaryPoint("dim", point)); w.addDocument(doc); if (i == 10) { w.commit(); @@ -113,7 +113,7 @@ public class TestDimensionalValues extends LuceneTestCase { DirectoryReader r = DirectoryReader.open(dir); LeafReader sub = getOnlySegmentReader(r); - DimensionalValues values = sub.getDimensionalValues(); + PointValues values = sub.getPointValues(); // Simple test: make sure intersect can visit every doc: BitSet seen = new BitSet(); @@ -135,7 +135,7 @@ public class TestDimensionalValues extends LuceneTestCase { IOUtils.close(r, dir); } - public void testAllDimensionalDocsDeletedInSegment() throws Exception { + public void testAllPointDocsDeletedInSegment() throws Exception { Directory dir = getDirectory(20); IndexWriterConfig iwc = newIndexWriterConfig(); IndexWriter w = new IndexWriter(dir, iwc); @@ -143,7 +143,7 @@ public class TestDimensionalValues extends LuceneTestCase { for(int i=0;i<10;i++) { Document doc = new Document(); NumericUtils.intToBytes(i, point, 0); - doc.add(new DimensionalBinaryField("dim", point)); + doc.add(new BinaryPoint("dim", point)); doc.add(new NumericDocValuesField("id", i)); doc.add(newStringField("x", "x", Field.Store.NO)); w.addDocument(doc); @@ -156,7 +156,7 @@ public class TestDimensionalValues extends LuceneTestCase { w.close(); DirectoryReader r = DirectoryReader.open(dir); assertEquals(1, r.numDocs()); - DimensionalValues values = MultiDimensionalValues.get(r); + PointValues values = MultiPointValues.get(r); Bits liveDocs = MultiFields.getLiveDocs(r); NumericDocValues idValues = MultiDocValues.getNumericValues(r, "id"); @@ -186,8 +186,8 @@ public class TestDimensionalValues extends LuceneTestCase { /** Make sure we close open files, delete temp files, etc., on exception */ public void testWithExceptions() throws Exception { int numDocs = atLeast(10000); - int numBytesPerDim = TestUtil.nextInt(random(), 2, DimensionalValues.MAX_NUM_BYTES); - int numDims = TestUtil.nextInt(random(), 1, DimensionalValues.MAX_DIMENSIONS); + int numBytesPerDim = TestUtil.nextInt(random(), 2, PointValues.MAX_NUM_BYTES); + int numDims = TestUtil.nextInt(random(), 1, PointValues.MAX_DIMENSIONS); byte[][][] docValues = new byte[numDocs][][]; @@ -246,8 +246,8 @@ public class TestDimensionalValues extends LuceneTestCase { } public void testMultiValued() throws Exception { - int numBytesPerDim = TestUtil.nextInt(random(), 2, DimensionalValues.MAX_NUM_BYTES); - int numDims = TestUtil.nextInt(random(), 1, DimensionalValues.MAX_DIMENSIONS); + int numBytesPerDim = TestUtil.nextInt(random(), 2, PointValues.MAX_NUM_BYTES); + int numDims = TestUtil.nextInt(random(), 1, PointValues.MAX_DIMENSIONS); int numDocs = atLeast(1000); List docValues = new ArrayList<>(); @@ -276,8 +276,8 @@ public class TestDimensionalValues extends LuceneTestCase { } public void testAllEqual() throws Exception { - int numBytesPerDim = TestUtil.nextInt(random(), 2, DimensionalValues.MAX_NUM_BYTES); - int numDims = TestUtil.nextInt(random(), 1, DimensionalValues.MAX_DIMENSIONS); + int numBytesPerDim = TestUtil.nextInt(random(), 2, PointValues.MAX_NUM_BYTES); + int numDims = TestUtil.nextInt(random(), 1, PointValues.MAX_DIMENSIONS); int numDocs = atLeast(1000); byte[][][] docValues = new byte[numDocs][][]; @@ -299,8 +299,8 @@ public class TestDimensionalValues extends LuceneTestCase { } public void testOneDimEqual() throws Exception { - int numBytesPerDim = TestUtil.nextInt(random(), 2, DimensionalValues.MAX_NUM_BYTES); 
- int numDims = TestUtil.nextInt(random(), 1, DimensionalValues.MAX_DIMENSIONS); + int numBytesPerDim = TestUtil.nextInt(random(), 2, PointValues.MAX_NUM_BYTES); + int numDims = TestUtil.nextInt(random(), 1, PointValues.MAX_DIMENSIONS); int numDocs = atLeast(1000); int theEqualDim = random().nextInt(numDims); @@ -326,8 +326,8 @@ public class TestDimensionalValues extends LuceneTestCase { int numDocs = atLeast(1000); try (Directory dir = getDirectory(numDocs)) { - int numBytesPerDim = TestUtil.nextInt(random(), 2, DimensionalValues.MAX_NUM_BYTES); - int numDims = TestUtil.nextInt(random(), 1, DimensionalValues.MAX_DIMENSIONS); + int numBytesPerDim = TestUtil.nextInt(random(), 2, PointValues.MAX_NUM_BYTES); + int numDims = TestUtil.nextInt(random(), 1, PointValues.MAX_DIMENSIONS); IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); // We rely on docIDs not changing: iwc.setMergePolicy(newLogMergePolicy()); @@ -350,14 +350,14 @@ public class TestDimensionalValues extends LuceneTestCase { } docs[docID] = values; Document doc = new Document(); - doc.add(new DimensionalBinaryField("field", bytes)); + doc.add(new BinaryPoint("field", bytes)); w.addDocument(doc); } DirectoryReader r = w.getReader(); w.close(); - DimensionalValues dimValues = MultiDimensionalValues.get(r); + PointValues dimValues = MultiPointValues.get(r); int iters = atLeast(100); for(int iter=0;iter 100000) { - dir = noVirusChecker(newFSDirectory(createTempDir("TestDimensionalRangeQuery"))); + dir = noVirusChecker(newFSDirectory(createTempDir("TestPointRangeQuery"))); } else { dir = getDirectory(); } @@ -489,7 +493,7 @@ public class TestDimensionalRangeQuery extends LuceneTestCase { } if (missing.get(id) == false) { - doc.add(new DimensionalBinaryField("value", docValues[ord])); + doc.add(new BinaryPoint("value", docValues[ord])); if (VERBOSE) { System.out.println("id=" + id); for(int dim=0;dimmight be as - * fast or faster than a regular {@link DimensionalRangeQuery}. + * fast or faster than a regular {@link PointRangeQuery}. * *
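(Not part of the patch: a small sketch of the comparison being made here, the same 1D long range expressed both as a PointRangeQuery over a LongPoint field and as a doc-values based DocValuesRangeQuery. Field names are illustrative; the usual Lucene core and sandbox imports are assumed.)

    static void addRow(IndexWriter writer, long value) throws IOException {
      Document doc = new Document();
      doc.add(new LongPoint("idx", value));                   // matched by PointRangeQuery
      doc.add(new SortedNumericDocValuesField("dv", value));  // matched by DocValuesRangeQuery
      writer.addDocument(doc);
    }

    static void compareCounts(IndexSearcher searcher) throws IOException {
      Query byPoints = PointRangeQuery.new1DLongRange("idx", 0L, true, 100L, true);
      Query byDocValues = DocValuesRangeQuery.newLongRange("dv", 0L, 100L, true, true);
      // Both queries should match exactly the same documents; only the execution strategy differs.
      System.out.println(searcher.count(byPoints) + " == " + searcher.count(byDocValues));
    }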
    * NOTE: be very careful using this query: it is diff --git a/lucene/sandbox/src/java/org/apache/lucene/search/DimensionalPointInPolygonQuery.java b/lucene/sandbox/src/java/org/apache/lucene/search/PointInPolygonQuery.java similarity index 84% rename from lucene/sandbox/src/java/org/apache/lucene/search/DimensionalPointInPolygonQuery.java rename to lucene/sandbox/src/java/org/apache/lucene/search/PointInPolygonQuery.java index 20a1e944a3d..abdc5b9fdcb 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/search/DimensionalPointInPolygonQuery.java +++ b/lucene/sandbox/src/java/org/apache/lucene/search/PointInPolygonQuery.java @@ -20,10 +20,10 @@ package org.apache.lucene.search; import java.io.IOException; import java.util.Arrays; -import org.apache.lucene.document.DimensionalLatLonField; -import org.apache.lucene.index.DimensionalValues.IntersectVisitor; -import org.apache.lucene.index.DimensionalValues.Relation; -import org.apache.lucene.index.DimensionalValues; +import org.apache.lucene.document.LatLonPoint; +import org.apache.lucene.index.PointValues.IntersectVisitor; +import org.apache.lucene.index.PointValues.Relation; +import org.apache.lucene.index.PointValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.util.DocIdSetBuilder; @@ -33,11 +33,11 @@ import org.apache.lucene.util.NumericUtils; /** Finds all previously indexed points that fall within the specified polygon. * - *
The field must be indexed with using {@link DimensionalLatLonField} added per document. + *
    The field must be indexed with using {@link org.apache.lucene.document.LatLonPoint} added per document. * * @lucene.experimental */ -public class DimensionalPointInPolygonQuery extends Query { +public class PointInPolygonQuery extends Query { final String field; final double minLat; final double maxLat; @@ -47,7 +47,7 @@ public class DimensionalPointInPolygonQuery extends Query { final double[] polyLons; /** The lats/lons must be clockwise or counter-clockwise. */ - public DimensionalPointInPolygonQuery(String field, double[] polyLats, double[] polyLons) { + public PointInPolygonQuery(String field, double[] polyLats, double[] polyLons) { this.field = field; if (polyLats.length != polyLons.length) { throw new IllegalArgumentException("polyLats and polyLons must be equal length"); @@ -105,9 +105,9 @@ public class DimensionalPointInPolygonQuery extends Query { @Override public Scorer scorer(LeafReaderContext context) throws IOException { LeafReader reader = context.reader(); - DimensionalValues values = reader.getDimensionalValues(); + PointValues values = reader.getPointValues(); if (values == null) { - // No docs in this segment had any dimensional fields + // No docs in this segment had any points fields return null; } @@ -124,8 +124,8 @@ public class DimensionalPointInPolygonQuery extends Query { @Override public void visit(int docID, byte[] packedValue) { assert packedValue.length == 8; - double lat = DimensionalLatLonField.decodeLat(NumericUtils.bytesToInt(packedValue, 0)); - double lon = DimensionalLatLonField.decodeLon(NumericUtils.bytesToInt(packedValue, 1)); + double lat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(packedValue, 0)); + double lon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(packedValue, 1)); if (GeoRelationUtils.pointInPolygon(polyLons, polyLats, lat, lon)) { hitCount[0]++; result.add(docID); @@ -134,10 +134,10 @@ public class DimensionalPointInPolygonQuery extends Query { @Override public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - double cellMinLat = DimensionalLatLonField.decodeLat(NumericUtils.bytesToInt(minPackedValue, 0)); - double cellMinLon = DimensionalLatLonField.decodeLon(NumericUtils.bytesToInt(minPackedValue, 1)); - double cellMaxLat = DimensionalLatLonField.decodeLat(NumericUtils.bytesToInt(maxPackedValue, 0)); - double cellMaxLon = DimensionalLatLonField.decodeLon(NumericUtils.bytesToInt(maxPackedValue, 1)); + double cellMinLat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(minPackedValue, 0)); + double cellMinLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(minPackedValue, 1)); + double cellMaxLat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(maxPackedValue, 0)); + double cellMaxLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(maxPackedValue, 1)); if (cellMinLat <= minLat && cellMaxLat >= maxLat && cellMinLon <= minLon && cellMaxLon >= maxLon) { // Cell fully encloses the query @@ -169,7 +169,7 @@ public class DimensionalPointInPolygonQuery extends Query { if (o == null || getClass() != o.getClass()) return false; if (!super.equals(o)) return false; - DimensionalPointInPolygonQuery that = (DimensionalPointInPolygonQuery) o; + PointInPolygonQuery that = (PointInPolygonQuery) o; if (Arrays.equals(polyLons, that.polyLons) == false) { return false; diff --git a/lucene/sandbox/src/java/org/apache/lucene/search/DimensionalPointInRectQuery.java b/lucene/sandbox/src/java/org/apache/lucene/search/PointInRectQuery.java similarity index 78% rename from 
lucene/sandbox/src/java/org/apache/lucene/search/DimensionalPointInRectQuery.java rename to lucene/sandbox/src/java/org/apache/lucene/search/PointInRectQuery.java index b2e8bf39a4f..9af6097609a 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/search/DimensionalPointInRectQuery.java +++ b/lucene/sandbox/src/java/org/apache/lucene/search/PointInRectQuery.java @@ -19,10 +19,10 @@ package org.apache.lucene.search; import java.io.IOException; -import org.apache.lucene.document.DimensionalLatLonField; -import org.apache.lucene.index.DimensionalValues.IntersectVisitor; -import org.apache.lucene.index.DimensionalValues.Relation; -import org.apache.lucene.index.DimensionalValues; +import org.apache.lucene.document.LatLonPoint; +import org.apache.lucene.index.PointValues.IntersectVisitor; +import org.apache.lucene.index.PointValues.Relation; +import org.apache.lucene.index.PointValues; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; @@ -32,11 +32,11 @@ import org.apache.lucene.util.NumericUtils; /** Finds all previously indexed points that fall within the specified boundings box. * - *
The field must be indexed with using {@link DimensionalLatLonField} added per document. + *
    The field must be indexed with using {@link org.apache.lucene.document.LatLonPoint} added per document. * * @lucene.experimental */ -public class DimensionalPointInRectQuery extends Query { +public class PointInRectQuery extends Query { final String field; final double minLat; final double maxLat; @@ -44,7 +44,7 @@ public class DimensionalPointInRectQuery extends Query { final double maxLon; /** Matches all points >= minLon, minLat (inclusive) and < maxLon, maxLat (exclusive). */ - public DimensionalPointInRectQuery(String field, double minLat, double maxLat, double minLon, double maxLon) { + public PointInRectQuery(String field, double minLat, double maxLat, double minLon, double maxLon) { this.field = field; if (GeoUtils.isValidLat(minLat) == false) { throw new IllegalArgumentException("minLat=" + minLat + " is not a valid latitude"); @@ -74,9 +74,9 @@ public class DimensionalPointInRectQuery extends Query { @Override public Scorer scorer(LeafReaderContext context) throws IOException { LeafReader reader = context.reader(); - DimensionalValues values = reader.getDimensionalValues(); + PointValues values = reader.getPointValues(); if (values == null) { - // No docs in this segment had any dimensional fields + // No docs in this segment had any points fields return null; } @@ -98,8 +98,8 @@ public class DimensionalPointInRectQuery extends Query { @Override public void visit(int docID, byte[] packedValue) { assert packedValue.length == 8; - double lat = DimensionalLatLonField.decodeLat(NumericUtils.bytesToInt(packedValue, 0)); - double lon = DimensionalLatLonField.decodeLon(NumericUtils.bytesToInt(packedValue, 1)); + double lat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(packedValue, 0)); + double lon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(packedValue, 1)); if (lat >= minLat && lat <= maxLat && lon >= minLon && lon <= maxLon) { hitCount[0]++; result.add(docID); @@ -108,10 +108,10 @@ public class DimensionalPointInRectQuery extends Query { @Override public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - double cellMinLat = DimensionalLatLonField.decodeLat(NumericUtils.bytesToInt(minPackedValue, 0)); - double cellMinLon = DimensionalLatLonField.decodeLon(NumericUtils.bytesToInt(minPackedValue, 1)); - double cellMaxLat = DimensionalLatLonField.decodeLat(NumericUtils.bytesToInt(maxPackedValue, 0)); - double cellMaxLon = DimensionalLatLonField.decodeLon(NumericUtils.bytesToInt(maxPackedValue, 1)); + double cellMinLat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(minPackedValue, 0)); + double cellMinLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(minPackedValue, 1)); + double cellMaxLat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(maxPackedValue, 0)); + double cellMaxLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(maxPackedValue, 1)); if (minLat <= cellMinLat && maxLat >= cellMaxLat && minLon <= cellMinLon && maxLon >= cellMaxLon) { return Relation.CELL_INSIDE_QUERY; @@ -141,9 +141,9 @@ public class DimensionalPointInRectQuery extends Query { q.setDisableCoord(true); // E.g.: maxLon = -179, minLon = 179 - DimensionalPointInRectQuery left = new DimensionalPointInRectQuery(field, minLat, maxLat, GeoUtils.MIN_LON_INCL, maxLon); + PointInRectQuery left = new PointInRectQuery(field, minLat, maxLat, GeoUtils.MIN_LON_INCL, maxLon); q.add(new BooleanClause(left, BooleanClause.Occur.SHOULD)); - DimensionalPointInRectQuery right = new DimensionalPointInRectQuery(field, minLat, maxLat, minLon, GeoUtils.MAX_LON_INCL); + PointInRectQuery right = new 
PointInRectQuery(field, minLat, maxLat, minLon, GeoUtils.MAX_LON_INCL); q.add(new BooleanClause(right, BooleanClause.Occur.SHOULD)); return new ConstantScoreQuery(q.build()); } else { @@ -163,8 +163,8 @@ public class DimensionalPointInRectQuery extends Query { @Override public boolean equals(Object other) { - if (super.equals(other) && other instanceof DimensionalPointInRectQuery) { - final DimensionalPointInRectQuery q = (DimensionalPointInRectQuery) other; + if (super.equals(other) && other instanceof PointInRectQuery) { + final PointInRectQuery q = (PointInRectQuery) other; return field.equals(q.field) && minLat == q.minLat && maxLat == q.maxLat && diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java index aa51a482caf..694fcdabf36 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java +++ b/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java @@ -19,7 +19,7 @@ package org.apache.lucene.search; import java.io.IOException; -import org.apache.lucene.document.DimensionalLongField; +import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field.Store; import org.apache.lucene.document.NumericDocValuesField; @@ -53,12 +53,12 @@ public class TestDocValuesRangeQuery extends LuceneTestCase { for (int j = 0; j < numValues; ++j) { final long value = TestUtil.nextLong(random(), -100, 10000); doc.add(new SortedNumericDocValuesField("dv", value)); - doc.add(new DimensionalLongField("idx", value)); + doc.add(new LongPoint("idx", value)); } iw.addDocument(doc); } if (random().nextBoolean()) { - iw.deleteDocuments(DimensionalRangeQuery.new1DLongRange("idx", 0L, true, 10L, true)); + iw.deleteDocuments(PointRangeQuery.new1DLongRange("idx", 0L, true, 10L, true)); } iw.commit(); final IndexReader reader = iw.getReader(); @@ -70,7 +70,7 @@ public class TestDocValuesRangeQuery extends LuceneTestCase { final Long max = random().nextBoolean() ? null : TestUtil.nextLong(random(), -100, 1000); final boolean minInclusive = random().nextBoolean(); final boolean maxInclusive = random().nextBoolean(); - final Query q1 = DimensionalRangeQuery.new1DLongRange("idx", min, minInclusive, max, maxInclusive); + final Query q1 = PointRangeQuery.new1DLongRange("idx", min, minInclusive, max, maxInclusive); final Query q2 = DocValuesRangeQuery.newLongRange("dv", min, max, minInclusive, maxInclusive); assertSameMatches(searcher, q1, q2, false); } @@ -180,13 +180,13 @@ public class TestDocValuesRangeQuery extends LuceneTestCase { final long value = TestUtil.nextLong(random(), -100, 10000); doc.add(new SortedNumericDocValuesField("dv1", value)); doc.add(new SortedSetDocValuesField("dv2", toSortableBytes(value))); - doc.add(new DimensionalLongField("idx", value)); + doc.add(new LongPoint("idx", value)); doc.add(new StringField("f", random().nextBoolean() ? 
"a" : "b", Store.NO)); } iw.addDocument(doc); } if (random().nextBoolean()) { - iw.deleteDocuments(DimensionalRangeQuery.new1DLongRange("idx", 0L, true, 10L, true)); + iw.deleteDocuments(PointRangeQuery.new1DLongRange("idx", 0L, true, 10L, true)); } iw.commit(); final IndexReader reader = iw.getReader(); @@ -200,7 +200,7 @@ public class TestDocValuesRangeQuery extends LuceneTestCase { final boolean maxInclusive = random().nextBoolean(); BooleanQuery.Builder ref = new BooleanQuery.Builder(); - ref.add(DimensionalRangeQuery.new1DLongRange("idx", min, minInclusive, max, maxInclusive), Occur.FILTER); + ref.add(PointRangeQuery.new1DLongRange("idx", min, minInclusive, max, maxInclusive), Occur.FILTER); ref.add(new TermQuery(new Term("f", "a")), Occur.MUST); BooleanQuery.Builder bq1 = new BooleanQuery.Builder(); diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestDimensionalQueries.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestLatLonPointQueries.java similarity index 76% rename from lucene/sandbox/src/test/org/apache/lucene/search/TestDimensionalQueries.java rename to lucene/sandbox/src/test/org/apache/lucene/search/TestLatLonPointQueries.java index 23234ef3507..313ec21f289 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/search/TestDimensionalQueries.java +++ b/lucene/sandbox/src/test/org/apache/lucene/search/TestLatLonPointQueries.java @@ -17,24 +17,22 @@ package org.apache.lucene.search; * limitations under the License. */ -import org.apache.lucene.document.DimensionalLatLonField; +import org.apache.lucene.document.LatLonPoint; import org.apache.lucene.document.Document; -import org.apache.lucene.store.Directory; import org.apache.lucene.util.BaseGeoPointTestCase; import org.apache.lucene.util.GeoDistanceUtils; import org.apache.lucene.util.GeoRect; -import org.apache.lucene.util.SloppyMath; -public class TestDimensionalQueries extends BaseGeoPointTestCase { +public class TestLatLonPointQueries extends BaseGeoPointTestCase { @Override protected void addPointToDoc(String field, Document doc, double lat, double lon) { - doc.add(new DimensionalLatLonField(field, lat, lon)); + doc.add(new LatLonPoint(field, lat, lon)); } @Override protected Query newRectQuery(String field, GeoRect rect) { - return new DimensionalPointInRectQuery(field, rect.minLat, rect.maxLat, rect.minLon, rect.maxLon); + return new PointInRectQuery(field, rect.minLat, rect.maxLat, rect.minLon, rect.maxLon); } @Override @@ -50,7 +48,7 @@ public class TestDimensionalQueries extends BaseGeoPointTestCase { @Override protected Query newPolygonQuery(String field, double[] lats, double[] lons) { - return new DimensionalPointInPolygonQuery(FIELD_NAME, lats, lons); + return new PointInPolygonQuery(FIELD_NAME, lats, lons); } @Override @@ -58,13 +56,13 @@ public class TestDimensionalQueries extends BaseGeoPointTestCase { assert Double.isNaN(pointLat) == false; - int rectLatMinEnc = DimensionalLatLonField.encodeLat(rect.minLat); - int rectLatMaxEnc = DimensionalLatLonField.encodeLat(rect.maxLat); - int rectLonMinEnc = DimensionalLatLonField.encodeLon(rect.minLon); - int rectLonMaxEnc = DimensionalLatLonField.encodeLon(rect.maxLon); + int rectLatMinEnc = LatLonPoint.encodeLat(rect.minLat); + int rectLatMaxEnc = LatLonPoint.encodeLat(rect.maxLat); + int rectLonMinEnc = LatLonPoint.encodeLon(rect.minLon); + int rectLonMaxEnc = LatLonPoint.encodeLon(rect.maxLon); - int pointLatEnc = DimensionalLatLonField.encodeLat(pointLat); - int pointLonEnc = DimensionalLatLonField.encodeLon(pointLon); + int pointLatEnc = 
LatLonPoint.encodeLat(pointLat); + int pointLonEnc = LatLonPoint.encodeLon(pointLon); if (rect.minLon < rect.maxLon) { return pointLatEnc >= rectLatMinEnc && @@ -114,12 +112,12 @@ public class TestDimensionalQueries extends BaseGeoPointTestCase { boolean small = random().nextBoolean(); for(int iter=0;iterThe field must be indexed using {@link Geo3DPointField}. + *
    The field must be indexed using {@link Geo3DPoint}. * * @lucene.experimental */ @@ -62,7 +62,7 @@ public class PointInGeo3DShapeQuery extends Query { @Override public Scorer scorer(LeafReaderContext context) throws IOException { LeafReader reader = context.reader(); - DimensionalValues values = reader.getDimensionalValues(); + PointValues values = reader.getPointValues(); if (values == null) { return null; } diff --git a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/TestGeo3DPointField.java b/lucene/spatial3d/src/test/org/apache/lucene/geo3d/TestGeo3DPoint.java similarity index 95% rename from lucene/spatial3d/src/test/org/apache/lucene/geo3d/TestGeo3DPointField.java rename to lucene/spatial3d/src/test/org/apache/lucene/geo3d/TestGeo3DPoint.java index 64e92a42a89..3a3a31f439a 100644 --- a/lucene/spatial3d/src/test/org/apache/lucene/geo3d/TestGeo3DPointField.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/geo3d/TestGeo3DPoint.java @@ -28,12 +28,12 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.DimensionalFormat; -import org.apache.lucene.codecs.DimensionalReader; -import org.apache.lucene.codecs.DimensionalWriter; +import org.apache.lucene.codecs.PointFormat; +import org.apache.lucene.codecs.PointReader; +import org.apache.lucene.codecs.PointWriter; import org.apache.lucene.codecs.FilterCodec; -import org.apache.lucene.codecs.lucene60.Lucene60DimensionalReader; -import org.apache.lucene.codecs.lucene60.Lucene60DimensionalWriter; +import org.apache.lucene.codecs.lucene60.Lucene60PointReader; +import org.apache.lucene.codecs.lucene60.Lucene60PointWriter; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; @@ -60,7 +60,7 @@ import org.junit.BeforeClass; import com.carrotsearch.randomizedtesting.generators.RandomInts; -public class TestGeo3DPointField extends LuceneTestCase { +public class TestGeo3DPoint extends LuceneTestCase { private static boolean smallBBox; @@ -77,21 +77,21 @@ public class TestGeo3DPointField extends LuceneTestCase { int maxPointsInLeafNode = TestUtil.nextInt(random(), 16, 2048); double maxMBSortInHeap = 3.0 + (3*random().nextDouble()); if (VERBOSE) { - System.out.println("TEST: using Lucene60DimensionalFormat with maxPointsInLeafNode=" + maxPointsInLeafNode + " and maxMBSortInHeap=" + maxMBSortInHeap); + System.out.println("TEST: using Lucene60PointFormat with maxPointsInLeafNode=" + maxPointsInLeafNode + " and maxMBSortInHeap=" + maxMBSortInHeap); } return new FilterCodec("Lucene60", Codec.getDefault()) { @Override - public DimensionalFormat dimensionalFormat() { - return new DimensionalFormat() { + public PointFormat pointFormat() { + return new PointFormat() { @Override - public DimensionalWriter fieldsWriter(SegmentWriteState writeState) throws IOException { - return new Lucene60DimensionalWriter(writeState, maxPointsInLeafNode, maxMBSortInHeap); + public PointWriter fieldsWriter(SegmentWriteState writeState) throws IOException { + return new Lucene60PointWriter(writeState, maxPointsInLeafNode, maxMBSortInHeap); } @Override - public DimensionalReader fieldsReader(SegmentReadState readState) throws IOException { - return new Lucene60DimensionalReader(readState); + public PointReader fieldsReader(SegmentReadState readState) throws IOException { + return new Lucene60PointReader(readState); } }; } @@ -107,7 +107,7 @@ public class TestGeo3DPointField 
extends LuceneTestCase { iwc.setCodec(getCodec()); IndexWriter w = new IndexWriter(dir, iwc); Document doc = new Document(); - doc.add(new Geo3DPointField("field", PlanetModel.WGS84, toRadians(50.7345267), toRadians(-97.5303555))); + doc.add(new Geo3DPoint("field", PlanetModel.WGS84, toRadians(50.7345267), toRadians(-97.5303555))); w.addDocument(doc); IndexReader r = DirectoryReader.open(w, true); // We can't wrap with "exotic" readers because the query must see the BKD3DDVFormat: @@ -663,7 +663,7 @@ public class TestGeo3DPointField extends LuceneTestCase { doc.add(newStringField("id", ""+id, Field.Store.NO)); doc.add(new NumericDocValuesField("id", id)); if (Double.isNaN(lats[id]) == false) { - doc.add(new Geo3DPointField("point", planetModel, lats[id], lons[id])); + doc.add(new Geo3DPoint("point", planetModel, lats[id], lons[id])); } w.addDocument(doc); if (id > 0 && random().nextInt(100) == 42) { diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java index 05bb6773810..92271f3db08 100644 --- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java +++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java @@ -34,7 +34,7 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.lucene60.Lucene60Codec; -import org.apache.lucene.document.DimensionalIntField; +import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.StoredField; @@ -44,7 +44,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; -import org.apache.lucene.search.DimensionalRangeQuery; +import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.suggest.BitsProducer; @@ -305,7 +305,7 @@ public class TestSuggestField extends LuceneTestCase { Document document = new Document(); document.add(new SuggestField("suggest_field", "abc_" + i, i)); document.add(new StoredField("weight_fld", i)); - document.add(new DimensionalIntField("weight_fld", i)); + document.add(new IntPoint("weight_fld", i)); iw.addDocument(document); if (usually()) { @@ -313,7 +313,7 @@ public class TestSuggestField extends LuceneTestCase { } } - iw.deleteDocuments(DimensionalRangeQuery.new1DIntRange("weight_fld", 2, true, null, false)); + iw.deleteDocuments(PointRangeQuery.new1DIntRange("weight_fld", 2, true, null, false)); DirectoryReader reader = DirectoryReader.open(iw); SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader); diff --git a/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingCodec.java b/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingCodec.java index 99131293bcc..1e32e012868 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingCodec.java +++ b/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingCodec.java @@ -17,7 +17,7 @@ package org.apache.lucene.codecs.asserting; * limitations under the License. 
*/ -import org.apache.lucene.codecs.DimensionalFormat; +import org.apache.lucene.codecs.PointFormat; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.LiveDocsFormat; @@ -54,7 +54,7 @@ public class AssertingCodec extends FilterCodec { private final LiveDocsFormat liveDocs = new AssertingLiveDocsFormat(); private final PostingsFormat defaultFormat = new AssertingPostingsFormat(); private final DocValuesFormat defaultDVFormat = new AssertingDocValuesFormat(); - private final DimensionalFormat dimensionalFormat = new AssertingDimensionalFormat(); + private final PointFormat pointFormat = new AssertingPointFormat(); public AssertingCodec() { super("Asserting", TestUtil.getDefaultCodec()); @@ -91,8 +91,8 @@ public class AssertingCodec extends FilterCodec { } @Override - public DimensionalFormat dimensionalFormat() { - return dimensionalFormat; + public PointFormat pointFormat() { + return pointFormat; } @Override diff --git a/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingDimensionalFormat.java b/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingPointFormat.java similarity index 70% rename from lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingDimensionalFormat.java rename to lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingPointFormat.java index 4191f65579f..7b306d7e10a 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingDimensionalFormat.java +++ b/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingPointFormat.java @@ -20,9 +20,9 @@ package org.apache.lucene.codecs.asserting; import java.io.IOException; import java.util.Collection; -import org.apache.lucene.codecs.DimensionalFormat; -import org.apache.lucene.codecs.DimensionalReader; -import org.apache.lucene.codecs.DimensionalWriter; +import org.apache.lucene.codecs.PointFormat; +import org.apache.lucene.codecs.PointReader; +import org.apache.lucene.codecs.PointWriter; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.MergeState; import org.apache.lucene.index.SegmentReadState; @@ -31,26 +31,26 @@ import org.apache.lucene.util.Accountable; import org.apache.lucene.util.TestUtil; /** - * Just like the default dimensional format but with additional asserts. + * Just like the default point format but with additional asserts. 
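(Not part of the patch: an illustrative sketch of the codec hook this asserting format plugs into. A custom codec can swap in its own point format by overriding pointFormat(), following the same FilterCodec pattern the Geo3D test in this change uses. The leaf size and sort-heap values are arbitrary, and org.apache.lucene.codecs.* plus the Lucene60 point classes introduced here are assumed to be imported.)

    Codec codec = new FilterCodec("Lucene60", Codec.getDefault()) {
      @Override
      public PointFormat pointFormat() {
        return new PointFormat() {
          @Override
          public PointWriter fieldsWriter(SegmentWriteState writeState) throws IOException {
            // Smaller leaf blocks than the default, purely as an example of tuning the BKD tree:
            return new Lucene60PointWriter(writeState, 512, 8.0);
          }
          @Override
          public PointReader fieldsReader(SegmentReadState readState) throws IOException {
            return new Lucene60PointReader(readState);
          }
        };
      }
    };
    IndexWriterConfig iwc = new IndexWriterConfig(null);
    iwc.setCodec(codec); // points written through this config now use the custom format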
*/ -public final class AssertingDimensionalFormat extends DimensionalFormat { - private final DimensionalFormat in = TestUtil.getDefaultCodec().dimensionalFormat(); +public final class AssertingPointFormat extends PointFormat { + private final PointFormat in = TestUtil.getDefaultCodec().pointFormat(); @Override - public DimensionalWriter fieldsWriter(SegmentWriteState state) throws IOException { - return new AssertingDimensionalWriter(state, in.fieldsWriter(state)); + public PointWriter fieldsWriter(SegmentWriteState state) throws IOException { + return new AssertingPointWriter(state, in.fieldsWriter(state)); } @Override - public DimensionalReader fieldsReader(SegmentReadState state) throws IOException { - return new AssertingDimensionalReader(in.fieldsReader(state)); + public PointReader fieldsReader(SegmentReadState state) throws IOException { + return new AssertingPointReader(in.fieldsReader(state)); } - static class AssertingDimensionalReader extends DimensionalReader { - private final DimensionalReader in; + static class AssertingPointReader extends PointReader { + private final PointReader in; - AssertingDimensionalReader(DimensionalReader in) { + AssertingPointReader(PointReader in) { this.in = in; // do a few simple checks on init assert toString() != null; @@ -90,8 +90,8 @@ public final class AssertingDimensionalFormat extends DimensionalFormat { } @Override - public DimensionalReader getMergeInstance() throws IOException { - return new AssertingDimensionalReader(in.getMergeInstance()); + public PointReader getMergeInstance() throws IOException { + return new AssertingPointReader(in.getMergeInstance()); } @Override @@ -120,17 +120,17 @@ public final class AssertingDimensionalFormat extends DimensionalFormat { } } - static class AssertingDimensionalWriter extends DimensionalWriter { - private final DimensionalWriter in; + static class AssertingPointWriter extends PointWriter { + private final PointWriter in; - AssertingDimensionalWriter(SegmentWriteState writeState, DimensionalWriter in) { + AssertingPointWriter(SegmentWriteState writeState, PointWriter in) { this.in = in; } @Override - public void writeField(FieldInfo fieldInfo, DimensionalReader values) throws IOException { - if (fieldInfo.getDimensionCount() == 0) { - throw new IllegalArgumentException("writing field=\"" + fieldInfo.name + "\" but dimensionalCount is 0"); + public void writeField(FieldInfo fieldInfo, PointReader values) throws IOException { + if (fieldInfo.getPointDimensionCount() == 0) { + throw new IllegalArgumentException("writing field=\"" + fieldInfo.name + "\" but pointDimensionalCount is 0"); } in.writeField(fieldInfo, values); } diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java index f7d9f16fe51..b4b6f7d26f9 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java @@ -312,7 +312,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase { FieldInfo proto = oneDocReader.getFieldInfos().fieldInfo("field"); FieldInfo field = new FieldInfo(proto.name, proto.number, proto.hasVectors(), proto.omitsNorms(), proto.hasPayloads(), proto.getIndexOptions(), proto.getDocValuesType(), proto.getDocValuesGen(), new HashMap<>(), - proto.getDimensionCount(), proto.getDimensionNumBytes()); + proto.getPointDimensionCount(), 
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
index f7d9f16fe51..b4b6f7d26f9 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
@@ -312,7 +312,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
     FieldInfo proto = oneDocReader.getFieldInfos().fieldInfo("field");
     FieldInfo field = new FieldInfo(proto.name, proto.number, proto.hasVectors(), proto.omitsNorms(), proto.hasPayloads(),
                                     proto.getIndexOptions(), proto.getDocValuesType(), proto.getDocValuesGen(), new HashMap<>(),
-                                    proto.getDimensionCount(), proto.getDimensionNumBytes());
+                                    proto.getPointDimensionCount(), proto.getPointNumBytes());

     FieldInfos fieldInfos = new FieldInfos(new FieldInfo[] { field } );

diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/MismatchedLeafReader.java b/lucene/test-framework/src/java/org/apache/lucene/index/MismatchedLeafReader.java
index c57159faa89..664e76a6c2a 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/MismatchedLeafReader.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/MismatchedLeafReader.java
@@ -67,8 +67,8 @@ public class MismatchedLeafReader extends FilterLeafReader {
                                       oldInfo.getDocValuesType(),       // docValuesType
                                       oldInfo.getDocValuesGen(),        // dvGen
                                       oldInfo.attributes(),             // attributes
-                                      oldInfo.getDimensionCount(),      // dimension count
-                                      oldInfo.getDimensionNumBytes());  // dimension numBytes
+                                      oldInfo.getPointDimensionCount(), // dimension count
+                                      oldInfo.getPointNumBytes());      // dimension numBytes
       shuffled.set(i, newInfo);
     }

diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java b/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java
index 734bced0040..6de6213675b 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java
@@ -24,7 +24,7 @@ import java.util.List;
 import java.util.Random;

 import org.apache.lucene.index.BinaryDocValues;
-import org.apache.lucene.index.DimensionalValues;
+import org.apache.lucene.index.PointValues;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.Fields;
@@ -257,7 +257,7 @@ public class QueryUtils {
       }

       @Override
-      public DimensionalValues getDimensionalValues() {
+      public PointValues getPointValues() {
         return null;
       }

diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/LineFileDocs.java b/lucene/test-framework/src/java/org/apache/lucene/util/LineFileDocs.java
index 6e18fd7dd18..a198130cf87 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/LineFileDocs.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/LineFileDocs.java
@@ -34,10 +34,10 @@ import java.util.Random;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.zip.GZIPInputStream;

-import org.apache.lucene.document.DimensionalIntField;
+import org.apache.lucene.document.IntPoint;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.StringField;
@@ -186,7 +186,7 @@ public class LineFileDocs implements Closeable {
       id = new StringField("docid", "", Field.Store.YES);
       doc.add(id);

-      idNum = new DimensionalIntField("docid_int", 0);
+      idNum = new IntPoint("docid_int", 0);
       doc.add(idNum);

       date = new StringField("date", "", Field.Store.YES);
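LineFileDocs now indexes its docid as an IntPoint instead of a DimensionalIntField. A minimal usage sketch of the renamed field class, assuming illustrative field names; the StoredField is an assumption for value retrieval and is not something this patch adds:

import org.apache.lucene.document.Document;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.StoredField;

public class IntPointExample {
  /** Builds a document carrying a one-dimensional int point plus a stored copy of the value. */
  static Document makeDoc(int docid) {
    Document doc = new Document();
    doc.add(new IntPoint("docid_int", docid));    // indexed as a 1D point for point queries
    doc.add(new StoredField("docid_int", docid)); // points are index-only, so store the value separately
    return doc;
  }
}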
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
index 04b4220b3e0..99d4be3ebe2 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
@@ -57,7 +57,7 @@ import org.apache.lucene.codecs.lucene60.Lucene60Codec;
 import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat;
 import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat;
 import org.apache.lucene.document.BinaryDocValuesField;
-import org.apache.lucene.document.DimensionalBinaryField;
+import org.apache.lucene.document.BinaryPoint;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType.LegacyNumericType;
@@ -1037,7 +1037,7 @@ public final class TestUtil {
       final Field field1 = (Field) f;
       final Field field2;
       final DocValuesType dvType = field1.fieldType().docValuesType();
-      final int dimCount = field1.fieldType().dimensionCount();
+      final int dimCount = field1.fieldType().pointDimensionCount();
       final LegacyNumericType numType = field1.fieldType().numericType();
       if (dvType != DocValuesType.NONE) {
         switch(dvType) {
@@ -1057,7 +1057,7 @@ public final class TestUtil {
         BytesRef br = field1.binaryValue();
         byte[] bytes = new byte[br.length];
         System.arraycopy(br.bytes, br.offset, bytes, 0, br.length);
-        field2 = new DimensionalBinaryField(field1.name(), bytes, field1.fieldType());
+        field2 = new BinaryPoint(field1.name(), bytes, field1.fieldType());
       } else if (numType != null) {
         switch (numType) {
         case INT:
diff --git a/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java b/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java
index 3e9db033dff..93e23cb5270 100644
--- a/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java
+++ b/lucene/test-framework/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java
@@ -22,10 +22,10 @@ import java.util.Random;

 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.codecs.Codec;
-import org.apache.lucene.document.DimensionalIntField;
+import org.apache.lucene.document.IntPoint;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.StoredField;
 import org.apache.lucene.index.BaseStoredFieldsFormatTestCase;
 import org.apache.lucene.index.CodecReader;
@@ -69,7 +69,7 @@ public class TestCompressingStoredFieldsFormat extends BaseStoredFieldsFormatTes
     IndexWriter iw = new IndexWriter(dir, iwConf);

     final Document validDoc = new Document();
-    validDoc.add(new DimensionalIntField("id", 0));
+    validDoc.add(new IntPoint("id", 0));
     validDoc.add(new StoredField("id", 0));
     iw.addDocument(validDoc);
     iw.commit();
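TestUtil's field-cloning path now keys off pointDimensionCount() and rebuilds point fields as BinaryPoint. A hedged sketch of that logic in isolation, using a made-up helper name and only the calls visible in the hunks above:

import org.apache.lucene.document.BinaryPoint;
import org.apache.lucene.document.Field;
import org.apache.lucene.util.BytesRef;

public class PointFieldClone {
  /** Recreates a point field as a BinaryPoint carrying the same packed bytes and field type. */
  static Field clonePointField(Field field1) {
    if (field1.fieldType().pointDimensionCount() == 0) {
      throw new IllegalArgumentException("field=\"" + field1.name() + "\" is not a point field");
    }
    BytesRef br = field1.binaryValue();
    byte[] bytes = new byte[br.length];
    System.arraycopy(br.bytes, br.offset, bytes, 0, br.length);
    return new BinaryPoint(field1.name(), bytes, field1.fieldType());
  }
}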
diff --git a/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java b/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
index 58ea6cccfe8..694e53af08c 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
@@ -741,8 +741,8 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
                                     DocValuesType.NONE,
                                     fieldInfo.getDocValuesGen(),
                                     fieldInfo.attributes(),
-                                    fieldInfo.getDimensionCount(),
-                                    fieldInfo.getDimensionNumBytes());
+                                    fieldInfo.getPointDimensionCount(),
+                                    fieldInfo.getPointNumBytes());

         newInfos.add(f);
       } else {
diff --git a/solr/core/src/java/org/apache/solr/search/Insanity.java b/solr/core/src/java/org/apache/solr/search/Insanity.java
index 6043aa1850a..11920a9b121 100644
--- a/solr/core/src/java/org/apache/solr/search/Insanity.java
+++ b/solr/core/src/java/org/apache/solr/search/Insanity.java
@@ -67,7 +67,7 @@ public class Insanity {
       if (fi.name.equals(insaneField)) {
         filteredInfos.add(new FieldInfo(fi.name, fi.number, fi.hasVectors(), fi.omitsNorms(), fi.hasPayloads(),
                                         fi.getIndexOptions(), DocValuesType.NONE, -1, Collections.emptyMap(),
-                                        fi.getDimensionCount(), fi.getDimensionNumBytes()));
+                                        fi.getPointDimensionCount(), fi.getPointNumBytes()));
       } else {
         filteredInfos.add(fi);
       }
diff --git a/solr/core/src/test/org/apache/solr/search/TestDocSet.java b/solr/core/src/test/org/apache/solr/search/TestDocSet.java
index 9708e82d675..f435e6b5821 100644
--- a/solr/core/src/test/org/apache/solr/search/TestDocSet.java
+++ b/solr/core/src/test/org/apache/solr/search/TestDocSet.java
@@ -23,7 +23,7 @@ import java.util.List;
 import java.util.Random;

 import org.apache.lucene.index.BinaryDocValues;
-import org.apache.lucene.index.DimensionalValues;
+import org.apache.lucene.index.PointValues;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.Fields;
@@ -428,7 +428,7 @@ public class TestDocSet extends LuceneTestCase {
       }

       @Override
-      public DimensionalValues getDimensionalValues() {
+      public PointValues getPointValues() {
         return null;
       }
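The Solr readers above copy FieldInfo point metadata through the renamed getters, and the stub readers return null from getPointValues(), reflecting that a segment with no point fields simply has no PointValues. A hedged caller-side sketch combining both renamed accessors; the class and method names are illustrative, not part of the patch:

import java.io.IOException;

import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PointValues;

public class PointFieldCheck {
  /** True if the given field was indexed with point values and this segment exposes them. */
  static boolean hasPoints(LeafReader reader, String fieldName) throws IOException {
    FieldInfo fi = reader.getFieldInfos().fieldInfo(fieldName);
    if (fi == null || fi.getPointDimensionCount() == 0) {
      return false; // field unknown, or carries no point data
    }
    PointValues values = reader.getPointValues();
    return values != null; // stub readers, like the ones above, may return null
  }
}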