mirror of https://github.com/apache/lucene.git

LUCENE-7494: Give points a per-field API.

parent 19c7f8eacf
commit c22725f0b5
@@ -25,6 +25,8 @@ API Changes
 * LUCENE-7475: Norms now support sparsity, allowing to pay for what is
   actually used. (Adrien Grand)
 
+* LUCENE-7494: Points now have a per-field API, like doc values. (Adrien Grand)
+
 Bug Fixes
 
 Improvements
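Note (illustration, not part of this commit): the entry above moves points from one reader-wide PointValues object whose accessors took a field name to one PointValues object per field. A minimal before/after sketch of caller code, assuming a LeafReader variable named leafReader and a purely hypothetical point field name passed in by the caller:

    import java.io.IOException;

    import org.apache.lucene.index.LeafReader;
    import org.apache.lucene.index.PointValues;

    class PerFieldPointsSketch {
      // Returns how many points are indexed for the field on this leaf, or 0 if none.
      static long pointCount(LeafReader leafReader, String field) throws IOException {
        // Before LUCENE-7494 this was roughly: leafReader.getPointValues().size(field)
        PointValues values = leafReader.getPointValues(field); // per-field lookup, may be null
        return values == null ? 0 : values.size();             // accessors no longer take a field name
      }
    }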
@@ -20,7 +20,6 @@ package org.apache.lucene.codecs.simpletext;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 
-import org.apache.lucene.index.PointValues.IntersectVisitor;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;

@@ -26,6 +26,7 @@ import org.apache.lucene.codecs.PointsReader;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.PointValues;
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.store.BufferedChecksumIndexInput;
 import org.apache.lucene.store.ChecksumIndexInput;
@@ -174,7 +175,8 @@ class SimpleTextPointsReader extends PointsReader {
     return new String(scratch.bytes(), prefix.length, scratch.length() - prefix.length, StandardCharsets.UTF_8);
   }
 
-  private BKDReader getBKDReader(String fieldName) {
+  @Override
+  public PointValues getValues(String fieldName) throws IOException {
     FieldInfo fieldInfo = readState.fieldInfos.fieldInfo(fieldName);
     if (fieldInfo == null) {
       throw new IllegalArgumentException("field=\"" + fieldName + "\" is unrecognized");
@@ -185,18 +187,6 @@ class SimpleTextPointsReader extends PointsReader {
     return readers.get(fieldName);
   }
 
-  /** Finds all documents and points matching the provided visitor */
-  @Override
-  public void intersect(String fieldName, IntersectVisitor visitor) throws IOException {
-    BKDReader bkdReader = getBKDReader(fieldName);
-    if (bkdReader == null) {
-      // Schema ghost corner case! This field did index points in the past, but
-      // now all docs having this field were deleted in this segment:
-      return;
-    }
-    bkdReader.intersect(visitor);
-  }
-
   @Override
   public void checkIntegrity() throws IOException {
     BytesRefBuilder scratch = new BytesRefBuilder();
@@ -234,69 +224,4 @@ class SimpleTextPointsReader extends PointsReader {
     return "SimpleTextPointsReader(segment=" + readState.segmentInfo.name + " maxDoc=" + readState.segmentInfo.maxDoc() + ")";
   }
 
-  @Override
-  public byte[] getMinPackedValue(String fieldName) {
-    BKDReader bkdReader = getBKDReader(fieldName);
-    if (bkdReader == null) {
-      // Schema ghost corner case! This field did index points in the past, but
-      // now all docs having this field were deleted in this segment:
-      return null;
-    }
-    return bkdReader.getMinPackedValue();
-  }
-
-  @Override
-  public byte[] getMaxPackedValue(String fieldName) {
-    BKDReader bkdReader = getBKDReader(fieldName);
-    if (bkdReader == null) {
-      // Schema ghost corner case! This field did index points in the past, but
-      // now all docs having this field were deleted in this segment:
-      return null;
-    }
-    return bkdReader.getMaxPackedValue();
-  }
-
-  @Override
-  public int getNumDimensions(String fieldName) {
-    BKDReader bkdReader = getBKDReader(fieldName);
-    if (bkdReader == null) {
-      // Schema ghost corner case! This field did index points in the past, but
-      // now all docs having this field were deleted in this segment:
-      return 0;
-    }
-    return bkdReader.getNumDimensions();
-  }
-
-  @Override
-  public int getBytesPerDimension(String fieldName) {
-    BKDReader bkdReader = getBKDReader(fieldName);
-    if (bkdReader == null) {
-      // Schema ghost corner case! This field did index points in the past, but
-      // now all docs having this field were deleted in this segment:
-      return 0;
-    }
-    return bkdReader.getBytesPerDimension();
-  }
-
-  @Override
-  public long size(String fieldName) {
-    BKDReader bkdReader = getBKDReader(fieldName);
-    if (bkdReader == null) {
-      // Schema ghost corner case! This field did index points in the past, but
-      // now all docs having this field were deleted in this segment:
-      return 0;
-    }
-    return bkdReader.getPointCount();
-  }
-
-  @Override
-  public int getDocCount(String fieldName) {
-    BKDReader bkdReader = getBKDReader(fieldName);
-    if (bkdReader == null) {
-      // Schema ghost corner case! This field did index points in the past, but
-      // now all docs having this field were deleted in this segment:
-      return 0;
-    }
-    return bkdReader.getDocCount();
-  }
 }

@@ -26,6 +26,7 @@ import org.apache.lucene.codecs.PointsReader;
 import org.apache.lucene.codecs.PointsWriter;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.PointValues;
 import org.apache.lucene.index.PointValues.IntersectVisitor;
 import org.apache.lucene.index.PointValues.Relation;
 import org.apache.lucene.index.SegmentWriteState;
@@ -68,9 +69,10 @@ class SimpleTextPointsWriter extends PointsWriter {
   }
 
   @Override
-  public void writeField(FieldInfo fieldInfo, PointsReader values) throws IOException {
+  public void writeField(FieldInfo fieldInfo, PointsReader reader) throws IOException {
 
-    boolean singleValuePerDoc = values.size(fieldInfo.name) == values.getDocCount(fieldInfo.name);
+    PointValues values = reader.getValues(fieldInfo.name);
+    boolean singleValuePerDoc = values.size() == values.getDocCount();
 
     // We use the normal BKDWriter, but subclass to customize how it writes the index and blocks to disk:
     try (BKDWriter writer = new BKDWriter(writeState.segmentInfo.maxDoc(),
@@ -80,7 +82,7 @@ class SimpleTextPointsWriter extends PointsWriter {
                                           fieldInfo.getPointNumBytes(),
                                           BKDWriter.DEFAULT_MAX_POINTS_IN_LEAF_NODE,
                                           BKDWriter.DEFAULT_MAX_MB_SORT_IN_HEAP,
-                                          values.size(fieldInfo.name),
+                                          values.size(),
                                           singleValuePerDoc) {
 
         @Override
@@ -173,7 +175,7 @@ class SimpleTextPointsWriter extends PointsWriter {
         }
       }) {
 
-      values.intersect(fieldInfo.name, new IntersectVisitor() {
+      values.intersect(new IntersectVisitor() {
           @Override
           public void visit(int docID) {
             throw new IllegalStateException();

@@ -16,15 +16,16 @@
  */
 package org.apache.lucene.codecs;
 
+import org.apache.lucene.index.PointValues;
 import org.apache.lucene.util.BytesRef;
 
-/** {@link PointsReader} whose order of points can be changed.
+/** {@link PointValues} whose order of points can be changed.
  *  This class is useful for codecs to optimize flush.
  *  @lucene.internal */
-public abstract class MutablePointsReader extends PointsReader {
+public abstract class MutablePointValues extends PointValues {
 
   /** Sole constructor. */
-  protected MutablePointsReader() {}
+  protected MutablePointValues() {}
 
   /** Set {@code packedValue} with a reference to the packed bytes of the i-th value. */
   public abstract void getValue(int i, BytesRef packedValue);

@@ -19,6 +19,7 @@ package org.apache.lucene.codecs;
 
 import java.io.IOException;
 
+import org.apache.lucene.index.PointValues;
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.index.SegmentWriteState;
 
@@ -72,38 +73,8 @@ public abstract class PointsFormat {
       }
 
       @Override
-      public void intersect(String fieldName, IntersectVisitor visitor) {
-        throw new IllegalArgumentException("field=\"" + fieldName + "\" was not indexed with points");
-      }
-
-      @Override
-      public byte[] getMinPackedValue(String fieldName) {
-        throw new IllegalArgumentException("field=\"" + fieldName + "\" was not indexed with points");
-      }
-
-      @Override
-      public byte[] getMaxPackedValue(String fieldName) {
-        throw new IllegalArgumentException("field=\"" + fieldName + "\" was not indexed with points");
-      }
-
-      @Override
-      public int getNumDimensions(String fieldName) {
-        throw new IllegalArgumentException("field=\"" + fieldName + "\" was not indexed with points");
-      }
-
-      @Override
-      public int getBytesPerDimension(String fieldName) {
-        throw new IllegalArgumentException("field=\"" + fieldName + "\" was not indexed with points");
-      }
-
-      @Override
-      public long size(String fieldName) {
-        throw new IllegalArgumentException("field=\"" + fieldName + "\" was not indexed with points");
-      }
-
-      @Override
-      public int getDocCount(String fieldName) {
-        throw new IllegalArgumentException("field=\"" + fieldName + "\" was not indexed with points");
+      public PointValues getValues(String field) {
+        throw new IllegalArgumentException("field=\"" + field + "\" was not indexed with points");
       }
     };
   }

@@ -27,7 +27,7 @@ import org.apache.lucene.util.Accountable;
  *
  * @lucene.experimental
  */
-public abstract class PointsReader extends PointValues implements Closeable, Accountable {
+public abstract class PointsReader implements Closeable, Accountable {
 
   /** Sole constructor. (For invocation by subclass constructors, typically implicit.) */
   protected PointsReader() {}
@@ -41,6 +41,9 @@ public abstract class PointsReader extends PointValues implements Closeable, Acc
    */
   public abstract void checkIntegrity() throws IOException;
 
+  /** Return {@link PointValues} for the given {@code field}. */
+  public abstract PointValues getValues(String field) throws IOException;
+
   /**
    * Returns an instance optimized for merging.
    * <p>
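Note (illustration, not part of this commit): with the new abstract PointsReader.getValues(String) above, codec-level code first asks the reader for the per-field PointValues and then uses the field-free accessors. A sketch of the pattern that the PointsWriter and CheckIndex hunks below follow; the class and method names are placeholders, not code from the patch:

    import java.io.IOException;

    import org.apache.lucene.codecs.PointsReader;
    import org.apache.lucene.index.FieldInfo;
    import org.apache.lucene.index.PointValues;

    class CodecSideSketch {
      // Accumulates per-field point statistics the way the merge path does.
      static long[] sizeAndDocCount(PointsReader pointsReader, FieldInfo fieldInfo) throws IOException {
        if (fieldInfo.getPointDimensionCount() == 0) {
          return new long[] {0, 0};  // field does not index points at all
        }
        PointValues values = pointsReader.getValues(fieldInfo.name);
        if (values == null) {
          return new long[] {0, 0};  // "schema ghost": points existed once, all docs deleted
        }
        return new long[] {values.size(), values.getDocCount()};
      }
    }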
@@ -22,6 +22,7 @@ import java.io.IOException;
 
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.MergeState;
+import org.apache.lucene.index.PointValues;
 
 /** Abstract API to write points
  *
@@ -48,8 +49,11 @@ public abstract class PointsWriter implements Closeable {
       if (pointsReader != null) {
         FieldInfo readerFieldInfo = mergeState.fieldInfos[i].fieldInfo(fieldInfo.name);
         if (readerFieldInfo != null && readerFieldInfo.getPointDimensionCount() > 0) {
-          maxPointCount += pointsReader.size(fieldInfo.name);
-          docCount += pointsReader.getDocCount(fieldInfo.name);
+          PointValues values = pointsReader.getValues(fieldInfo.name);
+          if (values != null) {
+            maxPointCount += values.size();
+            docCount += values.getDocCount();
+          }
         }
       }
     }
@@ -57,12 +61,25 @@ public abstract class PointsWriter implements Closeable {
     final int finalDocCount = docCount;
     writeField(fieldInfo,
                new PointsReader() {
 
                  @Override
-                 public void intersect(String fieldName, IntersectVisitor mergedVisitor) throws IOException {
+                 public long ramBytesUsed() {
+                   return 0;
+                 }
+
+                 @Override
+                 public void close() throws IOException {}
+
+                 @Override
+                 public PointValues getValues(String fieldName) {
                    if (fieldName.equals(fieldInfo.name) == false) {
                      throw new IllegalArgumentException("field name must match the field being merged");
                    }
+
+                   return new PointValues() {
+
+                     @Override
+                     public void intersect(IntersectVisitor mergedVisitor) throws IOException {
                        for (int i=0;i<mergeState.pointsReaders.length;i++) {
                          PointsReader pointsReader = mergeState.pointsReaders[i];
                          if (pointsReader == null) {
@@ -80,9 +97,12 @@ public abstract class PointsWriter implements Closeable {
                            continue;
                          }
 
+                         PointValues values = pointsReader.getValues(fieldName);
+                         if (values == null) {
+                           continue;
+                         }
                          MergeState.DocMap docMap = mergeState.docMaps[i];
-                         pointsReader.intersect(fieldInfo.name,
-                             new IntersectVisitor() {
+                         values.intersect(new IntersectVisitor() {
                            @Override
                            public void visit(int docID) {
                              // Should never be called because our compare method never returns Relation.CELL_INSIDE_QUERY
@@ -108,48 +128,41 @@ public abstract class PointsWriter implements Closeable {
                        }
 
                        @Override
-                       public void checkIntegrity() {
+                       public byte[] getMinPackedValue() {
                          throw new UnsupportedOperationException();
                        }
 
                        @Override
-                       public long ramBytesUsed() {
-                         return 0L;
-                       }
-
-                       @Override
-                       public void close() {
-                       }
-
-                       @Override
-                       public byte[] getMinPackedValue(String fieldName) {
+                       public byte[] getMaxPackedValue() {
                          throw new UnsupportedOperationException();
                        }
 
                        @Override
-                       public byte[] getMaxPackedValue(String fieldName) {
+                       public int getNumDimensions() {
                          throw new UnsupportedOperationException();
                        }
 
                        @Override
-                       public int getNumDimensions(String fieldName) {
+                       public int getBytesPerDimension() {
                          throw new UnsupportedOperationException();
                        }
 
                        @Override
-                       public int getBytesPerDimension(String fieldName) {
-                         throw new UnsupportedOperationException();
-                       }
-
-                       @Override
-                       public long size(String fieldName) {
+                       public long size() {
                          return finalMaxPointCount;
                        }
 
                        @Override
-                       public int getDocCount(String fieldName) {
+                       public int getDocCount() {
                          return finalDocCount;
                        }
+                     };
+                   }
+
+                   @Override
+                   public void checkIntegrity() throws IOException {
+                     throw new UnsupportedOperationException();
+                   }
                  });
   }
 

@@ -30,6 +30,7 @@ import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.codecs.PointsReader;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.PointValues;
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.store.ChecksumIndexInput;
 import org.apache.lucene.store.IndexInput;
@@ -117,7 +118,8 @@ public class Lucene60PointsReader extends PointsReader implements Closeable {
   /** Returns the underlying {@link BKDReader}.
    *
    * @lucene.internal */
-  public BKDReader getBKDReader(String fieldName) {
+  @Override
+  public PointValues getValues(String fieldName) {
     FieldInfo fieldInfo = readState.fieldInfos.fieldInfo(fieldName);
     if (fieldInfo == null) {
       throw new IllegalArgumentException("field=\"" + fieldName + "\" is unrecognized");
@@ -129,19 +131,6 @@ public class Lucene60PointsReader extends PointsReader implements Closeable {
     return readers.get(fieldInfo.number);
   }
 
-  @Override
-  public void intersect(String fieldName, IntersectVisitor visitor) throws IOException {
-    BKDReader bkdReader = getBKDReader(fieldName);
-
-    if (bkdReader == null) {
-      // Schema ghost corner case! This field did index points in the past, but
-      // now all docs having this point field were deleted in this segment:
-      return;
-    }
-
-    bkdReader.intersect(visitor);
-  }
-
   @Override
   public long ramBytesUsed() {
     long sizeInBytes = 0;
@@ -173,72 +162,5 @@ public class Lucene60PointsReader extends PointsReader implements Closeable {
     readers.clear();
   }
 
-  @Override
-  public byte[] getMinPackedValue(String fieldName) {
-    BKDReader bkdReader = getBKDReader(fieldName);
-    if (bkdReader == null) {
-      // Schema ghost corner case! This field did index points in the past, but
-      // now all docs having this point field were deleted in this segment:
-      return null;
-    }
-
-    return bkdReader.getMinPackedValue();
-  }
-
-  @Override
-  public byte[] getMaxPackedValue(String fieldName) {
-    BKDReader bkdReader = getBKDReader(fieldName);
-    if (bkdReader == null) {
-      // Schema ghost corner case! This field did index points in the past, but
-      // now all docs having this point field were deleted in this segment:
-      return null;
-    }
-
-    return bkdReader.getMaxPackedValue();
-  }
-
-  @Override
-  public int getNumDimensions(String fieldName) {
-    BKDReader bkdReader = getBKDReader(fieldName);
-    if (bkdReader == null) {
-      // Schema ghost corner case! This field did index points in the past, but
-      // now all docs having this point field were deleted in this segment:
-      return 0;
-    }
-    return bkdReader.getNumDimensions();
-  }
-
-  @Override
-  public int getBytesPerDimension(String fieldName) {
-    BKDReader bkdReader = getBKDReader(fieldName);
-    if (bkdReader == null) {
-      // Schema ghost corner case! This field did index points in the past, but
-      // now all docs having this point field were deleted in this segment:
-      return 0;
-    }
-    return bkdReader.getBytesPerDimension();
-  }
-
-  @Override
-  public long size(String fieldName) {
-    BKDReader bkdReader = getBKDReader(fieldName);
-    if (bkdReader == null) {
-      // Schema ghost corner case! This field did index points in the past, but
-      // now all docs having this point field were deleted in this segment:
-      return 0;
-    }
-    return bkdReader.getPointCount();
-  }
-
-  @Override
-  public int getDocCount(String fieldName) {
-    BKDReader bkdReader = getBKDReader(fieldName);
-    if (bkdReader == null) {
-      // Schema ghost corner case! This field did index points in the past, but
-      // now all docs having this point field were deleted in this segment:
-      return 0;
-    }
-    return bkdReader.getDocCount();
-  }
 }

@@ -25,13 +25,14 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.lucene.codecs.CodecUtil;
-import org.apache.lucene.codecs.MutablePointsReader;
+import org.apache.lucene.codecs.MutablePointValues;
 import org.apache.lucene.codecs.PointsReader;
 import org.apache.lucene.codecs.PointsWriter;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.IndexFileNames;
 import org.apache.lucene.index.MergeState;
+import org.apache.lucene.index.PointValues;
 import org.apache.lucene.index.PointValues.IntersectVisitor;
 import org.apache.lucene.index.PointValues.Relation;
 import org.apache.lucene.index.SegmentWriteState;
@@ -85,9 +86,10 @@ public class Lucene60PointsWriter extends PointsWriter implements Closeable {
   }
 
   @Override
-  public void writeField(FieldInfo fieldInfo, PointsReader values) throws IOException {
+  public void writeField(FieldInfo fieldInfo, PointsReader reader) throws IOException {
 
-    boolean singleValuePerDoc = values.size(fieldInfo.name) == values.getDocCount(fieldInfo.name);
+    PointValues values = reader.getValues(fieldInfo.name);
+    boolean singleValuePerDoc = values.size() == values.getDocCount();
 
     try (BKDWriter writer = new BKDWriter(writeState.segmentInfo.maxDoc(),
                                           writeState.directory,
@@ -96,18 +98,18 @@ public class Lucene60PointsWriter extends PointsWriter implements Closeable {
                                           fieldInfo.getPointNumBytes(),
                                           maxPointsInLeafNode,
                                           maxMBSortInHeap,
-                                          values.size(fieldInfo.name),
+                                          values.size(),
                                           singleValuePerDoc)) {
 
-      if (values instanceof MutablePointsReader) {
-        final long fp = writer.writeField(dataOut, fieldInfo.name, (MutablePointsReader) values);
+      if (values instanceof MutablePointValues) {
+        final long fp = writer.writeField(dataOut, fieldInfo.name, (MutablePointValues) values);
         if (fp != -1) {
           indexFPs.put(fieldInfo.name, fp);
         }
         return;
       }
 
-      values.intersect(fieldInfo.name, new IntersectVisitor() {
+      values.intersect(new IntersectVisitor() {
           @Override
           public void visit(int docID) {
             throw new IllegalStateException();
@@ -166,8 +168,11 @@ public class Lucene60PointsWriter extends PointsWriter implements Closeable {
         FieldInfos readerFieldInfos = mergeState.fieldInfos[i];
         FieldInfo readerFieldInfo = readerFieldInfos.fieldInfo(fieldInfo.name);
         if (readerFieldInfo != null && readerFieldInfo.getPointDimensionCount() > 0) {
-          totMaxSize += reader.size(fieldInfo.name);
-          singleValuePerDoc &= reader.size(fieldInfo.name) == reader.getDocCount(fieldInfo.name);
+          PointValues values = reader.getValues(fieldInfo.name);
+          if (values != null) {
+            totMaxSize += values.size();
+            singleValuePerDoc &= values.size() == values.getDocCount();
+          }
         }
       }
     }

@@ -1793,8 +1793,8 @@ public final class CheckIndex implements Closeable {
     try {
 
       if (fieldInfos.hasPointValues()) {
-        PointsReader values = reader.getPointsReader();
-        if (values == null) {
+        PointsReader pointsReader = reader.getPointsReader();
+        if (pointsReader == null) {
           throw new RuntimeException("there are fields with points, but reader.getPointsReader() is null");
         }
         for (FieldInfo fieldInfo : fieldInfos) {
@@ -1812,9 +1812,13 @@ public final class CheckIndex implements Closeable {
 
             long[] pointCountSeen = new long[1];
 
-            byte[] globalMinPackedValue = values.getMinPackedValue(fieldInfo.name);
-            long size = values.size(fieldInfo.name);
-            int docCount = values.getDocCount(fieldInfo.name);
+            PointValues values = pointsReader.getValues(fieldInfo.name);
+            if (values == null) {
+              continue;
+            }
+            byte[] globalMinPackedValue = values.getMinPackedValue();
+            long size = values.size();
+            int docCount = values.getDocCount();
 
             if (docCount > size) {
               throw new RuntimeException("point values for field \"" + fieldInfo.name + "\" claims to have size=" + size + " points and inconsistent docCount=" + docCount);
@@ -1831,7 +1835,7 @@ public final class CheckIndex implements Closeable {
             } else if (globalMinPackedValue.length != packedBytesCount) {
               throw new RuntimeException("getMinPackedValue for field \"" + fieldInfo.name + "\" return length=" + globalMinPackedValue.length + " array, but should be " + packedBytesCount);
             }
-            byte[] globalMaxPackedValue = values.getMaxPackedValue(fieldInfo.name);
+            byte[] globalMaxPackedValue = values.getMaxPackedValue();
             if (globalMaxPackedValue == null) {
               if (size != 0) {
                 throw new RuntimeException("getMaxPackedValue is null points for field \"" + fieldInfo.name + "\" yet size=" + size);
@@ -1840,8 +1844,7 @@ public final class CheckIndex implements Closeable {
               throw new RuntimeException("getMaxPackedValue for field \"" + fieldInfo.name + "\" return length=" + globalMaxPackedValue.length + " array, but should be " + packedBytesCount);
             }
 
-            values.intersect(fieldInfo.name,
-                             new PointValues.IntersectVisitor() {
+            values.intersect(new PointValues.IntersectVisitor() {
 
                private int lastDocID = -1;
 

@@ -188,6 +188,18 @@ public abstract class CodecReader extends LeafReader implements Accountable {
     return getNormsReader().getNorms(fi);
   }
 
+  @Override
+  public final PointValues getPointValues(String field) throws IOException {
+    ensureOpen();
+    FieldInfo fi = getFieldInfos().fieldInfo(field);
+    if (fi == null || fi.getPointDimensionCount() == 0) {
+      // Field does not exist or does not index points
+      return null;
+    }
+    return getPointsReader().getValues(field);
+  }
+
   @Override
   protected void doClose() throws IOException {
   }

@@ -87,11 +87,6 @@ public abstract class FilterCodecReader extends CodecReader {
     return in.getPointsReader();
   }
 
-  @Override
-  public PointValues getPointValues() {
-    return in.getPointValues();
-  }
-
   @Override
   public int numDocs() {
     return in.numDocs();

@@ -382,8 +382,8 @@ public abstract class FilterLeafReader extends LeafReader {
   }
 
   @Override
-  public PointValues getPointValues() {
-    return in.getPointValues();
+  public PointValues getPointValues(String field) throws IOException {
+    return in.getPointValues(field);
   }
 
   @Override

@@ -295,8 +295,9 @@ public abstract class LeafReader extends IndexReader {
   public abstract Bits getLiveDocs();
 
   /** Returns the {@link PointValues} used for numeric or
-   *  spatial searches, or null if there are no point fields. */
-  public abstract PointValues getPointValues();
+   *  spatial searches for the given field, or null if there
+   *  are no point fields. */
+  public abstract PointValues getPointValues(String field) throws IOException;
 
   /**
    * Checks consistency of this reader.
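Note (illustration, not part of this commit): the LeafReader change above is the one most search-time code sees. A self-contained sketch that counts every indexed point of a hypothetical field "price" on one leaf; with the per-field API the visitor no longer needs to check field names, because the PointValues instance already belongs to a single field:

    import java.io.IOException;

    import org.apache.lucene.index.LeafReader;
    import org.apache.lucene.index.PointValues;
    import org.apache.lucene.index.PointValues.IntersectVisitor;
    import org.apache.lucene.index.PointValues.Relation;

    class IntersectSketch {
      static long countPoints(LeafReader leafReader) throws IOException {
        PointValues values = leafReader.getPointValues("price"); // null if the field indexes no points
        if (values == null) {
          return 0;
        }
        final long[] counter = new long[1];
        values.intersect(new IntersectVisitor() {
          @Override
          public void visit(int docID) {
            counter[0]++; // called when a whole cell is known to match
          }
          @Override
          public void visit(int docID, byte[] packedValue) {
            counter[0]++; // called when values must be checked one by one
          }
          @Override
          public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
            return Relation.CELL_CROSSES_QUERY; // force value-by-value visits
          }
        });
        return counter[0];
      }
    }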
@@ -197,8 +197,8 @@ class MergeReaderWrapper extends LeafReader {
   }
 
   @Override
-  public PointValues getPointValues() {
-    return in.getPointValues();
+  public PointValues getPointValues(String fieldName) throws IOException {
+    return in.getPointValues(fieldName);
   }
 
   @Override

@@ -321,99 +321,10 @@ public class ParallelLeafReader extends LeafReader {
   }
 
   @Override
-  public PointValues getPointValues() {
-    return new PointValues() {
-      @Override
-      public void intersect(String fieldName, IntersectVisitor visitor) throws IOException {
-        LeafReader reader = fieldToReader.get(fieldName);
-        if (reader == null) {
-          return;
-        }
-        PointValues dimValues = reader.getPointValues();
-        if (dimValues == null) {
-          return;
-        }
-        dimValues.intersect(fieldName, visitor);
-      }
-
-      @Override
-      public byte[] getMinPackedValue(String fieldName) throws IOException {
-        LeafReader reader = fieldToReader.get(fieldName);
-        if (reader == null) {
-          return null;
-        }
-        PointValues dimValues = reader.getPointValues();
-        if (dimValues == null) {
-          return null;
-        }
-        return dimValues.getMinPackedValue(fieldName);
-      }
-
-      @Override
-      public byte[] getMaxPackedValue(String fieldName) throws IOException {
-        LeafReader reader = fieldToReader.get(fieldName);
-        if (reader == null) {
-          return null;
-        }
-        PointValues dimValues = reader.getPointValues();
-        if (dimValues == null) {
-          return null;
-        }
-        return dimValues.getMaxPackedValue(fieldName);
-      }
-
-      @Override
-      public int getNumDimensions(String fieldName) throws IOException {
-        LeafReader reader = fieldToReader.get(fieldName);
-        if (reader == null) {
-          return 0;
-        }
-        PointValues dimValues = reader.getPointValues();
-        if (dimValues == null) {
-          return 0;
-        }
-        return dimValues.getNumDimensions(fieldName);
-      }
-
-      @Override
-      public int getBytesPerDimension(String fieldName) throws IOException {
-        LeafReader reader = fieldToReader.get(fieldName);
-        if (reader == null) {
-          return 0;
-        }
-        PointValues dimValues = reader.getPointValues();
-        if (dimValues == null) {
-          return 0;
-        }
-        return dimValues.getBytesPerDimension(fieldName);
-      }
-
-      @Override
-      public long size(String fieldName) {
-        LeafReader reader = fieldToReader.get(fieldName);
-        if (reader == null) {
-          return 0;
-        }
-        PointValues dimValues = reader.getPointValues();
-        if (dimValues == null) {
-          return 0;
-        }
-        return dimValues.size(fieldName);
-      }
-
-      @Override
-      public int getDocCount(String fieldName) {
-        LeafReader reader = fieldToReader.get(fieldName);
-        if (reader == null) {
-          return 0;
-        }
-        PointValues dimValues = reader.getPointValues();
-        if (dimValues == null) {
-          return 0;
-        }
-        return dimValues.getDocCount(fieldName);
-      }
-    };
+  public PointValues getPointValues(String fieldName) throws IOException {
+    ensureOpen();
+    LeafReader reader = fieldToReader.get(fieldName);
+    return reader == null ? null : reader.getPointValues(fieldName);
   }
 
   @Override

@@ -89,16 +89,14 @@ public abstract class PointValues {
   /** Return the cumulated number of points across all leaves of the given
    *  {@link IndexReader}. Leaves that do not have points for the given field
    *  are ignored.
-   *  @see PointValues#size(String) */
+   *  @see PointValues#size() */
   public static long size(IndexReader reader, String field) throws IOException {
     long size = 0;
     for (LeafReaderContext ctx : reader.leaves()) {
-      FieldInfo info = ctx.reader().getFieldInfos().fieldInfo(field);
-      if (info == null || info.getPointDimensionCount() == 0) {
-        continue;
+      PointValues values = ctx.reader().getPointValues(field);
+      if (values != null) {
+        size += values.size();
       }
-      PointValues values = ctx.reader().getPointValues();
-      size += values.size(field);
     }
     return size;
   }
@@ -106,16 +104,14 @@ public abstract class PointValues {
   /** Return the cumulated number of docs that have points across all leaves
    *  of the given {@link IndexReader}. Leaves that do not have points for the
    *  given field are ignored.
-   *  @see PointValues#getDocCount(String) */
+   *  @see PointValues#getDocCount() */
   public static int getDocCount(IndexReader reader, String field) throws IOException {
     int count = 0;
     for (LeafReaderContext ctx : reader.leaves()) {
-      FieldInfo info = ctx.reader().getFieldInfos().fieldInfo(field);
-      if (info == null || info.getPointDimensionCount() == 0) {
-        continue;
+      PointValues values = ctx.reader().getPointValues(field);
+      if (values != null) {
+        count += values.getDocCount();
      }
-      PointValues values = ctx.reader().getPointValues();
-      count += values.getDocCount(field);
     }
     return count;
   }
@@ -123,24 +119,23 @@ public abstract class PointValues {
   /** Return the minimum packed values across all leaves of the given
    *  {@link IndexReader}. Leaves that do not have points for the given field
    *  are ignored.
-   *  @see PointValues#getMinPackedValue(String) */
+   *  @see PointValues#getMinPackedValue() */
   public static byte[] getMinPackedValue(IndexReader reader, String field) throws IOException {
     byte[] minValue = null;
     for (LeafReaderContext ctx : reader.leaves()) {
-      FieldInfo info = ctx.reader().getFieldInfos().fieldInfo(field);
-      if (info == null || info.getPointDimensionCount() == 0) {
+      PointValues values = ctx.reader().getPointValues(field);
+      if (values == null) {
         continue;
       }
-      PointValues values = ctx.reader().getPointValues();
-      byte[] leafMinValue = values.getMinPackedValue(field);
+      byte[] leafMinValue = values.getMinPackedValue();
       if (leafMinValue == null) {
         continue;
       }
       if (minValue == null) {
         minValue = leafMinValue.clone();
       } else {
-        final int numDimensions = values.getNumDimensions(field);
-        final int numBytesPerDimension = values.getBytesPerDimension(field);
+        final int numDimensions = values.getNumDimensions();
+        final int numBytesPerDimension = values.getBytesPerDimension();
         for (int i = 0; i < numDimensions; ++i) {
           int offset = i * numBytesPerDimension;
           if (StringHelper.compare(numBytesPerDimension, leafMinValue, offset, minValue, offset) < 0) {
@@ -155,24 +150,23 @@ public abstract class PointValues {
   /** Return the maximum packed values across all leaves of the given
    *  {@link IndexReader}. Leaves that do not have points for the given field
    *  are ignored.
-   *  @see PointValues#getMaxPackedValue(String) */
+   *  @see PointValues#getMaxPackedValue() */
   public static byte[] getMaxPackedValue(IndexReader reader, String field) throws IOException {
     byte[] maxValue = null;
     for (LeafReaderContext ctx : reader.leaves()) {
-      FieldInfo info = ctx.reader().getFieldInfos().fieldInfo(field);
-      if (info == null || info.getPointDimensionCount() == 0) {
+      PointValues values = ctx.reader().getPointValues(field);
+      if (values == null) {
        continue;
       }
-      PointValues values = ctx.reader().getPointValues();
-      byte[] leafMaxValue = values.getMaxPackedValue(field);
+      byte[] leafMaxValue = values.getMaxPackedValue();
       if (leafMaxValue == null) {
         continue;
       }
       if (maxValue == null) {
         maxValue = leafMaxValue.clone();
       } else {
-        final int numDimensions = values.getNumDimensions(field);
-        final int numBytesPerDimension = values.getBytesPerDimension(field);
+        final int numDimensions = values.getNumDimensions();
+        final int numBytesPerDimension = values.getBytesPerDimension();
         for (int i = 0; i < numDimensions; ++i) {
           int offset = i * numBytesPerDimension;
           if (StringHelper.compare(numBytesPerDimension, leafMaxValue, offset, maxValue, offset) > 0) {
@@ -224,23 +218,23 @@ public abstract class PointValues {
   /** Finds all documents and points matching the provided visitor.
    *  This method does not enforce live documents, so it's up to the caller
    *  to test whether each document is deleted, if necessary. */
-  public abstract void intersect(String fieldName, IntersectVisitor visitor) throws IOException;
+  public abstract void intersect(IntersectVisitor visitor) throws IOException;
 
   /** Returns minimum value for each dimension, packed, or null if {@link #size} is <code>0</code> */
-  public abstract byte[] getMinPackedValue(String fieldName) throws IOException;
+  public abstract byte[] getMinPackedValue() throws IOException;
 
   /** Returns maximum value for each dimension, packed, or null if {@link #size} is <code>0</code> */
-  public abstract byte[] getMaxPackedValue(String fieldName) throws IOException;
+  public abstract byte[] getMaxPackedValue() throws IOException;
 
   /** Returns how many dimensions were indexed */
-  public abstract int getNumDimensions(String fieldName) throws IOException;
+  public abstract int getNumDimensions() throws IOException;
 
   /** Returns the number of bytes per dimension */
-  public abstract int getBytesPerDimension(String fieldName) throws IOException;
+  public abstract int getBytesPerDimension() throws IOException;
 
-  /** Returns the total number of indexed points across all documents in this field. */
-  public abstract long size(String fieldName);
+  /** Returns the total number of indexed points across all documents. */
+  public abstract long size();
 
-  /** Returns the total number of documents that have indexed at least one point for this field. */
-  public abstract int getDocCount(String fieldName);
+  /** Returns the total number of documents that have indexed at least one point. */
+  public abstract int getDocCount();
 }
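Note (illustration, not part of this commit): the static helpers rewritten above now locate the per-field PointValues themselves, so index-wide statistics stay one call per metric. A sketch, with the reader and field name supplied by the caller:

    import java.io.IOException;

    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.PointValues;

    class GlobalPointStatsSketch {
      static void printStats(IndexReader reader, String field) throws IOException {
        long size = PointValues.size(reader, field);           // total points across all leaves
        int docCount = PointValues.getDocCount(reader, field);  // docs with at least one point
        byte[] min = PointValues.getMinPackedValue(reader, field);
        byte[] max = PointValues.getMaxPackedValue(reader, field);
        if (min == null || max == null) {
          System.out.println(field + ": no points");
        } else {
          System.out.println(field + ": " + size + " points in " + docCount
              + " docs, packed bounds of " + min.length + " bytes");
        }
      }
    }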
@ -18,7 +18,7 @@ package org.apache.lucene.index;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
|
||||||
import org.apache.lucene.codecs.MutablePointsReader;
|
import org.apache.lucene.codecs.MutablePointValues;
|
||||||
import org.apache.lucene.codecs.PointsReader;
|
import org.apache.lucene.codecs.PointsReader;
|
||||||
import org.apache.lucene.codecs.PointsWriter;
|
import org.apache.lucene.codecs.PointsWriter;
|
||||||
import org.apache.lucene.util.ArrayUtil;
|
import org.apache.lucene.util.ArrayUtil;
|
||||||
|
@ -70,7 +70,7 @@ class PointValuesWriter {
|
||||||
}
|
}
|
||||||
|
|
||||||
public void flush(SegmentWriteState state, PointsWriter writer) throws IOException {
|
public void flush(SegmentWriteState state, PointsWriter writer) throws IOException {
|
||||||
PointsReader reader = new MutablePointsReader() {
|
PointValues values = new MutablePointValues() {
|
||||||
|
|
||||||
final int[] ords = new int[numPoints];
|
final int[] ords = new int[numPoints];
|
||||||
{
|
{
|
||||||
|
@ -80,10 +80,7 @@ class PointValuesWriter {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void intersect(String fieldName, IntersectVisitor visitor) throws IOException {
|
public void intersect(IntersectVisitor visitor) throws IOException {
|
||||||
if (fieldName.equals(fieldInfo.name) == false) {
|
|
||||||
throw new IllegalArgumentException("fieldName must be the same");
|
|
||||||
}
|
|
||||||
final BytesRef scratch = new BytesRef();
|
final BytesRef scratch = new BytesRef();
|
||||||
final byte[] packedValue = new byte[packedBytesLength];
|
final byte[] packedValue = new byte[packedBytesLength];
|
||||||
for(int i=0;i<numPoints;i++) {
|
for(int i=0;i<numPoints;i++) {
|
||||||
|
@ -95,52 +92,32 @@ class PointValuesWriter {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void checkIntegrity() {
|
public byte[] getMinPackedValue() {
|
||||||
throw new UnsupportedOperationException();
|
throw new UnsupportedOperationException();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public long ramBytesUsed() {
|
public byte[] getMaxPackedValue() {
|
||||||
return 0L;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void close() {
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public byte[] getMinPackedValue(String fieldName) {
|
|
||||||
throw new UnsupportedOperationException();
|
throw new UnsupportedOperationException();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public byte[] getMaxPackedValue(String fieldName) {
|
public int getNumDimensions() {
|
||||||
throw new UnsupportedOperationException();
|
throw new UnsupportedOperationException();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public int getNumDimensions(String fieldName) {
|
public int getBytesPerDimension() {
|
||||||
throw new UnsupportedOperationException();
|
throw new UnsupportedOperationException();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public int getBytesPerDimension(String fieldName) {
|
public long size() {
|
||||||
throw new UnsupportedOperationException();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public long size(String fieldName) {
|
|
||||||
if (fieldName.equals(fieldInfo.name) == false) {
|
|
||||||
throw new IllegalArgumentException("fieldName must be the same");
|
|
||||||
}
|
|
||||||
return numPoints;
|
return numPoints;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public int getDocCount(String fieldName) {
|
public int getDocCount() {
|
||||||
if (fieldName.equals(fieldInfo.name) == false) {
|
|
||||||
throw new IllegalArgumentException("fieldName must be the same");
|
|
||||||
}
|
|
||||||
return numDocs;
|
return numDocs;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -170,6 +147,31 @@ class PointValuesWriter {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
PointsReader reader = new PointsReader() {
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public PointValues getValues(String fieldName) {
|
||||||
|
if (fieldName.equals(fieldInfo.name) == false) {
|
||||||
|
throw new IllegalArgumentException("fieldName must be the same");
|
||||||
|
}
|
||||||
|
return values;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void checkIntegrity() {
|
||||||
|
throw new UnsupportedOperationException();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public long ramBytesUsed() {
|
||||||
|
return 0L;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void close() {
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
writer.writeField(fieldInfo, reader);
|
writer.writeField(fieldInfo, reader);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -230,7 +230,7 @@ public final class SegmentReader extends CodecReader {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public PointValues getPointValues() {
|
public PointsReader getPointsReader() {
|
||||||
ensureOpen();
|
ensureOpen();
|
||||||
return core.pointsReader;
|
return core.pointsReader;
|
||||||
}
|
}
|
||||||
|
@ -253,12 +253,6 @@ public final class SegmentReader extends CodecReader {
|
||||||
return core.fields;
|
return core.fields;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
public PointsReader getPointsReader() {
|
|
||||||
ensureOpen();
|
|
||||||
return core.pointsReader;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public String toString() {
|
public String toString() {
|
||||||
// SegmentInfo.toString takes dir and number of
|
// SegmentInfo.toString takes dir and number of
|
||||||
|
|
|
@@ -92,14 +92,9 @@ public final class SlowCodecReaderWrapper {
        return reader.getFieldInfos();
      }

-      @Override
-      public PointValues getPointValues() {
-        return reader.getPointValues();
-      }
-
      @Override
      public PointsReader getPointsReader() {
-        return pointValuesToReader(reader.getPointValues());
+        return pointValuesToReader(reader);
      }

      @Override
@@ -140,14 +135,12 @@ public final class SlowCodecReaderWrapper {
    }
  }

-  private static PointsReader pointValuesToReader(PointValues values) {
-    if (values == null) {
-      return null;
-    }
+  private static PointsReader pointValuesToReader(LeafReader reader) {
    return new PointsReader() {

      @Override
-      public void intersect(String fieldName, IntersectVisitor visitor) throws IOException {
-        values.intersect(fieldName, visitor);
+      public PointValues getValues(String field) throws IOException {
+        return reader.getPointValues(field);
      }

      @Override
@@ -164,35 +157,6 @@ public final class SlowCodecReaderWrapper {
        return 0;
      }

-      @Override
-      public byte[] getMinPackedValue(String fieldName) throws IOException {
-        return values.getMinPackedValue(fieldName);
-      }
-
-      @Override
-      public byte[] getMaxPackedValue(String fieldName) throws IOException {
-        return values.getMaxPackedValue(fieldName);
-      }
-
-      @Override
-      public int getNumDimensions(String fieldName) throws IOException {
-        return values.getNumDimensions(fieldName);
-      }
-
-      @Override
-      public int getBytesPerDimension(String fieldName) throws IOException {
-        return values.getBytesPerDimension(fieldName);
-      }
-
-      @Override
-      public long size(String fieldName) {
-        return values.size(fieldName);
-      }
-
-      @Override
-      public int getDocCount(String fieldName) {
-        return values.getDocCount(fieldName);
-      }
-
    };
  }

@@ -297,9 +297,8 @@ class SortingLeafReader extends FilterLeafReader {
    }

    @Override
-    public void intersect(String fieldName, IntersectVisitor visitor) throws IOException {
-      in.intersect(fieldName,
-          new IntersectVisitor() {
+    public void intersect(IntersectVisitor visitor) throws IOException {
+      in.intersect(new IntersectVisitor() {
          @Override
          public void visit(int docID) throws IOException {
            visitor.visit(docMap.oldToNew(docID));
@@ -318,33 +317,33 @@ class SortingLeafReader extends FilterLeafReader {
    }

    @Override
-    public byte[] getMinPackedValue(String fieldName) throws IOException {
-      return in.getMinPackedValue(fieldName);
+    public byte[] getMinPackedValue() throws IOException {
+      return in.getMinPackedValue();
    }

    @Override
-    public byte[] getMaxPackedValue(String fieldName) throws IOException {
-      return in.getMaxPackedValue(fieldName);
+    public byte[] getMaxPackedValue() throws IOException {
+      return in.getMaxPackedValue();
    }

    @Override
-    public int getNumDimensions(String fieldName) throws IOException {
-      return in.getNumDimensions(fieldName);
+    public int getNumDimensions() throws IOException {
+      return in.getNumDimensions();
    }

    @Override
-    public int getBytesPerDimension(String fieldName) throws IOException {
-      return in.getBytesPerDimension(fieldName);
+    public int getBytesPerDimension() throws IOException {
+      return in.getBytesPerDimension();
    }

    @Override
-    public long size(String fieldName) {
-      return in.size(fieldName);
+    public long size() {
+      return in.size();
    }

    @Override
-    public int getDocCount(String fieldName) {
-      return in.getDocCount(fieldName);
+    public int getDocCount() {
+      return in.getDocCount();
    }
  }

@@ -1049,8 +1048,8 @@ class SortingLeafReader extends FilterLeafReader {
  }

  @Override
-  public PointValues getPointValues() {
-    final PointValues inPointValues = in.getPointValues();
+  public PointValues getPointValues(String fieldName) throws IOException {
+    final PointValues inPointValues = in.getPointValues(fieldName);
    if (inPointValues == null) {
      return null;
    } else {
@@ -23,7 +23,6 @@ import java.util.Collection;
 import java.util.Iterator;
 import java.util.NoSuchElementException;
 import org.apache.lucene.document.IntPoint;
-import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.PointValues.IntersectVisitor;
@@ -116,21 +115,18 @@ public abstract class PointInSetQuery extends Query {
      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        LeafReader reader = context.reader();
-        PointValues values = reader.getPointValues();
+        PointValues values = reader.getPointValues(field);
        if (values == null) {
-          // No docs in this segment indexed any points
+          // No docs in this segment/field indexed any points
          return null;
        }
-        FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field);
-        if (fieldInfo == null) {
-          // No docs in this segment indexed this field at all
-          return null;
+        if (values.getNumDimensions() != numDims) {
+          throw new IllegalArgumentException("field=\"" + field + "\" was indexed with numDims=" + values.getNumDimensions() + " but this query has numDims=" + numDims);
        }
-        if (fieldInfo.getPointDimensionCount() != numDims) {
-          throw new IllegalArgumentException("field=\"" + field + "\" was indexed with numDims=" + fieldInfo.getPointDimensionCount() + " but this query has numDims=" + numDims);
-        }
-        if (fieldInfo.getPointNumBytes() != bytesPerDim) {
-          throw new IllegalArgumentException("field=\"" + field + "\" was indexed with bytesPerDim=" + fieldInfo.getPointNumBytes() + " but this query has bytesPerDim=" + bytesPerDim);
+        if (values.getBytesPerDimension() != bytesPerDim) {
+          throw new IllegalArgumentException("field=\"" + field + "\" was indexed with bytesPerDim=" + values.getBytesPerDimension() + " but this query has bytesPerDim=" + bytesPerDim);
        }

        DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc(), values, field);
@@ -138,7 +134,7 @@ public abstract class PointInSetQuery extends Query {
        if (numDims == 1) {

          // We optimize this common case, effectively doing a merge sort of the indexed values vs the queried set:
-          values.intersect(field, new MergePointVisitor(sortedPackedPoints, result));
+          values.intersect(new MergePointVisitor(sortedPackedPoints, result));

        } else {
          // NOTE: this is naive implementation, where for each point we re-walk the KD tree to intersect. We could instead do a similar
@@ -148,7 +144,7 @@ public abstract class PointInSetQuery extends Query {
          TermIterator iterator = sortedPackedPoints.iterator();
          for (BytesRef point = iterator.next(); point != null; point = iterator.next()) {
            visitor.setPoint(point);
-            values.intersect(field, visitor);
+            values.intersect(visitor);
          }
        }

@@ -24,7 +24,6 @@ import org.apache.lucene.index.PointValues;
 import org.apache.lucene.index.PointValues.IntersectVisitor;
 import org.apache.lucene.index.PointValues.Relation;
 import org.apache.lucene.document.IntPoint; // javadocs
-import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.util.DocIdSetBuilder;
@@ -108,7 +107,7 @@ public abstract class PointRangeQuery extends Query {
      private DocIdSet buildMatchingDocIdSet(LeafReader reader, PointValues values) throws IOException {
        DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc(), values, field);

-        values.intersect(field,
+        values.intersect(
            new IntersectVisitor() {

              DocIdSetBuilder.BulkAdder adder;
@@ -171,27 +170,24 @@ public abstract class PointRangeQuery extends Query {
      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        LeafReader reader = context.reader();
-        PointValues values = reader.getPointValues();
+        PointValues values = reader.getPointValues(field);
        if (values == null) {
-          // No docs in this segment indexed any points
+          // No docs in this segment/field indexed any points
          return null;
        }
-        FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field);
-        if (fieldInfo == null) {
-          // No docs in this segment indexed this field at all
-          return null;
+        if (values.getNumDimensions() != numDims) {
+          throw new IllegalArgumentException("field=\"" + field + "\" was indexed with numDims=" + values.getNumDimensions() + " but this query has numDims=" + numDims);
        }
-        if (fieldInfo.getPointDimensionCount() != numDims) {
-          throw new IllegalArgumentException("field=\"" + field + "\" was indexed with numDims=" + fieldInfo.getPointDimensionCount() + " but this query has numDims=" + numDims);
-        }
-        if (bytesPerDim != fieldInfo.getPointNumBytes()) {
-          throw new IllegalArgumentException("field=\"" + field + "\" was indexed with bytesPerDim=" + fieldInfo.getPointNumBytes() + " but this query has bytesPerDim=" + bytesPerDim);
+        if (bytesPerDim != values.getBytesPerDimension()) {
+          throw new IllegalArgumentException("field=\"" + field + "\" was indexed with bytesPerDim=" + values.getBytesPerDimension() + " but this query has bytesPerDim=" + bytesPerDim);
        }

        boolean allDocsMatch;
-        if (values.getDocCount(field) == reader.maxDoc()) {
-          final byte[] fieldPackedLower = values.getMinPackedValue(field);
-          final byte[] fieldPackedUpper = values.getMaxPackedValue(field);
+        if (values.getDocCount() == reader.maxDoc()) {
+          final byte[] fieldPackedLower = values.getMinPackedValue();
+          final byte[] fieldPackedUpper = values.getMaxPackedValue();
          allDocsMatch = true;
          for (int i = 0; i < numDims; ++i) {
            int offset = i * bytesPerDim;
@@ -116,7 +116,7 @@ public final class DocIdSetBuilder {
  /** Create a {@link DocIdSetBuilder} instance that is optimized for
   *  accumulating docs that match the given {@link PointValues}. */
  public DocIdSetBuilder(int maxDoc, PointValues values, String field) throws IOException {
-    this(maxDoc, values.getDocCount(field), values.size(field));
+    this(maxDoc, values.getDocCount(), values.size());
  }

  DocIdSetBuilder(int maxDoc, int docCount, long valueCount) {
@@ -21,8 +21,7 @@ import java.util.Arrays;

 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.index.CorruptIndexException;
-import org.apache.lucene.index.PointValues.IntersectVisitor;
-import org.apache.lucene.index.PointValues.Relation;
+import org.apache.lucene.index.PointValues;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.BytesRef;
@@ -32,7 +31,7 @@ import org.apache.lucene.util.StringHelper;
 *
 * @lucene.experimental */

-public class BKDReader implements Accountable {
+public class BKDReader extends PointValues implements Accountable {
  // Packed array of byte[] holding all split values in the full binary tree:
  final private byte[] splitPackedValues;
  final long[] leafBlockFPs;
@@ -496,26 +495,32 @@ public class BKDReader implements Accountable {
        leafBlockFPs.length * Long.BYTES;
  }

+  @Override
  public byte[] getMinPackedValue() {
    return minPackedValue.clone();
  }

+  @Override
  public byte[] getMaxPackedValue() {
    return maxPackedValue.clone();
  }

+  @Override
  public int getNumDimensions() {
    return numDims;
  }

+  @Override
  public int getBytesPerDimension() {
    return bytesPerDim;
  }

-  public long getPointCount() {
+  @Override
+  public long size() {
    return pointCount;
  }

+  @Override
  public int getDocCount() {
    return docCount;
  }

@@ -25,7 +25,7 @@ import java.util.List;
 import java.util.function.IntFunction;

 import org.apache.lucene.codecs.CodecUtil;
-import org.apache.lucene.codecs.MutablePointsReader;
+import org.apache.lucene.codecs.MutablePointValues;
 import org.apache.lucene.index.MergeState;
 import org.apache.lucene.index.PointValues.IntersectVisitor;
 import org.apache.lucene.index.PointValues.Relation;
@@ -417,12 +417,12 @@ public class BKDWriter implements Closeable {
    }
  }

-  /** Write a field from a {@link MutablePointsReader}. This way of writing
+  /** Write a field from a {@link MutablePointValues}. This way of writing
   *  points is faster than regular writes with {@link BKDWriter#add} since
   *  there is opportunity for reordering points before writing them to
   *  disk. This method does not use transient disk in order to reorder points.
   */
-  public long writeField(IndexOutput out, String fieldName, MutablePointsReader reader) throws IOException {
+  public long writeField(IndexOutput out, String fieldName, MutablePointValues reader) throws IOException {
    if (numDims == 1) {
      return writeField1Dim(out, fieldName, reader);
    } else {
@@ -433,7 +433,7 @@ public class BKDWriter implements Closeable {

  /* In the 2+D case, we recursively pick the split dimension, compute the
   * median value and partition other values around it. */
-  private long writeFieldNDims(IndexOutput out, String fieldName, MutablePointsReader reader) throws IOException {
+  private long writeFieldNDims(IndexOutput out, String fieldName, MutablePointValues values) throws IOException {
    if (pointCount != 0) {
      throw new IllegalStateException("cannot mix add and writeField");
    }
@@ -446,7 +446,7 @@ public class BKDWriter implements Closeable {
    // Mark that we already finished:
    heapPointWriter = null;

-    long countPerLeaf = pointCount = reader.size(fieldName);
+    long countPerLeaf = pointCount = values.size();
    long innerNodeCount = 1;

    while (countPerLeaf > maxPointsInLeafNode) {
@@ -465,7 +465,7 @@ public class BKDWriter implements Closeable {
    Arrays.fill(minPackedValue, (byte) 0xff);
    Arrays.fill(maxPackedValue, (byte) 0);
    for (int i = 0; i < Math.toIntExact(pointCount); ++i) {
-      reader.getValue(i, scratchBytesRef1);
+      values.getValue(i, scratchBytesRef1);
      for(int dim=0;dim<numDims;dim++) {
        int offset = dim*bytesPerDim;
        if (StringHelper.compare(bytesPerDim, scratchBytesRef1.bytes, scratchBytesRef1.offset + offset, minPackedValue, offset) < 0) {
@@ -476,10 +476,10 @@ public class BKDWriter implements Closeable {
        }
      }

-      docsSeen.set(reader.getDocID(i));
+      docsSeen.set(values.getDocID(i));
    }

-    build(1, numLeaves, reader, 0, Math.toIntExact(pointCount), out,
+    build(1, numLeaves, values, 0, Math.toIntExact(pointCount), out,
          minPackedValue, maxPackedValue, splitPackedValues, leafBlockFPs,
          new int[maxPointsInLeafNode]);

@@ -491,12 +491,12 @@ public class BKDWriter implements Closeable {

  /* In the 1D case, we can simply sort points in ascending order and use the
   * same writing logic as we use at merge time. */
-  private long writeField1Dim(IndexOutput out, String fieldName, MutablePointsReader reader) throws IOException {
-    MutablePointsReaderUtils.sort(maxDoc, packedBytesLength, reader, 0, Math.toIntExact(reader.size(fieldName)));
+  private long writeField1Dim(IndexOutput out, String fieldName, MutablePointValues reader) throws IOException {
+    MutablePointsReaderUtils.sort(maxDoc, packedBytesLength, reader, 0, Math.toIntExact(reader.size()));

    final OneDimensionBKDWriter oneDimWriter = new OneDimensionBKDWriter(out);

-    reader.intersect(fieldName, new IntersectVisitor() {
+    reader.intersect(new IntersectVisitor() {

      @Override
      public void visit(int docID, byte[] packedValue) throws IOException {
@@ -1238,7 +1238,7 @@ public class BKDWriter implements Closeable {

  /* Recursively reorders the provided reader and writes the bkd-tree on the fly. */
  private void build(int nodeID, int leafNodeOffset,
-                     MutablePointsReader reader, int from, int to,
+                     MutablePointValues reader, int from, int to,
                     IndexOutput out,
                     byte[] minPackedValue, byte[] maxPackedValue,
                     byte[] splitPackedValues,
@@ -16,7 +16,7 @@
 */
 package org.apache.lucene.util.bkd;

-import org.apache.lucene.codecs.MutablePointsReader;
+import org.apache.lucene.codecs.MutablePointValues;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IntroSelector;
 import org.apache.lucene.util.IntroSorter;
@@ -30,9 +30,9 @@ final class MutablePointsReaderUtils {

  MutablePointsReaderUtils() {}

-  /** Sort the given {@link MutablePointsReader} based on its packed value then doc ID. */
+  /** Sort the given {@link MutablePointValues} based on its packed value then doc ID. */
  static void sort(int maxDoc, int packedBytesLength,
-                   MutablePointsReader reader, int from, int to) {
+                   MutablePointValues reader, int from, int to) {
    final int bitsPerDocId = PackedInts.bitsRequired(maxDoc - 1);
    new MSBRadixSorter(packedBytesLength + (bitsPerDocId + 7) / 8) {

@@ -89,7 +89,7 @@ final class MutablePointsReaderUtils {

  /** Sort points on the given dimension. */
  static void sortByDim(int sortedDim, int bytesPerDim, int[] commonPrefixLengths,
-                        MutablePointsReader reader, int from, int to,
+                        MutablePointValues reader, int from, int to,
                        BytesRef scratch1, BytesRef scratch2) {

    // No need for a fancy radix sort here, this is called on the leaves only so
@@ -128,7 +128,7 @@ final class MutablePointsReaderUtils {
   * than or equal to it and all values on the right must be greater than or
   * equal to it. */
  static void partition(int maxDoc, int splitDim, int bytesPerDim, int commonPrefixLen,
-                        MutablePointsReader reader, int from, int to, int mid,
+                        MutablePointValues reader, int from, int to, int mid,
                        BytesRef scratch1, BytesRef scratch2) {
    final int offset = splitDim * bytesPerDim + commonPrefixLen;
    final int cmpBytes = bytesPerDim - commonPrefixLen;
@@ -78,7 +78,7 @@ public class Test2BPoints extends LuceneTestCase {
    DirectoryReader r = DirectoryReader.open(w);
    IndexSearcher s = new IndexSearcher(r);
    assertEquals(numDocs, s.count(LongPoint.newRangeQuery("long", Long.MIN_VALUE, Long.MAX_VALUE)));
-    assertTrue(r.leaves().get(0).reader().getPointValues().size("long") > Integer.MAX_VALUE);
+    assertTrue(r.leaves().get(0).reader().getPointValues("long").size() > Integer.MAX_VALUE);
    r.close();
    w.close();
    System.out.println("TEST: now CheckIndex");
@@ -126,7 +126,7 @@ public class Test2BPoints extends LuceneTestCase {
    DirectoryReader r = DirectoryReader.open(w);
    IndexSearcher s = new IndexSearcher(r);
    assertEquals(numDocs, s.count(LongPoint.newRangeQuery("long", new long[] {Long.MIN_VALUE, Long.MIN_VALUE}, new long[] {Long.MAX_VALUE, Long.MAX_VALUE})));
-    assertTrue(r.leaves().get(0).reader().getPointValues().size("long") > Integer.MAX_VALUE);
+    assertTrue(r.leaves().get(0).reader().getPointValues("long").size() > Integer.MAX_VALUE);
    r.close();
    w.close();
    System.out.println("TEST: now CheckIndex");
@@ -503,8 +503,8 @@ public class TestPointValues extends LuceneTestCase {
    IndexReader r = DirectoryReader.open(w);

    for(LeafReaderContext ctx : r.leaves()) {
-      PointValues points = ctx.reader().getPointValues();
-      points.intersect("int",
+      PointValues points = ctx.reader().getPointValues("int");
+      points.intersect(
          new IntersectVisitor() {

            int lastDocID = -1;
@@ -553,8 +553,7 @@ public class TestPointValues extends LuceneTestCase {

    w.forceMerge(1);
    DirectoryReader r = w.getReader();
-    assertEquals(0, r.leaves().get(0).reader().getPointValues().size("int"));
-    assertEquals(0, r.leaves().get(0).reader().getPointValues().getDocCount("int"));
+    assertNull(r.leaves().get(0).reader().getPointValues("int"));
    w.close();
    r.close();
    dir.close();
@@ -611,10 +610,10 @@ public class TestPointValues extends LuceneTestCase {
      int size = 0;
      String fieldName = "int" + field;
      for(LeafReaderContext ctx : r.leaves()) {
-        PointValues points = ctx.reader().getPointValues();
-        if (ctx.reader().getFieldInfos().fieldInfo(fieldName) != null) {
-          docCount += points.getDocCount(fieldName);
-          size += points.size(fieldName);
+        PointValues points = ctx.reader().getPointValues(fieldName);
+        if (points != null) {
+          docCount += points.getDocCount();
+          size += points.size();
        }
      }
      assertEquals(fieldDocCounts[field], docCount);
@@ -738,7 +737,7 @@ public class TestPointValues extends LuceneTestCase {
    final IndexReader reader1 = DirectoryReader.open(w);
    w.forceMerge(1);
    final IndexReader reader2 = DirectoryReader.open(w);
-    final PointValues expected = getOnlyLeafReader(reader2).getPointValues();
+    final PointValues expected = getOnlyLeafReader(reader2).getPointValues("field");
    if (expected == null) {
      assertNull(PointValues.getMinPackedValue(reader1, "field"));
      assertNull(PointValues.getMaxPackedValue(reader1, "field"));
@@ -746,13 +745,13 @@ public class TestPointValues extends LuceneTestCase {
      assertEquals(0, PointValues.size(reader1, "field"));
    } else {
      assertArrayEquals(
-          expected.getMinPackedValue("field"),
+          expected.getMinPackedValue(),
          PointValues.getMinPackedValue(reader1, "field"));
      assertArrayEquals(
-          expected.getMaxPackedValue("field"),
+          expected.getMaxPackedValue(),
          PointValues.getMaxPackedValue(reader1, "field"));
-      assertEquals(expected.getDocCount("field"), PointValues.getDocCount(reader1, "field"));
-      assertEquals(expected.size("field"), PointValues.size(reader1, "field"));
+      assertEquals(expected.getDocCount(), PointValues.getDocCount(reader1, "field"));
+      assertEquals(expected.size(), PointValues.size(reader1, "field"));
    }
    IOUtils.close(w, reader1, reader2, dir);
  }

@@ -307,37 +307,37 @@ public class TestDocIdSetBuilder extends LuceneTestCase {
    }

    @Override
-    public void intersect(String fieldName, IntersectVisitor visitor) throws IOException {
+    public void intersect(IntersectVisitor visitor) throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
-    public byte[] getMinPackedValue(String fieldName) throws IOException {
+    public byte[] getMinPackedValue() throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
-    public byte[] getMaxPackedValue(String fieldName) throws IOException {
+    public byte[] getMaxPackedValue() throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
-    public int getNumDimensions(String fieldName) throws IOException {
+    public int getNumDimensions() throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
-    public int getBytesPerDimension(String fieldName) throws IOException {
+    public int getBytesPerDimension() throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
-    public long size(String fieldName) {
+    public long size() {
      return numPoints;
    }

    @Override
-    public int getDocCount(String fieldName) {
+    public int getDocCount() {
      return docCount;
    }

@@ -20,7 +20,7 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Comparator;

-import org.apache.lucene.codecs.MutablePointsReader;
+import org.apache.lucene.codecs.MutablePointValues;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
@@ -184,7 +184,7 @@ public class TestMutablePointsReaderUtils extends LuceneTestCase {
    }
  }

-  private static class DummyPointsReader extends MutablePointsReader {
+  private static class DummyPointsReader extends MutablePointValues {

    private final Point[] points;

@@ -192,16 +192,6 @@ public class TestMutablePointsReaderUtils extends LuceneTestCase {
      this.points = points.clone();
    }

-    @Override
-    public void close() throws IOException {
-      throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public long ramBytesUsed() {
-      return 0;
-    }
-
    @Override
    public void getValue(int i, BytesRef packedValue) {
      packedValue.bytes = points[i].packedValue.bytes;
@@ -226,42 +216,37 @@ public class TestMutablePointsReaderUtils extends LuceneTestCase {
    }

    @Override
-    public void checkIntegrity() throws IOException {
+    public void intersect(IntersectVisitor visitor) throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
-    public void intersect(String fieldName, IntersectVisitor visitor) throws IOException {
+    public byte[] getMinPackedValue() throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
-    public byte[] getMinPackedValue(String fieldName) throws IOException {
+    public byte[] getMaxPackedValue() throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
-    public byte[] getMaxPackedValue(String fieldName) throws IOException {
+    public int getNumDimensions() throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
-    public int getNumDimensions(String fieldName) throws IOException {
+    public int getBytesPerDimension() throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
-    public int getBytesPerDimension(String fieldName) throws IOException {
+    public long size() {
      throw new UnsupportedOperationException();
    }

    @Override
-    public long size(String fieldName) {
-      throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public int getDocCount(String fieldName) {
+    public int getDocCount() {
      throw new UnsupportedOperationException();
    }

@@ -144,7 +144,7 @@ public class TermVectorLeafReader extends LeafReader {
  }

  @Override
-  public PointValues getPointValues() {
+  public PointValues getPointValues(String fieldName) {
    return null;
  }

@@ -140,10 +140,6 @@ abstract class PointInSetIncludingScoreQuery extends Query {
      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        LeafReader reader = context.reader();
-        PointValues values = reader.getPointValues();
-        if (values == null) {
-          return null;
-        }
        FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field);
        if (fieldInfo == null) {
          return null;
@@ -154,10 +150,14 @@ abstract class PointInSetIncludingScoreQuery extends Query {
        if (fieldInfo.getPointNumBytes() != bytesPerDim) {
          throw new IllegalArgumentException("field=\"" + field + "\" was indexed with bytesPerDim=" + fieldInfo.getPointNumBytes() + " but this query has bytesPerDim=" + bytesPerDim);
        }
+        PointValues values = reader.getPointValues(field);
+        if (values == null) {
+          return null;
+        }

        FixedBitSet result = new FixedBitSet(reader.maxDoc());
        float[] scores = new float[reader.maxDoc()];
-        values.intersect(field, new MergePointVisitor(sortedPackedPoints, result, scores));
+        values.intersect(new MergePointVisitor(sortedPackedPoints, result, scores));
        return new Scorer(this) {

          DocIdSetIterator disi = new BitSetIterator(result, 10L);
@@ -22,6 +22,7 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.Map;
+import java.util.Objects;
 import java.util.SortedMap;
 import java.util.TreeMap;

@@ -1065,22 +1066,12 @@ public class MemoryIndex {
   */
  private final class MemoryIndexReader extends LeafReader {

-    private final PointValues pointValues;
    private Fields memoryFields = new MemoryFields(fields);

    private MemoryIndexReader() {
      super(); // avoid as much superclass baggage as possible
-      boolean hasPointValues = false;
      for (Info info : fields.values()) {
        info.prepareDocValuesAndPointValues();
-        if (info.pointValues != null) {
-          hasPointValues = true;
-        }
-      }
-      if (hasPointValues) {
-        pointValues = new MemoryIndexPointValues();
-      } else {
-        pointValues = null;
      }
    }

@@ -1198,8 +1189,12 @@ public class MemoryIndex {
    }

    @Override
-    public PointValues getPointValues() {
-      return pointValues;
+    public PointValues getPointValues(String fieldName) {
+      Info info = fields.get(fieldName);
+      if (info.pointValues == null) {
+        return null;
+      }
+      return new MemoryIndexPointValues(info);
    }

    @Override
@@ -1504,16 +1499,15 @@ public class MemoryIndex {

  private class MemoryIndexPointValues extends PointValues {

+    final Info info;
+
+    MemoryIndexPointValues(Info info) {
+      this.info = Objects.requireNonNull(info);
+    }
+
    @Override
-    public void intersect(String fieldName, IntersectVisitor visitor) throws IOException {
-      Info info = fields.get(fieldName);
-      if (info == null) {
-        return;
-      }
+    public void intersect(IntersectVisitor visitor) throws IOException {
      BytesRef[] values = info.pointValues;
-      if (values == null) {
-        return;
-      }

      visitor.grow(info.pointValuesCount);
      for (int i = 0; i < info.pointValuesCount; i++) {
@@ -1522,11 +1516,7 @@ public class MemoryIndex {
    }

    @Override
-    public byte[] getMinPackedValue(String fieldName) throws IOException {
-      Info info = fields.get(fieldName);
-      if (info == null) {
-        return null;
-      }
+    public byte[] getMinPackedValue() throws IOException {
      BytesRef[] values = info.pointValues;
      if (values != null) {
        return info.minPackedValue;
@@ -1536,63 +1526,28 @@ public class MemoryIndex {
      }
    }

    @Override
-    public byte[] getMaxPackedValue(String fieldName) throws IOException {
-      Info info = fields.get(fieldName);
-      if (info == null) {
-        return null;
-      }
-      BytesRef[] values = info.pointValues;
-      if (values != null) {
+    public byte[] getMaxPackedValue() throws IOException {
      return info.maxPackedValue;
-      } else {
-        return null;
-      }
    }

    @Override
-    public int getNumDimensions(String fieldName) throws IOException {
-      Info info = fields.get(fieldName);
-      if (info == null){
-        return 0;
-      }
+    public int getNumDimensions() throws IOException {
      return info.fieldInfo.getPointDimensionCount();
    }

    @Override
-    public int getBytesPerDimension(String fieldName) throws IOException {
-      Info info = fields.get(fieldName);
-      if (info == null){
-        return 0;
-      }
+    public int getBytesPerDimension() throws IOException {
      return info.fieldInfo.getPointNumBytes();
    }

    @Override
-    public long size(String fieldName) {
-      Info info = fields.get(fieldName);
-      if (info == null) {
-        return 0;
-      }
-      BytesRef[] values = info.pointValues;
-      if (values != null) {
+    public long size() {
      return info.pointValuesCount;
-      } else {
-        return 0;
-      }
    }

    @Override
-    public int getDocCount(String fieldName) {
-      Info info = fields.get(fieldName);
-      if (info == null) {
-        return 0;
-      }
-      BytesRef[] values = info.pointValues;
-      if (values != null) {
+    public int getDocCount() {
      return 1;
-      } else {
-        return 0;
-      }
    }

  }

@@ -479,9 +479,9 @@ public class TestMemoryIndex extends LuceneTestCase {
    MemoryIndex mi = MemoryIndex.fromDocument(doc, analyzer);
    LeafReader leafReader = mi.createSearcher().getIndexReader().leaves().get(0).reader();

-    assertEquals(1, leafReader.getPointValues().size("field"));
-    assertArrayEquals(packedPoint, leafReader.getPointValues().getMinPackedValue("field"));
-    assertArrayEquals(packedPoint, leafReader.getPointValues().getMaxPackedValue("field"));
+    assertEquals(1, leafReader.getPointValues("field").size());
+    assertArrayEquals(packedPoint, leafReader.getPointValues("field").getMinPackedValue());
+    assertArrayEquals(packedPoint, leafReader.getPointValues("field").getMaxPackedValue());

    BinaryDocValues dvs = leafReader.getBinaryDocValues("field");
    assertEquals(0, dvs.nextDoc());
@@ -21,7 +21,6 @@ import java.util.ArrayList;
 import java.util.List;

 import org.apache.lucene.codecs.lucene60.Lucene60PointsFormat;
-import org.apache.lucene.codecs.lucene60.Lucene60PointsReader;
 import org.apache.lucene.geo.GeoUtils;
 import org.apache.lucene.geo.Polygon;
 import org.apache.lucene.index.FieldInfo;
@@ -307,13 +306,13 @@ public class LatLonPoint extends Field {
    List<Bits> liveDocs = new ArrayList<>();
    int totalHits = 0;
    for(LeafReaderContext leaf : searcher.getIndexReader().leaves()) {
-      PointValues points = leaf.reader().getPointValues();
+      PointValues points = leaf.reader().getPointValues(field);
      if (points != null) {
-        if (points instanceof Lucene60PointsReader == false) {
+        if (points instanceof BKDReader == false) {
          throw new IllegalArgumentException("can only run on Lucene60PointsReader points implementation, but got " + points);
        }
-        totalHits += points.getDocCount(field);
-        BKDReader reader = ((Lucene60PointsReader) points).getBKDReader(field);
+        totalHits += points.getDocCount();
+        BKDReader reader = (BKDReader) points;
        if (reader != null) {
          readers.add(reader);
          docBases.add(leaf.docBase);
@@ -105,7 +105,7 @@ final class LatLonPointDistanceQuery extends Query {
      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        LeafReader reader = context.reader();
-        PointValues values = reader.getPointValues();
+        PointValues values = reader.getPointValues(field);
        if (values == null) {
          // No docs in this segment had any points fields
          return null;
@@ -120,7 +120,7 @@ final class LatLonPointDistanceQuery extends Query {
        // matching docids
        DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc(), values, field);

-        values.intersect(field,
+        values.intersect(
            new IntersectVisitor() {

              DocIdSetBuilder.BulkAdder adder;
@@ -98,7 +98,7 @@ final class LatLonPointInPolygonQuery extends Query {
      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        LeafReader reader = context.reader();
-        PointValues values = reader.getPointValues();
+        PointValues values = reader.getPointValues(field);
        if (values == null) {
          // No docs in this segment had any points fields
          return null;
@@ -113,7 +113,7 @@ final class LatLonPointInPolygonQuery extends Query {
        // matching docids
        DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc(), values, field);

-        values.intersect(field,
+        values.intersect(
            new IntersectVisitor() {

              DocIdSetBuilder.BulkAdder adder;
@@ -110,7 +110,7 @@ abstract class RangeFieldQuery extends Query {
      final RangeFieldComparator comparator = new RangeFieldComparator();
      private DocIdSet buildMatchingDocIdSet(LeafReader reader, PointValues values) throws IOException {
        DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc(), values, field);
-        values.intersect(field,
+        values.intersect(
            new IntersectVisitor() {
              DocIdSetBuilder.BulkAdder adder;
              @Override
@@ -157,7 +157,7 @@ abstract class RangeFieldQuery extends Query {
      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        LeafReader reader = context.reader();
-        PointValues values = reader.getPointValues();
+        PointValues values = reader.getPointValues(field);
        if (values == null) {
          // no docs in this segment indexed any ranges
          return null;
@@ -168,9 +168,9 @@ abstract class RangeFieldQuery extends Query {
        }
        checkFieldInfo(fieldInfo);
        boolean allDocsMatch = true;
-        if (values.getDocCount(field) == reader.maxDoc()) {
+        if (values.getDocCount() == reader.maxDoc()) {
          // if query crosses, docs need to be further scrutinized
-          byte[] range = getInternalRange(values.getMinPackedValue(field), values.getMaxPackedValue(field));
+          byte[] range = getInternalRange(values.getMinPackedValue(), values.getMaxPackedValue());
          // if the internal node is not equal and not contained by the query, all docs do not match
          if ((!Arrays.equals(ranges, range)
              && (comparator.contains(range) && queryType != QueryType.CONTAINS)) == false) {
@@ -70,7 +70,7 @@ final class PointInGeo3DShapeQuery extends Query {
      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        LeafReader reader = context.reader();
-        PointValues values = reader.getPointValues();
+        PointValues values = reader.getPointValues(field);
        if (values == null) {
          return null;
        }
@@ -99,7 +99,7 @@ final class PointInGeo3DShapeQuery extends Query {

        DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc(), values, field);

-        values.intersect(field, new PointInShapeIntersectVisitor(result, shape, shapeBounds));
+        values.intersect(new PointInShapeIntersectVisitor(result, shape, shapeBounds));

        return new ConstantScoreScorer(this, score(), result.build().iterator());
      }
@@ -1486,11 +1486,11 @@ public class TestGeo3DPoint extends LuceneTestCase {
        docID - reader.leaves().get(subIndex).docBase, 3, Integer.BYTES, b);

    // Do first phase, where we just figure out the "path" that leads to the target docID:
-    leafReader.getPointValues().intersect(fieldName, visitor);
+    leafReader.getPointValues(fieldName).intersect(visitor);

    // Do second phase, where we we see how the wrapped visitor responded along that path:
    visitor.startSecondPhase();
-    leafReader.getPointValues().intersect(fieldName, visitor);
+    leafReader.getPointValues(fieldName).intersect(visitor);

    return b.toString();
  }
@@ -22,16 +22,13 @@ import java.util.Collection;
 import org.apache.lucene.codecs.PointsFormat;
 import org.apache.lucene.codecs.PointsReader;
 import org.apache.lucene.codecs.PointsWriter;
+import org.apache.lucene.index.AssertingLeafReader;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.MergeState;
-import org.apache.lucene.index.PointValues.IntersectVisitor;
-import org.apache.lucene.index.PointValues.Relation;
 import org.apache.lucene.index.PointValues;
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.index.SegmentWriteState;
 import org.apache.lucene.util.Accountable;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.StringHelper;
 import org.apache.lucene.util.TestUtil;

 /**
@@ -66,88 +63,6 @@ public final class AssertingPointsFormat extends PointsFormat {
     return new AssertingPointsReader(state.segmentInfo.maxDoc(), in.fieldsReader(state));
   }

-  /** Validates in the 1D case that all points are visited in order, and point values are in bounds of the last cell checked */
-  static class AssertingIntersectVisitor implements IntersectVisitor {
-    final IntersectVisitor in;
-    final int numDims;
-    final int bytesPerDim;
-    final byte[] lastDocValue;
-    final byte[] lastMinPackedValue;
-    final byte[] lastMaxPackedValue;
-    private Relation lastCompareResult;
-    private int lastDocID = -1;
-    private int docBudget;
-
-    public AssertingIntersectVisitor(int numDims, int bytesPerDim, IntersectVisitor in) {
-      this.in = in;
-      this.numDims = numDims;
-      this.bytesPerDim = bytesPerDim;
-      lastMaxPackedValue = new byte[numDims*bytesPerDim];
-      lastMinPackedValue = new byte[numDims*bytesPerDim];
-      if (numDims == 1) {
-        lastDocValue = new byte[bytesPerDim];
-      } else {
-        lastDocValue = null;
-      }
-    }
-
-    @Override
-    public void visit(int docID) throws IOException {
-      assert --docBudget >= 0 : "called add() more times than the last call to grow() reserved";
-
-      // This method, not filtering each hit, should only be invoked when the cell is inside the query shape:
-      assert lastCompareResult == Relation.CELL_INSIDE_QUERY;
-      in.visit(docID);
-    }
-
-    @Override
-    public void visit(int docID, byte[] packedValue) throws IOException {
-      assert --docBudget >= 0 : "called add() more times than the last call to grow() reserved";
-
-      // This method, to filter each doc's value, should only be invoked when the cell crosses the query shape:
-      assert lastCompareResult == PointValues.Relation.CELL_CROSSES_QUERY;
-
-      // This doc's packed value should be contained in the last cell passed to compare:
-      for(int dim=0;dim<numDims;dim++) {
-        assert StringHelper.compare(bytesPerDim, lastMinPackedValue, dim*bytesPerDim, packedValue, dim*bytesPerDim) <= 0: "dim=" + dim + " of " + numDims + " value=" + new BytesRef(packedValue);
-        assert StringHelper.compare(bytesPerDim, lastMaxPackedValue, dim*bytesPerDim, packedValue, dim*bytesPerDim) >= 0: "dim=" + dim + " of " + numDims + " value=" + new BytesRef(packedValue);
-      }
-
-      // TODO: we should assert that this "matches" whatever relation the last call to compare had returned
-      assert packedValue.length == numDims * bytesPerDim;
-      if (numDims == 1) {
-        int cmp = StringHelper.compare(bytesPerDim, lastDocValue, 0, packedValue, 0);
-        if (cmp < 0) {
-          // ok
-        } else if (cmp == 0) {
-          assert lastDocID <= docID: "doc ids are out of order when point values are the same!";
-        } else {
-          // out of order!
-          assert false: "point values are out of order";
-        }
-        System.arraycopy(packedValue, 0, lastDocValue, 0, bytesPerDim);
-        lastDocID = docID;
-      }
-      in.visit(docID, packedValue);
-    }
-
-    @Override
-    public void grow(int count) {
-      in.grow(count);
-      docBudget = count;
-    }
-
-    @Override
-    public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
-      for(int dim=0;dim<numDims;dim++) {
-        assert StringHelper.compare(bytesPerDim, minPackedValue, dim*bytesPerDim, maxPackedValue, dim*bytesPerDim) <= 0;
-      }
-      System.arraycopy(maxPackedValue, 0, lastMaxPackedValue, 0, numDims*bytesPerDim);
-      System.arraycopy(minPackedValue, 0, lastMinPackedValue, 0, numDims*bytesPerDim);
-      lastCompareResult = in.compare(minPackedValue, maxPackedValue);
-      return lastCompareResult;
-    }
-  }
-
   static class AssertingPointsReader extends PointsReader {
     private final PointsReader in;

@@ -169,9 +84,12 @@ public final class AssertingPointsFormat extends PointsFormat {
     }

     @Override
-    public void intersect(String fieldName, IntersectVisitor visitor) throws IOException {
-      in.intersect(fieldName,
-                   new AssertingIntersectVisitor(in.getNumDimensions(fieldName), in.getBytesPerDimension(fieldName), visitor));
+    public PointValues getValues(String field) throws IOException {
+      PointValues values = this.in.getValues(field);
+      if (values == null) {
+        return null;
+      }
+      return new AssertingLeafReader.AssertingPointValues(values, maxDoc);
     }

     @Override
@@ -202,49 +120,6 @@ public final class AssertingPointsFormat extends PointsFormat {
     public String toString() {
       return getClass().getSimpleName() + "(" + in.toString() + ")";
     }

-    @Override
-    public byte[] getMinPackedValue(String fieldName) throws IOException {
-      assertStats(fieldName);
-      return in.getMinPackedValue(fieldName);
-    }
-
-    @Override
-    public byte[] getMaxPackedValue(String fieldName) throws IOException {
-      assertStats(fieldName);
-      return in.getMaxPackedValue(fieldName);
-    }
-
-    @Override
-    public int getNumDimensions(String fieldName) throws IOException {
-      assertStats(fieldName);
-      return in.getNumDimensions(fieldName);
-    }
-
-    @Override
-    public int getBytesPerDimension(String fieldName) throws IOException {
-      assertStats(fieldName);
-      return in.getBytesPerDimension(fieldName);
-    }
-
-    @Override
-    public long size(String fieldName) {
-      assertStats(fieldName);
-      return in.size(fieldName);
-    }
-
-    @Override
-    public int getDocCount(String fieldName) {
-      assertStats(fieldName);
-      return in.getDocCount(fieldName);
-    }
-
-    private void assertStats(String fieldName) {
-      assert in.size(fieldName) >= 0;
-      assert in.getDocCount(fieldName) >= 0;
-      assert in.getDocCount(fieldName) <= in.size(fieldName);
-      assert in.getDocCount(fieldName) <= maxDoc;
-    }
   }

   static class AssertingPointsWriter extends PointsWriter {

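At the codec level the commit collapses PointsReader's per-field methods (intersect(String, IntersectVisitor), getMinPackedValue(String), size(String), getDocCount(String), and so on) into a single getValues(String) that returns a PointValues, so wrapping readers now wrap the returned object instead of overriding each per-field method. A minimal forwarding wrapper in the spirit of AssertingPointValues (the class name FilterPointValuesExample is hypothetical, not part of this commit):

    import java.io.IOException;

    import org.apache.lucene.index.PointValues;

    // Illustrative delegator over the new field-free PointValues surface.
    class FilterPointValuesExample extends PointValues {
      private final PointValues in;

      FilterPointValuesExample(PointValues in) {
        this.in = in;
      }

      @Override
      public void intersect(IntersectVisitor visitor) throws IOException {
        in.intersect(visitor); // a real wrapper could decorate the visitor here
      }

      @Override
      public byte[] getMinPackedValue() throws IOException {
        return in.getMinPackedValue();
      }

      @Override
      public byte[] getMaxPackedValue() throws IOException {
        return in.getMaxPackedValue();
      }

      @Override
      public int getNumDimensions() throws IOException {
        return in.getNumDimensions();
      }

      @Override
      public int getBytesPerDimension() throws IOException {
        return in.getBytesPerDimension();
      }

      @Override
      public long size() {
        return in.size();
      }

      @Override
      public int getDocCount() {
        return in.getDocCount();
      }
    }
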
@@ -24,6 +24,7 @@ import org.apache.lucene.codecs.PointsReader;
 import org.apache.lucene.codecs.PointsWriter;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.MergeState;
+import org.apache.lucene.index.PointValues;
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.index.SegmentWriteState;

@@ -114,46 +115,67 @@ class CrankyPointsFormat extends PointsFormat {
     }

     @Override
-    public void intersect(String fieldName, IntersectVisitor visitor) throws IOException {
+    public PointValues getValues(String fieldName) throws IOException {
+      final PointValues delegate = this.delegate.getValues(fieldName);
+      if (delegate == null) {
+        return null;
+      }
+      return new PointValues() {
+
+        @Override
+        public void intersect(IntersectVisitor visitor) throws IOException {
           if (random.nextInt(100) == 0) {
             throw new IOException("Fake IOException");
           }
-          delegate.intersect(fieldName, visitor);
+          delegate.intersect(visitor);
           if (random.nextInt(100) == 0) {
             throw new IOException("Fake IOException");
           }
         }

         @Override
-        public byte[] getMinPackedValue(String fieldName) throws IOException {
+        public byte[] getMinPackedValue() throws IOException {
           if (random.nextInt(100) == 0) {
             throw new IOException("Fake IOException");
           }
-          return delegate.getMinPackedValue(fieldName);
+          return delegate.getMinPackedValue();
         }

         @Override
-        public byte[] getMaxPackedValue(String fieldName) throws IOException {
+        public byte[] getMaxPackedValue() throws IOException {
           if (random.nextInt(100) == 0) {
             throw new IOException("Fake IOException");
           }
-          return delegate.getMaxPackedValue(fieldName);
+          return delegate.getMaxPackedValue();
         }

         @Override
-        public int getNumDimensions(String fieldName) throws IOException {
+        public int getNumDimensions() throws IOException {
           if (random.nextInt(100) == 0) {
             throw new IOException("Fake IOException");
           }
-          return delegate.getNumDimensions(fieldName);
+          return delegate.getNumDimensions();
         }

         @Override
-        public int getBytesPerDimension(String fieldName) throws IOException {
+        public int getBytesPerDimension() throws IOException {
           if (random.nextInt(100) == 0) {
             throw new IOException("Fake IOException");
           }
-          return delegate.getBytesPerDimension(fieldName);
+          return delegate.getBytesPerDimension();
+        }
+
+        @Override
+        public long size() {
+          return delegate.size();
+        }
+
+        @Override
+        public int getDocCount() {
+          return delegate.getDocCount();
+        }
+
+      };
     }

     @Override
@@ -168,15 +190,5 @@ class CrankyPointsFormat extends PointsFormat {
     public long ramBytesUsed() {
       return delegate.ramBytesUsed();
     }

-    @Override
-    public long size(String fieldName) {
-      return delegate.size(fieldName);
-    }
-
-    @Override
-    public int getDocCount(String fieldName) {
-      return delegate.getDocCount(fieldName);
-    }
   }
 }

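Because the fake-failure format now injects its exceptions inside the PointValues it returns from getValues, callers only hit the failures once they actually touch the per-field values. A hedged sketch of how a test might exercise such a flaky instance (the helper class and its parameters are illustrative):

    import java.io.IOException;

    import org.apache.lucene.codecs.PointsReader;
    import org.apache.lucene.index.PointValues;

    // Illustrative: every method of a cranky PointValues may throw, so poke them under try/catch.
    class CrankyConsumerExample {
      static void touch(PointsReader reader, String field) {
        try {
          PointValues values = reader.getValues(field);
          if (values == null) {
            return; // no points for this field in this segment
          }
          // Any of these calls may throw the injected "Fake IOException":
          values.getMinPackedValue();
          values.getMaxPackedValue();
          values.getNumDimensions();
          values.getBytesPerDimension();
        } catch (IOException e) {
          // expected occasionally; the cranky format fails on purpose
        }
      }
    }
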
@@ -18,10 +18,14 @@ package org.apache.lucene.index;

 import java.io.IOException;
 import java.util.Iterator;
+import java.util.Objects;

+import org.apache.lucene.index.PointValues.IntersectVisitor;
+import org.apache.lucene.index.PointValues.Relation;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.StringHelper;
 import org.apache.lucene.util.VirtualMethod;
 import org.apache.lucene.util.automaton.CompiledAutomaton;

@@ -777,6 +781,144 @@ public class AssertingLeafReader extends FilterLeafReader {
     }
   }

+  /** Wraps a SortedSetDocValues but with additional asserts */
+  public static class AssertingPointValues extends PointValues {
+
+    private final PointValues in;
+
+    /** Sole constructor. */
+    public AssertingPointValues(PointValues in, int maxDoc) {
+      this.in = in;
+      assertStats(maxDoc);
+    }
+
+    private void assertStats(int maxDoc) {
+      assert in.size() > 0;
+      assert in.getDocCount() > 0;
+      assert in.getDocCount() <= in.size();
+      assert in.getDocCount() <= maxDoc;
+    }
+
+    @Override
+    public void intersect(IntersectVisitor visitor) throws IOException {
+      in.intersect(new AssertingIntersectVisitor(in.getNumDimensions(), in.getBytesPerDimension(), visitor));
+    }
+
+    @Override
+    public byte[] getMinPackedValue() throws IOException {
+      return Objects.requireNonNull(in.getMinPackedValue());
+    }
+
+    @Override
+    public byte[] getMaxPackedValue() throws IOException {
+      return Objects.requireNonNull(in.getMaxPackedValue());
+    }
+
+    @Override
+    public int getNumDimensions() throws IOException {
+      return in.getNumDimensions();
+    }
+
+    @Override
+    public int getBytesPerDimension() throws IOException {
+      return in.getBytesPerDimension();
+    }
+
+    @Override
+    public long size() {
+      return in.size();
+    }
+
+    @Override
+    public int getDocCount() {
+      return in.getDocCount();
+    }
+
+  }
+
+  /** Validates in the 1D case that all points are visited in order, and point values are in bounds of the last cell checked */
+  static class AssertingIntersectVisitor implements IntersectVisitor {
+    final IntersectVisitor in;
+    final int numDims;
+    final int bytesPerDim;
+    final byte[] lastDocValue;
+    final byte[] lastMinPackedValue;
+    final byte[] lastMaxPackedValue;
+    private Relation lastCompareResult;
+    private int lastDocID = -1;
+    private int docBudget;
+
+    AssertingIntersectVisitor(int numDims, int bytesPerDim, IntersectVisitor in) {
+      this.in = in;
+      this.numDims = numDims;
+      this.bytesPerDim = bytesPerDim;
+      lastMaxPackedValue = new byte[numDims*bytesPerDim];
+      lastMinPackedValue = new byte[numDims*bytesPerDim];
+      if (numDims == 1) {
+        lastDocValue = new byte[bytesPerDim];
+      } else {
+        lastDocValue = null;
+      }
+    }
+
+    @Override
+    public void visit(int docID) throws IOException {
+      assert --docBudget >= 0 : "called add() more times than the last call to grow() reserved";
+
+      // This method, not filtering each hit, should only be invoked when the cell is inside the query shape:
+      assert lastCompareResult == Relation.CELL_INSIDE_QUERY;
+      in.visit(docID);
+    }
+
+    @Override
+    public void visit(int docID, byte[] packedValue) throws IOException {
+      assert --docBudget >= 0 : "called add() more times than the last call to grow() reserved";
+
+      // This method, to filter each doc's value, should only be invoked when the cell crosses the query shape:
+      assert lastCompareResult == PointValues.Relation.CELL_CROSSES_QUERY;
+
+      // This doc's packed value should be contained in the last cell passed to compare:
+      for(int dim=0;dim<numDims;dim++) {
+        assert StringHelper.compare(bytesPerDim, lastMinPackedValue, dim*bytesPerDim, packedValue, dim*bytesPerDim) <= 0: "dim=" + dim + " of " + numDims + " value=" + new BytesRef(packedValue);
+        assert StringHelper.compare(bytesPerDim, lastMaxPackedValue, dim*bytesPerDim, packedValue, dim*bytesPerDim) >= 0: "dim=" + dim + " of " + numDims + " value=" + new BytesRef(packedValue);
+      }
+
+      // TODO: we should assert that this "matches" whatever relation the last call to compare had returned
+      assert packedValue.length == numDims * bytesPerDim;
+      if (numDims == 1) {
+        int cmp = StringHelper.compare(bytesPerDim, lastDocValue, 0, packedValue, 0);
+        if (cmp < 0) {
+          // ok
+        } else if (cmp == 0) {
+          assert lastDocID <= docID: "doc ids are out of order when point values are the same!";
+        } else {
+          // out of order!
+          assert false: "point values are out of order";
+        }
+        System.arraycopy(packedValue, 0, lastDocValue, 0, bytesPerDim);
+        lastDocID = docID;
+      }
+      in.visit(docID, packedValue);
+    }
+
+    @Override
+    public void grow(int count) {
+      in.grow(count);
+      docBudget = count;
+    }
+
+    @Override
+    public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+      for(int dim=0;dim<numDims;dim++) {
+        assert StringHelper.compare(bytesPerDim, minPackedValue, dim*bytesPerDim, maxPackedValue, dim*bytesPerDim) <= 0;
+      }
+      System.arraycopy(maxPackedValue, 0, lastMaxPackedValue, 0, numDims*bytesPerDim);
+      System.arraycopy(minPackedValue, 0, lastMinPackedValue, 0, numDims*bytesPerDim);
+      lastCompareResult = in.compare(minPackedValue, maxPackedValue);
+      return lastCompareResult;
+    }
+  }
+
   @Override
   public NumericDocValues getNumericDocValues(String field) throws IOException {
     NumericDocValues dv = super.getNumericDocValues(field);
@@ -861,6 +1003,15 @@ public class AssertingLeafReader extends FilterLeafReader {
     }
   }

+  @Override
+  public PointValues getPointValues(String field) throws IOException {
+    PointValues values = in.getPointValues(field);
+    if (values == null) {
+      return null;
+    }
+    return new AssertingPointValues(values, maxDoc());
+  }
+
   /** Wraps a Bits but with additional asserts */
   public static class AssertingBits implements Bits {
     private final Thread creationThread = Thread.currentThread();

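The asserting wrapper added above checks two invariants during intersect: every value delivered for a crossing cell must lie inside the bounds last passed to compare, and in the one-dimensional case values must arrive in non-decreasing order, with docIDs ordered among equal values. The containment half can be restated as a small predicate (a hypothetical helper, shown only to spell out the unsigned per-dimension comparison):

    import org.apache.lucene.util.StringHelper;

    // Illustrative restatement of the containment check AssertingIntersectVisitor performs:
    // a packed point lies inside a cell iff, for every dimension, it is >= the cell's min
    // and <= the cell's max under unsigned byte-wise comparison.
    class CellContainsExample {
      static boolean contains(byte[] minPackedValue, byte[] maxPackedValue,
                              byte[] packedValue, int numDims, int bytesPerDim) {
        for (int dim = 0; dim < numDims; dim++) {
          int offset = dim * bytesPerDim;
          if (StringHelper.compare(bytesPerDim, minPackedValue, offset, packedValue, offset) > 0) {
            return false; // below the cell's minimum in this dimension
          }
          if (StringHelper.compare(bytesPerDim, maxPackedValue, offset, packedValue, offset) < 0) {
            return false; // above the cell's maximum in this dimension
          }
        }
        return true;
      }
    }
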
@@ -80,11 +80,11 @@ public abstract class BasePointsFormatTestCase extends BaseIndexFileFormatTestCa

     DirectoryReader r = DirectoryReader.open(dir);
     LeafReader sub = getOnlyLeafReader(r);
-    PointValues values = sub.getPointValues();
+    PointValues values = sub.getPointValues("dim");

     // Simple test: make sure intersect can visit every doc:
     BitSet seen = new BitSet();
-    values.intersect("dim",
+    values.intersect(
                      new IntersectVisitor() {
                        @Override
                        public Relation compare(byte[] minPacked, byte[] maxPacked) {
@@ -122,11 +122,11 @@ public abstract class BasePointsFormatTestCase extends BaseIndexFileFormatTestCa

     DirectoryReader r = DirectoryReader.open(dir);
     LeafReader sub = getOnlyLeafReader(r);
-    PointValues values = sub.getPointValues();
+    PointValues values = sub.getPointValues("dim");

     // Simple test: make sure intersect can visit every doc:
     BitSet seen = new BitSet();
-    values.intersect("dim",
+    values.intersect(
                      new IntersectVisitor() {
                        @Override
                        public Relation compare(byte[] minPacked, byte[] maxPacked) {
@@ -168,7 +168,7 @@ public abstract class BasePointsFormatTestCase extends BaseIndexFileFormatTestCa
     Bits liveDocs = MultiFields.getLiveDocs(r);

     for(LeafReaderContext ctx : r.leaves()) {
-      PointValues values = ctx.reader().getPointValues();
+      PointValues values = ctx.reader().getPointValues("dim");

       NumericDocValues idValues = ctx.reader().getNumericDocValues("id");
       if (idValues == null) {
@@ -184,7 +184,7 @@ public abstract class BasePointsFormatTestCase extends BaseIndexFileFormatTestCa

       if (values != null) {
         BitSet seen = new BitSet();
-        values.intersect("dim",
+        values.intersect(
                          new IntersectVisitor() {
                            @Override
                            public Relation compare(byte[] minPacked, byte[] maxPacked) {
@@ -430,14 +430,14 @@ public abstract class BasePointsFormatTestCase extends BaseIndexFileFormatTestCa

     final BitSet hits = new BitSet();
     for(LeafReaderContext ctx : r.leaves()) {
-      PointValues dimValues = ctx.reader().getPointValues();
+      PointValues dimValues = ctx.reader().getPointValues("field");
       if (dimValues == null) {
         continue;
       }

       final int docBase = ctx.docBase;

-      dimValues.intersect("field", new IntersectVisitor() {
+      dimValues.intersect(new IntersectVisitor() {
         @Override
         public void visit(int docID) {
           hits.set(docBase+docID);
@@ -735,13 +735,13 @@ public abstract class BasePointsFormatTestCase extends BaseIndexFileFormatTestCa
     byte[] maxValues = new byte[numDims*numBytesPerDim];

     for(LeafReaderContext ctx : r.leaves()) {
-      PointValues dimValues = ctx.reader().getPointValues();
+      PointValues dimValues = ctx.reader().getPointValues("field");
       if (dimValues == null) {
         continue;
       }

-      byte[] leafMinValues = dimValues.getMinPackedValue("field");
-      byte[] leafMaxValues = dimValues.getMaxPackedValue("field");
+      byte[] leafMinValues = dimValues.getMinPackedValue();
+      byte[] leafMaxValues = dimValues.getMaxPackedValue();
       for(int dim=0;dim<numDims;dim++) {
         if (StringHelper.compare(numBytesPerDim, leafMinValues, dim*numBytesPerDim, minValues, dim*numBytesPerDim) < 0) {
           System.arraycopy(leafMinValues, dim*numBytesPerDim, minValues, dim*numBytesPerDim, numBytesPerDim);
@@ -792,14 +792,14 @@ public abstract class BasePointsFormatTestCase extends BaseIndexFileFormatTestCa
     final BitSet hits = new BitSet();

     for(LeafReaderContext ctx : r.leaves()) {
-      PointValues dimValues = ctx.reader().getPointValues();
+      PointValues dimValues = ctx.reader().getPointValues("field");
       if (dimValues == null) {
         continue;
       }

       final int docBase = ctx.docBase;

-      dimValues.intersect("field", new PointValues.IntersectVisitor() {
+      dimValues.intersect(new PointValues.IntersectVisitor() {
         @Override
         public void visit(int docID) {
           if (liveDocs == null || liveDocs.get(docBase+docID)) {

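Several of the test hunks above fold per-segment statistics into index-wide ones; with the per-field API the leaf-level bounds come from field-free getMinPackedValue()/getMaxPackedValue() calls on each segment's PointValues. A sketch of that aggregation under the same assumptions as the test (the helper class and the globalMin/globalMax names are hypothetical):

    import java.io.IOException;

    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.index.PointValues;
    import org.apache.lucene.util.StringHelper;

    // Illustrative: merge per-segment bounds of one point field into global bounds,
    // mirroring the @ -735 hunk above.
    class GlobalBoundsExample {
      static byte[][] globalBounds(IndexReader reader, String field, int numDims, int bytesPerDim) throws IOException {
        byte[] globalMin = null;
        byte[] globalMax = null;
        for (LeafReaderContext ctx : reader.leaves()) {
          PointValues values = ctx.reader().getPointValues(field);
          if (values == null) {
            continue; // this segment has no points for the field
          }
          byte[] leafMin = values.getMinPackedValue();
          byte[] leafMax = values.getMaxPackedValue();
          if (globalMin == null) {
            globalMin = leafMin.clone();
            globalMax = leafMax.clone();
            continue;
          }
          for (int dim = 0; dim < numDims; dim++) {
            int off = dim * bytesPerDim;
            if (StringHelper.compare(bytesPerDim, leafMin, off, globalMin, off) < 0) {
              System.arraycopy(leafMin, off, globalMin, off, bytesPerDim);
            }
            if (StringHelper.compare(bytesPerDim, leafMax, off, globalMax, off) > 0) {
              System.arraycopy(leafMax, off, globalMax, off, bytesPerDim);
            }
          }
        }
        return new byte[][] { globalMin, globalMax };
      }
    }
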
@@ -105,9 +105,10 @@ public class RandomCodec extends AssertingCodec {

     return new Lucene60PointsWriter(writeState, maxPointsInLeafNode, maxMBSortInHeap) {
       @Override
-      public void writeField(FieldInfo fieldInfo, PointsReader values) throws IOException {
+      public void writeField(FieldInfo fieldInfo, PointsReader reader) throws IOException {

-        boolean singleValuePerDoc = values.size(fieldInfo.name) == values.getDocCount(fieldInfo.name);
+        PointValues values = reader.getValues(fieldInfo.name);
+        boolean singleValuePerDoc = values.size() == values.getDocCount();

         try (BKDWriter writer = new RandomlySplittingBKDWriter(writeState.segmentInfo.maxDoc(),
                                                                writeState.directory,
@@ -116,10 +117,10 @@ public class RandomCodec extends AssertingCodec {
                                                                fieldInfo.getPointNumBytes(),
                                                                maxPointsInLeafNode,
                                                                maxMBSortInHeap,
-                                                               values.size(fieldInfo.name),
+                                                               values.size(),
                                                                singleValuePerDoc,
                                                                bkdSplitRandomSeed ^ fieldInfo.name.hashCode())) {
-          values.intersect(fieldInfo.name, new IntersectVisitor() {
+          values.intersect(new IntersectVisitor() {
             @Override
             public void visit(int docID) {
               throw new IllegalStateException();

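In the writer above, visit(int docID) throws IllegalStateException on purpose: a points writer needs the packed bytes of every point, so its visitor's compare always reports a crossing cell and only the value-carrying visit overload should ever be called. A sketch of that consumption pattern (CollectAllPointsExample is illustrative, not part of the commit):

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.lucene.index.PointValues;
    import org.apache.lucene.index.PointValues.IntersectVisitor;
    import org.apache.lucene.index.PointValues.Relation;

    // Illustrative: force the crossing path so every point is delivered with its packed value.
    class CollectAllPointsExample {
      static List<byte[]> collect(PointValues values) throws IOException {
        List<byte[]> out = new ArrayList<>();
        values.intersect(new IntersectVisitor() {
          @Override
          public void visit(int docID) {
            throw new IllegalStateException("never called when compare() always crosses");
          }
          @Override
          public void visit(int docID, byte[] packedValue) {
            out.add(packedValue.clone()); // clone: the array may be reused by the codec
          }
          @Override
          public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
            return Relation.CELL_CROSSES_QUERY;
          }
        });
        return out;
      }
    }
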
@@ -262,7 +262,7 @@ public class QueryUtils {
     }

     @Override
-    public PointValues getPointValues() {
+    public PointValues getPointValues(String fieldName) {
       return null;
     }

@@ -2601,12 +2601,12 @@ public abstract class LuceneTestCase extends Assert {
     final Map<Integer,Set<BytesRef>> docValues = new HashMap<>();
     for(LeafReaderContext ctx : reader.leaves()) {

-      PointValues points = ctx.reader().getPointValues();
+      PointValues points = ctx.reader().getPointValues(fieldName);
       if (points == null) {
         continue;
       }

-      points.intersect(fieldName,
+      points.intersect(
                        new PointValues.IntersectVisitor() {
                          @Override
                          public void visit(int docID) {

@@ -237,7 +237,7 @@ public final class SlowCompositeReaderWrapper extends LeafReader {
   }

   @Override
-  public PointValues getPointValues() {
+  public PointValues getPointValues(String field) {
     ensureOpen();
     return null;
   }

@@ -277,12 +277,12 @@ class FieldCacheImpl implements FieldCache {

   final void uninvertPoints(LeafReader reader, String field) throws IOException {
     final int maxDoc = reader.maxDoc();
-    PointValues values = reader.getPointValues();
+    PointValues values = reader.getPointValues(field);
     assert values != null;
-    assert values.size(field) > 0;
+    assert values.size() > 0;

     final boolean setDocsWithField;
-    final int docCount = values.getDocCount(field);
+    final int docCount = values.getDocCount();
     assert docCount <= maxDoc;
     if (docCount == maxDoc) {
       // Fast case: all docs have this field:
@@ -293,7 +293,7 @@ class FieldCacheImpl implements FieldCache {
     }

     BytesRef scratch = new BytesRef();
-    values.intersect(field, new IntersectVisitor() {
+    values.intersect(new IntersectVisitor() {
       @Override
       public void visit(int docID) throws IOException {
         throw new AssertionError();
@@ -512,11 +512,11 @@ class FieldCacheImpl implements FieldCache {

   private BitsEntry createValuePoints(LeafReader reader, String field) throws IOException {
     final int maxDoc = reader.maxDoc();
-    PointValues values = reader.getPointValues();
+    PointValues values = reader.getPointValues(field);
     assert values != null;
-    assert values.size(field) > 0;
+    assert values.size() > 0;

-    final int docCount = values.getDocCount(field);
+    final int docCount = values.getDocCount();
     assert docCount <= maxDoc;
     if (docCount == maxDoc) {
       // Fast case: all docs have this field:
@@ -615,14 +615,14 @@ class FieldCacheImpl implements FieldCache {
         if (info.getPointDimensionCount() != 1) {
           throw new IllegalStateException("Type mismatch: " + field + " was indexed with dimensions=" + info.getPointDimensionCount());
         }
-        PointValues values = reader.getPointValues();
+        PointValues values = reader.getPointValues(field);
         // no actual points for this field (e.g. all points deleted)
-        if (values == null || values.size(field) == 0) {
+        if (values == null || values.size() == 0) {
           return DocValues.emptyNumeric();
         }
         // not single-valued
-        if (values.size(field) != values.getDocCount(field)) {
-          throw new IllegalStateException("Type mismatch: " + field + " was indexed with multiple values, numValues=" + values.size(field) + ",numDocs=" + values.getDocCount(field));
+        if (values.size() != values.getDocCount()) {
+          throw new IllegalStateException("Type mismatch: " + field + " was indexed with multiple values, numValues=" + values.size() + ",numDocs=" + values.getDocCount());
         }
       } else {
         // postings case

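The FieldCache changes keep their old guards, just expressed against the per-field PointValues: the field must exist, must not be empty, and must be single-valued, which with the new API is simply size() == getDocCount(). A hedged sketch of that guard as a standalone helper (the class and method names are hypothetical):

    import java.io.IOException;

    import org.apache.lucene.index.LeafReader;
    import org.apache.lucene.index.PointValues;

    // Illustrative guard in the spirit of the FieldCache checks above: accept a points field
    // for uninversion only if it exists, is non-empty, and holds one value per document.
    class SingleValuedPointsCheckExample {
      static PointValues requireSingleValued(LeafReader reader, String field) throws IOException {
        PointValues values = reader.getPointValues(field);
        if (values == null || values.size() == 0) {
          return null; // nothing indexed (or everything deleted) for this field in this segment
        }
        if (values.size() != values.getDocCount()) {
          throw new IllegalStateException(field + " is multi-valued: numValues=" + values.size()
              + ", numDocs=" + values.getDocCount());
        }
        return values;
      }
    }
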
@@ -448,7 +448,7 @@ public class TestDocSet extends LuceneTestCase {
   }

   @Override
-  public PointValues getPointValues() {
+  public PointValues getPointValues(String field) {
     return null;
   }
