Merge pull request #18011 from jimferenczi/point_values_fix

Add XPointValues
Jim Ferenczi 2016-04-27 21:26:55 +02:00
commit 4953bf02f6
5 changed files with 167 additions and 32 deletions

XPointValues.java (new file)

@@ -0,0 +1,130 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lucene.index;

import org.apache.lucene.util.StringHelper;

import java.io.IOException;

/**
 * Forked utility methods from Lucene's PointValues until LUCENE-7257 is released.
 */
public class XPointValues {
    /** Return the cumulated number of points across all leaves of the given
     * {@link IndexReader}. Leaves that do not have points for the given field
     * are ignored.
     * @see PointValues#size(String) */
    public static long size(IndexReader reader, String field) throws IOException {
        long size = 0;
        for (LeafReaderContext ctx : reader.leaves()) {
            FieldInfo info = ctx.reader().getFieldInfos().fieldInfo(field);
            if (info == null || info.getPointDimensionCount() == 0) {
                continue;
            }
            PointValues values = ctx.reader().getPointValues();
            size += values.size(field);
        }
        return size;
    }

    /** Return the cumulated number of docs that have points across all leaves
     * of the given {@link IndexReader}. Leaves that do not have points for the
     * given field are ignored.
     * @see PointValues#getDocCount(String) */
    public static int getDocCount(IndexReader reader, String field) throws IOException {
        int count = 0;
        for (LeafReaderContext ctx : reader.leaves()) {
            FieldInfo info = ctx.reader().getFieldInfos().fieldInfo(field);
            if (info == null || info.getPointDimensionCount() == 0) {
                continue;
            }
            PointValues values = ctx.reader().getPointValues();
            count += values.getDocCount(field);
        }
        return count;
    }

    /** Return the minimum packed values across all leaves of the given
     * {@link IndexReader}. Leaves that do not have points for the given field
     * are ignored.
     * @see PointValues#getMinPackedValue(String) */
    public static byte[] getMinPackedValue(IndexReader reader, String field) throws IOException {
        byte[] minValue = null;
        for (LeafReaderContext ctx : reader.leaves()) {
            FieldInfo info = ctx.reader().getFieldInfos().fieldInfo(field);
            if (info == null || info.getPointDimensionCount() == 0) {
                continue;
            }
            PointValues values = ctx.reader().getPointValues();
            byte[] leafMinValue = values.getMinPackedValue(field);
            if (leafMinValue == null) {
                continue;
            }
            if (minValue == null) {
                minValue = leafMinValue.clone();
            } else {
                final int numDimensions = values.getNumDimensions(field);
                final int numBytesPerDimension = values.getBytesPerDimension(field);
                for (int i = 0; i < numDimensions; ++i) {
                    int offset = i * numBytesPerDimension;
                    if (StringHelper.compare(numBytesPerDimension, leafMinValue, offset, minValue, offset) < 0) {
                        System.arraycopy(leafMinValue, offset, minValue, offset, numBytesPerDimension);
                    }
                }
            }
        }
        return minValue;
    }

    /** Return the maximum packed values across all leaves of the given
     * {@link IndexReader}. Leaves that do not have points for the given field
     * are ignored.
     * @see PointValues#getMaxPackedValue(String) */
    public static byte[] getMaxPackedValue(IndexReader reader, String field) throws IOException {
        byte[] maxValue = null;
        for (LeafReaderContext ctx : reader.leaves()) {
            FieldInfo info = ctx.reader().getFieldInfos().fieldInfo(field);
            if (info == null || info.getPointDimensionCount() == 0) {
                continue;
            }
            PointValues values = ctx.reader().getPointValues();
            byte[] leafMaxValue = values.getMaxPackedValue(field);
            if (leafMaxValue == null) {
                continue;
            }
            if (maxValue == null) {
                maxValue = leafMaxValue.clone();
            } else {
                final int numDimensions = values.getNumDimensions(field);
                final int numBytesPerDimension = values.getBytesPerDimension(field);
                for (int i = 0; i < numDimensions; ++i) {
                    int offset = i * numBytesPerDimension;
                    if (StringHelper.compare(numBytesPerDimension, leafMaxValue, offset, maxValue, offset) > 0) {
                        System.arraycopy(leafMaxValue, offset, maxValue, offset, numBytesPerDimension);
                    }
                }
            }
        }
        return maxValue;
    }

    /** Default constructor */
    private XPointValues() {
    }
}
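A minimal usage sketch of the helper above, not part of this commit: it assumes a hypothetical on-disk index at /tmp/example-index with a hypothetical "timestamp" field indexed as a LongPoint, and it decodes the packed min/max the same way the mappers changed below do.

import java.io.IOException;
import java.nio.file.Paths;

import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.XPointValues;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class XPointValuesUsage {
    public static void main(String[] args) throws IOException {
        // Hypothetical index location and field name, purely for illustration.
        try (Directory dir = FSDirectory.open(Paths.get("/tmp/example-index"));
             IndexReader reader = DirectoryReader.open(dir)) {
            String field = "timestamp";
            long size = XPointValues.size(reader, field);           // points across all leaves
            if (size == 0) {
                System.out.println("No points indexed for " + field);
                return;
            }
            int docCount = XPointValues.getDocCount(reader, field); // docs with at least one point
            long min = LongPoint.decodeDimension(XPointValues.getMinPackedValue(reader, field), 0);
            long max = LongPoint.decodeDimension(XPointValues.getMaxPackedValue(reader, field), 0);
            System.out.println(field + ": size=" + size + ", docCount=" + docCount
                + ", min=" + min + ", max=" + max);
        }
    }
}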

DateFieldMapper.java

@@ -20,12 +20,12 @@
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.document.Field;
-import org.apache.lucene.document.LongPoint;
-import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.StoredField;
+import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.index.XPointValues;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.PointValues;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
@@ -394,13 +394,13 @@ public class DateFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
@Override
public FieldStats.Date stats(IndexReader reader) throws IOException {
String field = name();
-long size = PointValues.size(reader, field);
+long size = XPointValues.size(reader, field);
if (size == 0) {
return new FieldStats.Date(reader.maxDoc(), isSearchable(), isAggregatable(), dateTimeFormatter());
}
-int docCount = PointValues.getDocCount(reader, field);
-byte[] min = PointValues.getMinPackedValue(reader, field);
-byte[] max = PointValues.getMaxPackedValue(reader, field);
+int docCount = XPointValues.getDocCount(reader, field);
+byte[] min = XPointValues.getMinPackedValue(reader, field);
+byte[] max = XPointValues.getMaxPackedValue(reader, field);
return new FieldStats.Date(reader.maxDoc(),docCount, -1L, size,
isSearchable(), isAggregatable(),
dateTimeFormatter(), LongPoint.decodeDimension(min, 0), LongPoint.decodeDimension(max, 0));
@@ -415,13 +415,13 @@ public class DateFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
dateParser = this.dateMathParser;
}
-if (PointValues.size(reader, name()) == 0) {
+if (XPointValues.size(reader, name()) == 0) {
// no points, so nothing matches
return Relation.DISJOINT;
}
-long minValue = LongPoint.decodeDimension(PointValues.getMinPackedValue(reader, name()), 0);
-long maxValue = LongPoint.decodeDimension(PointValues.getMaxPackedValue(reader, name()), 0);
+long minValue = LongPoint.decodeDimension(XPointValues.getMinPackedValue(reader, name()), 0);
+long maxValue = LongPoint.decodeDimension(XPointValues.getMaxPackedValue(reader, name()), 0);
long fromInclusive = Long.MIN_VALUE;
if (from != null) {

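The isFieldWithinQuery change above only swaps PointValues for XPointValues; the rest of the method, cut off by this hunk, decodes the field's packed min/max and compares them with the parsed query bounds. Below is a stand-alone sketch of that kind of comparison under those assumptions; the nested Relation enum and the local names are illustrative, not the mapper's own code.

// Stand-alone sketch: classify an index's [fieldMin, fieldMax] value range
// against an inclusive query range [fromInclusive, toInclusive].
public class RangeRelationSketch {
    enum Relation { WITHIN, INTERSECTS, DISJOINT }

    static Relation relate(long fieldMin, long fieldMax, long fromInclusive, long toInclusive) {
        if (fieldMax < fromInclusive || fieldMin > toInclusive) {
            return Relation.DISJOINT;   // no indexed value can match
        }
        if (fieldMin >= fromInclusive && fieldMax <= toInclusive) {
            return Relation.WITHIN;     // every indexed value matches
        }
        return Relation.INTERSECTS;     // some indexed values may match
    }

    public static void main(String[] args) {
        System.out.println(relate(10, 20, 15, 30)); // INTERSECTS
        System.out.println(relate(10, 20, 0, 100)); // WITHIN
        System.out.println(relate(10, 20, 30, 40)); // DISJOINT
    }
}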
NumberFieldMapper.java

@@ -28,7 +28,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.PointValues;
+import org.apache.lucene.index.XPointValues;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
@@ -260,13 +260,13 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
@Override
FieldStats.Double stats(IndexReader reader, String fieldName,
boolean isSearchable, boolean isAggregatable) throws IOException {
-long size = PointValues.size(reader, fieldName);
+long size = XPointValues.size(reader, fieldName);
if (size == 0) {
return new FieldStats.Double(reader.maxDoc(), isSearchable, isAggregatable);
}
-int docCount = PointValues.getDocCount(reader, fieldName);
-byte[] min = PointValues.getMinPackedValue(reader, fieldName);
-byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
+int docCount = XPointValues.getDocCount(reader, fieldName);
+byte[] min = XPointValues.getMinPackedValue(reader, fieldName);
+byte[] max = XPointValues.getMaxPackedValue(reader, fieldName);
return new FieldStats.Double(reader.maxDoc(),docCount, -1L, size,
isSearchable, isAggregatable,
FloatPoint.decodeDimension(min, 0), FloatPoint.decodeDimension(max, 0));
@@ -351,13 +351,13 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
@Override
FieldStats.Double stats(IndexReader reader, String fieldName,
boolean isSearchable, boolean isAggregatable) throws IOException {
-long size = PointValues.size(reader, fieldName);
+long size = XPointValues.size(reader, fieldName);
if (size == 0) {
return new FieldStats.Double(reader.maxDoc(), isSearchable, isAggregatable);
}
-int docCount = PointValues.getDocCount(reader, fieldName);
-byte[] min = PointValues.getMinPackedValue(reader, fieldName);
-byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
+int docCount = XPointValues.getDocCount(reader, fieldName);
+byte[] min = XPointValues.getMinPackedValue(reader, fieldName);
+byte[] max = XPointValues.getMaxPackedValue(reader, fieldName);
return new FieldStats.Double(reader.maxDoc(),docCount, -1L, size,
isSearchable, isAggregatable,
DoublePoint.decodeDimension(min, 0), DoublePoint.decodeDimension(max, 0));
@@ -565,13 +565,13 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
@Override
FieldStats.Long stats(IndexReader reader, String fieldName,
boolean isSearchable, boolean isAggregatable) throws IOException {
-long size = PointValues.size(reader, fieldName);
+long size = XPointValues.size(reader, fieldName);
if (size == 0) {
return new FieldStats.Long(reader.maxDoc(), isSearchable, isAggregatable);
}
-int docCount = PointValues.getDocCount(reader, fieldName);
-byte[] min = PointValues.getMinPackedValue(reader, fieldName);
-byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
+int docCount = XPointValues.getDocCount(reader, fieldName);
+byte[] min = XPointValues.getMinPackedValue(reader, fieldName);
+byte[] max = XPointValues.getMaxPackedValue(reader, fieldName);
return new FieldStats.Long(reader.maxDoc(),docCount, -1L, size,
isSearchable, isAggregatable,
IntPoint.decodeDimension(min, 0), IntPoint.decodeDimension(max, 0));
@@ -661,13 +661,13 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
@Override
FieldStats.Long stats(IndexReader reader, String fieldName,
boolean isSearchable, boolean isAggregatable) throws IOException {
-long size = PointValues.size(reader, fieldName);
+long size = XPointValues.size(reader, fieldName);
if (size == 0) {
return new FieldStats.Long(reader.maxDoc(), isSearchable, isAggregatable);
}
-int docCount = PointValues.getDocCount(reader, fieldName);
-byte[] min = PointValues.getMinPackedValue(reader, fieldName);
-byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
+int docCount = XPointValues.getDocCount(reader, fieldName);
+byte[] min = XPointValues.getMinPackedValue(reader, fieldName);
+byte[] max = XPointValues.getMaxPackedValue(reader, fieldName);
return new FieldStats.Long(reader.maxDoc(),docCount, -1L, size,
isSearchable, isAggregatable,
LongPoint.decodeDimension(min, 0), LongPoint.decodeDimension(max, 0));

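Each numeric type above decodes the packed min/max bytes with the same point class that indexed the field (FloatPoint, DoublePoint, IntPoint, LongPoint). A small stand-alone round-trip check of those encode/decode calls, not part of this commit:

import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LongPoint;

public class DecodeDimensionRoundTrip {
    public static void main(String[] args) {
        byte[] longBytes = new byte[Long.BYTES];
        LongPoint.encodeDimension(42L, longBytes, 0);
        System.out.println(LongPoint.decodeDimension(longBytes, 0));     // 42

        byte[] intBytes = new byte[Integer.BYTES];
        IntPoint.encodeDimension(7, intBytes, 0);
        System.out.println(IntPoint.decodeDimension(intBytes, 0));       // 7

        byte[] floatBytes = new byte[Float.BYTES];
        FloatPoint.encodeDimension(1.5f, floatBytes, 0);
        System.out.println(FloatPoint.decodeDimension(floatBytes, 0));   // 1.5

        byte[] doubleBytes = new byte[Double.BYTES];
        DoublePoint.encodeDimension(2.5d, doubleBytes, 0);
        System.out.println(DoublePoint.decodeDimension(doubleBytes, 0)); // 2.5
    }
}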
IpFieldMapper.java

@@ -26,7 +26,7 @@ import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.XInetAddressPoint;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.PointValues;
+import org.apache.lucene.index.XPointValues;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
@@ -227,13 +227,13 @@ public class IpFieldMapper extends FieldMapper implements AllFieldMapper.Include
@Override
public FieldStats.Ip stats(IndexReader reader) throws IOException {
String field = name();
-long size = PointValues.size(reader, field);
+long size = XPointValues.size(reader, field);
if (size == 0) {
return new FieldStats.Ip(reader.maxDoc(), isSearchable(), isAggregatable());
}
-int docCount = PointValues.getDocCount(reader, field);
-byte[] min = PointValues.getMinPackedValue(reader, field);
-byte[] max = PointValues.getMaxPackedValue(reader, field);
+int docCount = XPointValues.getDocCount(reader, field);
+byte[] min = XPointValues.getMinPackedValue(reader, field);
+byte[] max = XPointValues.getMaxPackedValue(reader, field);
return new FieldStats.Ip(reader.maxDoc(), docCount, -1L, size,
isSearchable(), isAggregatable(),
InetAddressPoint.decode(min), InetAddressPoint.decode(max));

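The IP stats above turn the packed min/max back into addresses with InetAddressPoint.decode. A hedged stand-alone round trip of that encode/decode pair (the address literal is arbitrary), not part of this commit:

import java.net.InetAddress;
import java.net.UnknownHostException;

import org.apache.lucene.document.InetAddressPoint;

public class InetAddressPointRoundTrip {
    public static void main(String[] args) throws UnknownHostException {
        InetAddress address = InetAddress.getByName("192.168.1.1");
        byte[] packed = InetAddressPoint.encode(address);  // fixed-width form used by the points index
        InetAddress decoded = InetAddressPoint.decode(packed);
        System.out.println(address.getHostAddress() + " -> " + decoded.getHostAddress());
    }
}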
DateFieldTypeTests.java

@@ -113,6 +113,11 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
doTestIsFieldWithinQuery(ft, reader, null, alternateFormat);
doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, null);
doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, alternateFormat);
+// Fields with no value indexed.
+DateFieldType ft2 = new DateFieldType();
+ft2.setName("my_date2");
+assertEquals(Relation.DISJOINT, ft2.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", false, false, null, null));
IOUtils.close(reader, w, dir);
}