mirror of https://github.com/apache/lucene.git
Merge remote-tracking branch 'origin/master'
commit 982654b795
@@ -6,6 +6,9 @@
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$" isTestSource="false" />
</content>
<content url="file://$MODULE_DIR$/../resources">
<sourceFolder url="file://$MODULE_DIR$/../resources" type="java-resource" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" name="Solr core library" level="project" />
@@ -27,14 +30,5 @@
<orderEntry type="module" module-name="queryparser" />
<orderEntry type="module" module-name="join" />
<orderEntry type="module" module-name="sandbox" />
<orderEntry type="module-library">
<library>
<CLASSES>
<root url="file://$MODULE_DIR$/resources" />
</CLASSES>
<JAVADOC />
<SOURCES />
</library>
</orderEntry>
</component>
</module>
@@ -31,14 +31,5 @@
<orderEntry type="module" scope="TEST" module-name="expressions" />
<orderEntry type="module" scope="TEST" module-name="icu" />
<orderEntry type="module" scope="TEST" module-name="analysis-extras" />
<orderEntry type="module-library">
<library>
<CLASSES>
<root url="file://$MODULE_DIR$/resources" />
</CLASSES>
<JAVADOC />
<SOURCES />
</library>
</orderEntry>
</component>
</module>
@@ -29,14 +29,5 @@
<orderEntry type="module" scope="TEST" module-name="solr-core" />
<orderEntry type="module" scope="TEST" module-name="analysis-common" />
<orderEntry type="module" scope="TEST" module-name="lucene-core" />
<orderEntry type="module-library">
<library>
<CLASSES>
<root url="file://$MODULE_DIR$/../../core/src/resources" />
</CLASSES>
<JAVADOC />
<SOURCES />
</library>
</orderEntry>
</component>
</module>
@@ -47,6 +47,11 @@
</dependencies>
<build>
<sourceDirectory>${module-path}</sourceDirectory>
<resources>
<resource>
<directory>${module-path}/../resources</directory>
</resource>
</resources>
<testSourceDirectory/>
<testResources/>
<plugins>
@@ -64,6 +64,9 @@ New Features
* LUCENE-6989: Add preliminary support for MMapDirectory unmapping in Java 9.
  (Uwe Schindler, Chris Hegarty, Peter Levart)

* LUCENE-7040: Upgrade morfologik-stemming to version 2.1.0.
  (Dawid Weiss)

API Changes

* LUCENE-6067: Accountable.getChildResources has a default
@@ -48,7 +48,6 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.FileInputStream;

import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_CHAR;
import org.apache.lucene.util.ArrayUtil;

/**
@@ -453,7 +452,7 @@ class PorterStemmer
public boolean stem(char[] wordBuffer, int offset, int wordLen) {
reset();
if (b.length < wordLen) {
b = new char[ArrayUtil.oversize(wordLen, NUM_BYTES_CHAR)];
b = new char[ArrayUtil.oversize(wordLen, Character.BYTES)];
}
System.arraycopy(wordBuffer, offset, b, 0, wordLen);
i = wordLen;
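Note: the same mechanical substitution recurs throughout this commit — RamUsageEstimator's size constants are replaced with the primitive-width constants that Java 8 added to the wrapper classes. A minimal illustrative sketch of the equivalence (not part of the commit itself):

    // Java 8 exposes each primitive's width in bytes on its wrapper class:
    int charWidth  = Character.BYTES; // 2, replaces RamUsageEstimator.NUM_BYTES_CHAR
    int intWidth   = Integer.BYTES;   // 4, replaces RamUsageEstimator.NUM_BYTES_INT
    int longWidth  = Long.BYTES;      // 8, replaces RamUsageEstimator.NUM_BYTES_LONG
    int floatWidth = Float.BYTES;     // 4, replaces RamUsageEstimator.NUM_BYTES_FLOAT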
@@ -16,7 +16,6 @@
*/
package org.apache.lucene.analysis.miscellaneous;


import java.io.IOException;

import org.apache.lucene.analysis.TokenFilter;
@@ -24,7 +23,6 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;

/**
* This class converts alphabetic, numeric, and symbolic Unicode characters
@@ -142,7 +140,7 @@ public final class ASCIIFoldingFilter extends TokenFilter {
// Worst-case length required:
final int maxSizeNeeded = 4 * length;
if (output.length < maxSizeNeeded) {
output = new char[ArrayUtil.oversize(maxSizeNeeded, RamUsageEstimator.NUM_BYTES_CHAR)];
output = new char[ArrayUtil.oversize(maxSizeNeeded, Character.BYTES)];
}

outputPos = foldToASCII(input, 0, output, 0, length);
@@ -28,7 +28,6 @@ import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.InPlaceMergeSorter;
import org.apache.lucene.util.RamUsageEstimator;

import java.io.IOException;
import java.util.Arrays;
@@ -429,7 +428,7 @@ public final class WordDelimiterFilter extends TokenFilter {
savedType = typeAttribute.type();

if (savedBuffer.length < termAttribute.length()) {
savedBuffer = new char[ArrayUtil.oversize(termAttribute.length(), RamUsageEstimator.NUM_BYTES_CHAR)];
savedBuffer = new char[ArrayUtil.oversize(termAttribute.length(), Character.BYTES)];
}

System.arraycopy(termAttribute.buffer(), 0, savedBuffer, 0, termAttribute.length());
@@ -16,7 +16,6 @@
*/
package org.apache.lucene.analysis.synonym;


import java.io.IOException;
import java.util.Arrays;

@@ -31,11 +30,9 @@ import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.UnicodeUtil;
import org.apache.lucene.util.fst.FST;

/**
@@ -207,12 +204,12 @@ public final class SynonymFilter extends TokenFilter {
outputs = Arrays.copyOf(outputs, ArrayUtil.oversize(1+count, RamUsageEstimator.NUM_BYTES_OBJECT_REF));
}
if (count == endOffsets.length) {
final int[] next = new int[ArrayUtil.oversize(1+count, RamUsageEstimator.NUM_BYTES_INT)];
final int[] next = new int[ArrayUtil.oversize(1+count, Integer.BYTES)];
System.arraycopy(endOffsets, 0, next, 0, count);
endOffsets = next;
}
if (count == posLengths.length) {
final int[] next = new int[ArrayUtil.oversize(1+count, RamUsageEstimator.NUM_BYTES_INT)];
final int[] next = new int[ArrayUtil.oversize(1+count, Integer.BYTES)];
System.arraycopy(posLengths, 0, next, 0, count);
posLengths = next;
}
@@ -16,12 +16,10 @@
*/
package org.apache.lucene.analysis.util;


import java.io.IOException;
import java.io.Reader;

import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;

/** Acts like a forever growing char[] as you read
* characters into it from the provided reader, but
@@ -71,7 +69,7 @@ public final class RollingCharBuffer {
}
if (count == buffer.length) {
// Grow
final char[] newBuffer = new char[ArrayUtil.oversize(1+count, RamUsageEstimator.NUM_BYTES_CHAR)];
final char[] newBuffer = new char[ArrayUtil.oversize(1+count, Character.BYTES)];
//System.out.println(Thread.currentThread().getName() + ": cb grow " + newBuffer.length);
System.arraycopy(buffer, nextWrite, newBuffer, 0, buffer.length - nextWrite);
System.arraycopy(buffer, 0, newBuffer, buffer.length - nextWrite, nextWrite);
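The grow-and-copy pattern above is the standard use of ArrayUtil.oversize: pass the minimum required length plus the per-element width, and it returns a somewhat larger capacity so repeated growth stays amortized. An illustrative growth step under those assumptions:

    // Grow a full char[] to fit at least one more element, preserving content:
    char[] buffer = new char[16];
    int count = buffer.length; // buffer is full
    char[] newBuffer = new char[ArrayUtil.oversize(1 + count, Character.BYTES)];
    System.arraycopy(buffer, 0, newBuffer, 0, count);
    buffer = newBuffer;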
@@ -29,11 +29,9 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/


package org.tartarus.snowball;

import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;

/**
* This is the rev 502 of the Snowball SVN trunk,
@@ -397,7 +395,7 @@ public abstract class SnowballProgram {
final int newLength = limit + adjustment;
//resize if necessary
if (newLength > current.length) {
char newBuffer[] = new char[ArrayUtil.oversize(newLength, RamUsageEstimator.NUM_BYTES_CHAR)];
char newBuffer[] = new char[ArrayUtil.oversize(newLength, Character.BYTES)];
System.arraycopy(current, 0, newBuffer, 0, limit);
current = newBuffer;
}
@@ -16,7 +16,6 @@
*/
package org.apache.lucene.analysis.ja;


import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
@@ -1053,7 +1052,7 @@ public final class JapaneseTokenizer extends Tokenizer {
assert baseOffset <= lastOffset;
int size = lastOffset - baseOffset + 1;
if (rootCapacity < size) {
int oversize = ArrayUtil.oversize(size, RamUsageEstimator.NUM_BYTES_INT);
int oversize = ArrayUtil.oversize(size, Integer.BYTES);
lRoot = new int[oversize];
rRoot = new int[oversize];
rootCapacity = oversize;
@@ -1067,7 +1066,7 @@ public final class JapaneseTokenizer extends Tokenizer {
// Reserve at least N nodes.
private void reserve(int n) {
if (capacity < n) {
int oversize = ArrayUtil.oversize(n, RamUsageEstimator.NUM_BYTES_INT);
int oversize = ArrayUtil.oversize(n, Integer.BYTES);
nodeDicType = new Type[oversize];
nodeWordID = new int[oversize];
nodeMark = new int[oversize];
@@ -537,7 +537,7 @@ class Lucene50DocValuesProducer extends DocValuesProducer implements Closeable {
addresses = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, bytes.count+1, false);
if (!merging) {
addressInstances.put(field.name, addresses);
ramBytesUsed.addAndGet(addresses.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
ramBytesUsed.addAndGet(addresses.ramBytesUsed() + Integer.BYTES);
}
}
return addresses;
@@ -577,7 +577,7 @@ class Lucene50DocValuesProducer extends DocValuesProducer implements Closeable {
addresses = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, size, false);
if (!merging) {
addressInstances.put(field.name, addresses);
ramBytesUsed.addAndGet(addresses.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
ramBytesUsed.addAndGet(addresses.ramBytesUsed() + Integer.BYTES);
}
}
return addresses;
@@ -662,7 +662,7 @@ class Lucene50DocValuesProducer extends DocValuesProducer implements Closeable {
instance = MonotonicBlockPackedReader.of(data, entry.packedIntsVersion, entry.blockSize, entry.count+1, false);
if (!merging) {
ordIndexInstances.put(field.name, instance);
ramBytesUsed.addAndGet(instance.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
ramBytesUsed.addAndGet(instance.ramBytesUsed() + Integer.BYTES);
}
}
return instance;
@@ -228,6 +228,6 @@ final class FSTOrdsOutputs extends Outputs<FSTOrdsOutputs.Output> {

@Override
public long ramBytesUsed(Output output) {
return 2 * RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2 * RamUsageEstimator.NUM_BYTES_LONG + 2 * RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 2 * RamUsageEstimator.NUM_BYTES_INT + output.bytes.length;
return 2 * RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2 * Long.BYTES + 2 * RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 2 * Integer.BYTES + output.bytes.length;
}
}
@@ -16,7 +16,6 @@
*/
package org.apache.lucene.codecs.bloom;


import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
@@ -45,10 +44,8 @@ import org.apache.lucene.store.DataOutput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.automaton.CompiledAutomaton;

/**
@@ -380,7 +377,7 @@ public final class BloomFilteringPostingsFormat extends PostingsFormat {
public long ramBytesUsed() {
long sizeInBytes = ((delegateFieldsProducer!=null) ? delegateFieldsProducer.ramBytesUsed() : 0);
for(Map.Entry<String,FuzzySet> entry: bloomsByFieldName.entrySet()) {
sizeInBytes += entry.getKey().length() * RamUsageEstimator.NUM_BYTES_CHAR;
sizeInBytes += entry.getKey().length() * Character.BYTES;
sizeInBytes += entry.getValue().ramBytesUsed();
}
return sizeInBytes;
@@ -16,7 +16,6 @@
*/
package org.apache.lucene.codecs.memory;


import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
@@ -154,7 +153,7 @@ public final class DirectPostingsFormat extends PostingsFormat {
public long ramBytesUsed() {
long sizeInBytes = 0;
for(Map.Entry<String,DirectField> entry: fields.entrySet()) {
sizeInBytes += entry.getKey().length() * RamUsageEstimator.NUM_BYTES_CHAR;
sizeInBytes += entry.getKey().length() * Character.BYTES;
sizeInBytes += entry.getValue().ramBytesUsed();
}
return sizeInBytes;
@@ -16,7 +16,6 @@
*/
package org.apache.lucene.codecs.memory;


import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
@@ -50,12 +49,10 @@ import org.apache.lucene.store.RAMOutputStream;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.IntsRefBuilder;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.fst.Builder;
import org.apache.lucene.util.fst.ByteSequenceOutputs;
import org.apache.lucene.util.fst.BytesRefFSTEnum;
@@ -1016,7 +1013,7 @@ public final class MemoryPostingsFormat extends PostingsFormat {
public long ramBytesUsed() {
long sizeInBytes = 0;
for(Map.Entry<String,TermsReader> entry: fields.entrySet()) {
sizeInBytes += (entry.getKey().length() * RamUsageEstimator.NUM_BYTES_CHAR);
sizeInBytes += (entry.getKey().length() * Character.BYTES);
sizeInBytes += entry.getValue().ramBytesUsed();
}
return sizeInBytes;
@@ -16,7 +16,6 @@
*/
package org.apache.lucene.analysis.tokenattributes;


import java.nio.CharBuffer;

import org.apache.lucene.util.ArrayUtil;
@@ -24,13 +23,12 @@ import org.apache.lucene.util.AttributeImpl;
import org.apache.lucene.util.AttributeReflector;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.RamUsageEstimator;

/** Default implementation of {@link CharTermAttribute}. */
public class CharTermAttributeImpl extends AttributeImpl implements CharTermAttribute, TermToBytesRefAttribute, Cloneable {
private static int MIN_BUFFER_SIZE = 10;

private char[] termBuffer = new char[ArrayUtil.oversize(MIN_BUFFER_SIZE, RamUsageEstimator.NUM_BYTES_CHAR)];
private char[] termBuffer = new char[ArrayUtil.oversize(MIN_BUFFER_SIZE, Character.BYTES)];
private int termLength = 0;

/** May be used by subclasses to convert to different charsets / encodings for implementing {@link #getBytesRef()}. */
@@ -56,7 +54,7 @@ public class CharTermAttributeImpl extends AttributeImpl implements CharTermAttr
if(termBuffer.length < newSize){
// Not big enough; create a new array with slight
// over allocation and preserve content
final char[] newCharBuffer = new char[ArrayUtil.oversize(newSize, RamUsageEstimator.NUM_BYTES_CHAR)];
final char[] newCharBuffer = new char[ArrayUtil.oversize(newSize, Character.BYTES)];
System.arraycopy(termBuffer, 0, newCharBuffer, 0, termBuffer.length);
termBuffer = newCharBuffer;
}
@@ -67,7 +65,7 @@ public class CharTermAttributeImpl extends AttributeImpl implements CharTermAttr
if(termBuffer.length < newSize){
// Not big enough; create a new array with slight
// over allocation:
termBuffer = new char[ArrayUtil.oversize(newSize, RamUsageEstimator.NUM_BYTES_CHAR)];
termBuffer = new char[ArrayUtil.oversize(newSize, Character.BYTES)];
}
}
@@ -37,7 +37,7 @@ public abstract class PointWriter implements Closeable {
/** Write all values contained in the provided reader */
public abstract void writeField(FieldInfo fieldInfo, PointReader values) throws IOException;

/** Default naive merge implemenation for one field: it just re-indexes all the values
/** Default naive merge implementation for one field: it just re-indexes all the values
* from the incoming segment. The default codec overrides this for 1D fields and uses
* a faster but more complex implementation. */
protected void mergeOneField(MergeState mergeState, FieldInfo fieldInfo) throws IOException {
@@ -16,7 +16,6 @@
*/
package org.apache.lucene.codecs.lucene54;


import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
@@ -753,7 +752,7 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
addresses = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, size, false);
if (!merging) {
addressInstances.put(field.name, addresses);
ramBytesUsed.addAndGet(addresses.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
ramBytesUsed.addAndGet(addresses.ramBytesUsed() + Integer.BYTES);
}
}
return addresses;
@@ -64,10 +64,8 @@ import org.apache.lucene.index.SegmentWriteState;
* <li> (fieldNumber (vInt), fieldFilePointer (vLong))<sup>fieldCount</sup>
* </ul>
*
* <p> After that, {@link CodecUtil#writeFooter} writes the checksum.
*
* <p>After all fields blocks + index data are written, {@link CodecUtil#writeFooter} writes the checksum.

*
* @lucene.experimental
*/
@@ -16,7 +16,6 @@
*/
package org.apache.lucene.codecs.perfield;


import java.io.Closeable;
import java.io.IOException;
import java.util.Collection;
@@ -44,7 +43,6 @@ import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.RamUsageEstimator;

/**
* Enables per field docvalues support.
@@ -324,8 +322,7 @@ public abstract class PerFieldDocValuesFormat extends DocValuesFormat {
public long ramBytesUsed() {
long size = 0;
for (Map.Entry<String,DocValuesProducer> entry : formats.entrySet()) {
size += (entry.getKey().length() * RamUsageEstimator.NUM_BYTES_CHAR) +
entry.getValue().ramBytesUsed();
size += (entry.getKey().length() * Character.BYTES) + entry.getValue().ramBytesUsed();
}
return size;
}
@@ -21,7 +21,7 @@ import org.apache.lucene.util.BytesRef;

/** A binary field that is indexed dimensionally such that finding
* all documents within an N-dimensional shape or range at search time is
* efficient. Muliple values for the same field in one documents
* efficient. Multiple values for the same field in one documents
* is allowed. */

public final class BinaryPoint extends Field {
@@ -16,21 +16,19 @@
*/
package org.apache.lucene.document;


import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.RamUsageEstimator;

/** A double field that is indexed dimensionally such that finding
* all documents within an N-dimensional shape or range at search time is
* efficient. Muliple values for the same field in one documents
* efficient. Multiple values for the same field in one documents
* is allowed. */

public final class DoublePoint extends Field {

private static FieldType getType(int numDims) {
FieldType type = new FieldType();
type.setDimensions(numDims, RamUsageEstimator.NUM_BYTES_LONG);
type.setDimensions(numDims, Double.BYTES);
type.freeze();
return type;
}
@@ -59,8 +57,8 @@ public final class DoublePoint extends Field {
throw new IllegalStateException("this field (name=" + name + ") uses " + type.pointDimensionCount() + " dimensions; cannot convert to a single numeric value");
}
BytesRef bytes = (BytesRef) fieldsData;
assert bytes.length == RamUsageEstimator.NUM_BYTES_LONG;
return NumericUtils.sortableLongToDouble(NumericUtils.bytesToLongDirect(bytes.bytes, bytes.offset));
assert bytes.length == Double.BYTES;
return decodeDimension(bytes.bytes, bytes.offset);
}

private static BytesRef pack(double... point) {
@@ -70,10 +68,10 @@ public final class DoublePoint extends Field {
if (point.length == 0) {
throw new IllegalArgumentException("point cannot be 0 dimensions");
}
byte[] packed = new byte[point.length * RamUsageEstimator.NUM_BYTES_LONG];
byte[] packed = new byte[point.length * Double.BYTES];

for(int dim=0;dim<point.length;dim++) {
NumericUtils.longToBytesDirect(NumericUtils.doubleToSortableLong(point[dim]), packed, dim);
for (int dim = 0; dim < point.length ; dim++) {
encodeDimension(point[dim], packed, dim * Double.BYTES);
}

return new BytesRef(packed);
@@ -89,4 +87,28 @@ public final class DoublePoint extends Field {
public DoublePoint(String name, double... point) {
super(name, pack(point), getType(point.length));
}

// public helper methods (e.g. for queries)

/** Encode n-dimensional double point into binary encoding */
public static byte[][] encode(Double value[]) {
byte[][] encoded = new byte[value.length][];
for (int i = 0; i < value.length; i++) {
if (value[i] != null) {
encoded[i] = new byte[Double.BYTES];
encodeDimension(value[i], encoded[i], 0);
}
}
return encoded;
}

/** Encode single double dimension */
public static void encodeDimension(Double value, byte dest[], int offset) {
NumericUtils.longToBytesDirect(NumericUtils.doubleToSortableLong(value), dest, offset);
}

/** Decode single double dimension */
public static Double decodeDimension(byte value[], int offset) {
return NumericUtils.sortableLongToDouble(NumericUtils.bytesToLongDirect(value, offset));
}
}
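The helpers added to DoublePoint expose the point encoding for reuse by queries. A hypothetical round trip through them (values are illustrative):

    // Encode one double dimension into its sortable binary form, then decode:
    byte[] dest = new byte[Double.BYTES];
    DoublePoint.encodeDimension(2.5, dest, 0);
    double back = DoublePoint.decodeDimension(dest, 0); // 2.5 again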
@@ -16,21 +16,19 @@
*/
package org.apache.lucene.document;


import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.RamUsageEstimator;

/** A field that is indexed dimensionally such that finding
* all documents within an N-dimensional at search time is
* efficient. Muliple values for the same field in one documents
* efficient. Multiple values for the same field in one documents
* is allowed. */

public final class FloatPoint extends Field {

private static FieldType getType(int numDims) {
FieldType type = new FieldType();
type.setDimensions(numDims, RamUsageEstimator.NUM_BYTES_INT);
type.setDimensions(numDims, Float.BYTES);
type.freeze();
return type;
}
@@ -59,8 +57,8 @@ public final class FloatPoint extends Field {
throw new IllegalStateException("this field (name=" + name + ") uses " + type.pointDimensionCount() + " dimensions; cannot convert to a single numeric value");
}
BytesRef bytes = (BytesRef) fieldsData;
assert bytes.length == RamUsageEstimator.NUM_BYTES_INT;
return NumericUtils.sortableIntToFloat(NumericUtils.bytesToIntDirect(bytes.bytes, bytes.offset));
assert bytes.length == Float.BYTES;
return decodeDimension(bytes.bytes, bytes.offset);
}

private static BytesRef pack(float... point) {
@@ -70,10 +68,10 @@ public final class FloatPoint extends Field {
if (point.length == 0) {
throw new IllegalArgumentException("point cannot be 0 dimensions");
}
byte[] packed = new byte[point.length * RamUsageEstimator.NUM_BYTES_INT];
byte[] packed = new byte[point.length * Float.BYTES];

for(int dim=0;dim<point.length;dim++) {
NumericUtils.intToBytesDirect(NumericUtils.floatToSortableInt(point[dim]), packed, dim);
for (int dim = 0; dim < point.length; dim++) {
encodeDimension(point[dim], packed, dim * Float.BYTES);
}

return new BytesRef(packed);
@@ -89,4 +87,28 @@ public final class FloatPoint extends Field {
public FloatPoint(String name, float... point) {
super(name, pack(point), getType(point.length));
}

// public helper methods (e.g. for queries)

/** Encode n-dimensional float values into binary encoding */
public static byte[][] encode(Float value[]) {
byte[][] encoded = new byte[value.length][];
for (int i = 0; i < value.length; i++) {
if (value[i] != null) {
encoded[i] = new byte[Float.BYTES];
encodeDimension(value[i], encoded[i], 0);
}
}
return encoded;
}

/** Encode single float dimension */
public static void encodeDimension(Float value, byte dest[], int offset) {
NumericUtils.intToBytesDirect(NumericUtils.floatToSortableInt(value), dest, offset);
}

/** Decode single float dimension */
public static Float decodeDimension(byte value[], int offset) {
return NumericUtils.sortableIntToFloat(NumericUtils.bytesToIntDirect(value, offset));
}
}
@@ -16,21 +16,19 @@
*/
package org.apache.lucene.document;


import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.RamUsageEstimator;

/** An int field that is indexed dimensionally such that finding
* all documents within an N-dimensional shape or range at search time is
* efficient. Muliple values for the same field in one documents
* efficient. Multiple values for the same field in one documents
* is allowed. */

public final class IntPoint extends Field {

private static FieldType getType(int numDims) {
FieldType type = new FieldType();
type.setDimensions(numDims, RamUsageEstimator.NUM_BYTES_INT);
type.setDimensions(numDims, Integer.BYTES);
type.freeze();
return type;
}
@@ -59,8 +57,8 @@ public final class IntPoint extends Field {
throw new IllegalStateException("this field (name=" + name + ") uses " + type.pointDimensionCount() + " dimensions; cannot convert to a single numeric value");
}
BytesRef bytes = (BytesRef) fieldsData;
assert bytes.length == RamUsageEstimator.NUM_BYTES_INT;
return NumericUtils.bytesToInt(bytes.bytes, bytes.offset);
assert bytes.length == Integer.BYTES;
return decodeDimension(bytes.bytes, bytes.offset);
}

private static BytesRef pack(int... point) {
@@ -70,10 +68,10 @@ public final class IntPoint extends Field {
if (point.length == 0) {
throw new IllegalArgumentException("point cannot be 0 dimensions");
}
byte[] packed = new byte[point.length * RamUsageEstimator.NUM_BYTES_INT];
byte[] packed = new byte[point.length * Integer.BYTES];

for(int dim=0;dim<point.length;dim++) {
NumericUtils.intToBytes(point[dim], packed, dim);
for (int dim = 0; dim < point.length; dim++) {
encodeDimension(point[dim], packed, dim * Integer.BYTES);
}

return new BytesRef(packed);
@@ -89,4 +87,28 @@ public final class IntPoint extends Field {
public IntPoint(String name, int... point) {
super(name, pack(point), getType(point.length));
}

// public helper methods (e.g. for queries)

/** Encode n-dimensional integer values into binary encoding */
public static byte[][] encode(Integer value[]) {
byte[][] encoded = new byte[value.length][];
for (int i = 0; i < value.length; i++) {
if (value[i] != null) {
encoded[i] = new byte[Integer.BYTES];
encodeDimension(value[i], encoded[i], 0);
}
}
return encoded;
}

/** Encode single integer dimension */
public static void encodeDimension(Integer value, byte dest[], int offset) {
NumericUtils.intToBytes(value, dest, offset);
}

/** Decode single integer dimension */
public static Integer decodeDimension(byte value[], int offset) {
return NumericUtils.bytesToInt(value, offset);
}
}
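IntPoint gains the same helpers; note that encode() skips null entries, leaving the corresponding slot of the result null. A hypothetical usage:

    Integer[] values = { 7, null };                   // null dimension stays unencoded
    byte[][] enc = IntPoint.encode(values);           // enc[0] has 4 bytes, enc[1] == null
    int first = IntPoint.decodeDimension(enc[0], 0);  // 7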
@@ -16,21 +16,19 @@
*/
package org.apache.lucene.document;


import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.RamUsageEstimator;

/** A long field that is indexed dimensionally such that finding
* all documents within an N-dimensional shape or range at search time is
* efficient. Muliple values for the same field in one documents
* efficient. Multiple values for the same field in one documents
* is allowed. */

public final class LongPoint extends Field {

private static FieldType getType(int numDims) {
FieldType type = new FieldType();
type.setDimensions(numDims, RamUsageEstimator.NUM_BYTES_LONG);
type.setDimensions(numDims, Long.BYTES);
type.freeze();
return type;
}
@@ -59,8 +57,8 @@ public final class LongPoint extends Field {
throw new IllegalStateException("this field (name=" + name + ") uses " + type.pointDimensionCount() + " dimensions; cannot convert to a single numeric value");
}
BytesRef bytes = (BytesRef) fieldsData;
assert bytes.length == RamUsageEstimator.NUM_BYTES_LONG;
return NumericUtils.bytesToLong(bytes.bytes, bytes.offset);
assert bytes.length == Long.BYTES;
return decodeDimension(bytes.bytes, bytes.offset);
}

private static BytesRef pack(long... point) {
@@ -70,10 +68,10 @@ public final class LongPoint extends Field {
if (point.length == 0) {
throw new IllegalArgumentException("point cannot be 0 dimensions");
}
byte[] packed = new byte[point.length * RamUsageEstimator.NUM_BYTES_LONG];
byte[] packed = new byte[point.length * Long.BYTES];

for(int dim=0;dim<point.length;dim++) {
NumericUtils.longToBytes(point[dim], packed, dim);
for (int dim = 0; dim < point.length; dim++) {
encodeDimension(point[dim], packed, dim * Long.BYTES);
}

return new BytesRef(packed);
@@ -89,4 +87,28 @@ public final class LongPoint extends Field {
public LongPoint(String name, long... point) {
super(name, pack(point), getType(point.length));
}

// public helper methods (e.g. for queries)

/** Encode n-dimensional long values into binary encoding */
public static byte[][] encode(Long value[]) {
byte[][] encoded = new byte[value.length][];
for (int i = 0; i < value.length; i++) {
if (value[i] != null) {
encoded[i] = new byte[Long.BYTES];
encodeDimension(value[i], encoded[i], 0);
}
}
return encoded;
}

/** Encode single long dimension */
public static void encodeDimension(Long value, byte dest[], int offset) {
NumericUtils.longToBytes(value, dest, offset);
}

/** Decode single long dimension */
public static Long decodeDimension(byte value[], int offset) {
return NumericUtils.bytesToLong(value, offset);
}
}
@@ -52,19 +52,19 @@ class BufferedUpdates {
Term's text is String (OBJ_HEADER + 4*INT + POINTER +
OBJ_HEADER + string.length*CHAR). Integer is
OBJ_HEADER + INT. */
final static int BYTES_PER_DEL_TERM = 9*RamUsageEstimator.NUM_BYTES_OBJECT_REF + 7*RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 10*RamUsageEstimator.NUM_BYTES_INT;
final static int BYTES_PER_DEL_TERM = 9*RamUsageEstimator.NUM_BYTES_OBJECT_REF + 7*RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 10*Integer.BYTES;

/* Rough logic: del docIDs are List<Integer>. Say list
allocates ~2X size (2*POINTER). Integer is OBJ_HEADER
+ int */
final static int BYTES_PER_DEL_DOCID = 2*RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_INT;
final static int BYTES_PER_DEL_DOCID = 2*RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + Integer.BYTES;

/* Rough logic: HashMap has an array[Entry] w/ varying
load factor (say 2 * POINTER). Entry is object w/
Query key, Integer val, int hash, Entry next
(OBJ_HEADER + 3*POINTER + INT). Query we often
undercount (say 24 bytes). Integer is OBJ_HEADER + INT. */
final static int BYTES_PER_DEL_QUERY = 5*RamUsageEstimator.NUM_BYTES_OBJECT_REF + 2*RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2*RamUsageEstimator.NUM_BYTES_INT + 24;
final static int BYTES_PER_DEL_QUERY = 5*RamUsageEstimator.NUM_BYTES_OBJECT_REF + 2*RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2*Integer.BYTES + 24;

/* Rough logic: NumericUpdate calculates its actual size,
* including the update Term and DV field (String). The
@@ -82,7 +82,7 @@ class BufferedUpdates {
*/
final static int BYTES_PER_NUMERIC_FIELD_ENTRY =
7*RamUsageEstimator.NUM_BYTES_OBJECT_REF + 3*RamUsageEstimator.NUM_BYTES_OBJECT_HEADER +
RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 5*RamUsageEstimator.NUM_BYTES_INT + RamUsageEstimator.NUM_BYTES_FLOAT;
RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 5*Integer.BYTES + Float.BYTES;

/* Rough logic: Incremented when we see another Term for an already updated
* field.
@@ -93,7 +93,7 @@ class BufferedUpdates {
* Term (key) is counted only as POINTER.
* NumericUpdate (val) counts its own size and isn't accounted for here.
*/
final static int BYTES_PER_NUMERIC_UPDATE_ENTRY = 7*RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_INT;
final static int BYTES_PER_NUMERIC_UPDATE_ENTRY = 7*RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + Integer.BYTES;

/* Rough logic: BinaryUpdate calculates its actual size,
* including the update Term and DV field (String). The
@@ -111,7 +111,7 @@ class BufferedUpdates {
*/
final static int BYTES_PER_BINARY_FIELD_ENTRY =
7*RamUsageEstimator.NUM_BYTES_OBJECT_REF + 3*RamUsageEstimator.NUM_BYTES_OBJECT_HEADER +
RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 5*RamUsageEstimator.NUM_BYTES_INT + RamUsageEstimator.NUM_BYTES_FLOAT;
RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 5*Integer.BYTES + Float.BYTES;

/* Rough logic: Incremented when we see another Term for an already updated
* field.
@@ -122,7 +122,7 @@ class BufferedUpdates {
* Term (key) is counted only as POINTER.
* BinaryUpdate (val) counts its own size and isn't accounted for here.
*/
final static int BYTES_PER_BINARY_UPDATE_ENTRY = 7*RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_INT;
final static int BYTES_PER_BINARY_UPDATE_ENTRY = 7*RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + Integer.BYTES;

final AtomicInteger numTermDeletes = new AtomicInteger();
final AtomicInteger numNumericUpdates = new AtomicInteger();
@@ -226,7 +226,7 @@ class BufferedUpdates {
// is done to respect IndexWriterConfig.setMaxBufferedDeleteTerms.
numTermDeletes.incrementAndGet();
if (current == null) {
bytesUsed.addAndGet(BYTES_PER_DEL_TERM + term.bytes.length + (RamUsageEstimator.NUM_BYTES_CHAR * term.field().length()));
bytesUsed.addAndGet(BYTES_PER_DEL_TERM + term.bytes.length + (Character.BYTES * term.field().length()));
}
}
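Only the primitive widths change in these per-entry estimates; the rough accounting itself is untouched. As an illustration, BYTES_PER_DEL_DOCID works out as follows if one assumes 8-byte references and 16-byte object headers (typical 64-bit JVM values, not guaranteed):

    int ref = 8, header = 16; // assumed JVM sizes, not from the commit
    int bytesPerDelDocId = 2 * ref + header + Integer.BYTES; // 2*8 + 16 + 4 = 36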
@@ -672,11 +672,17 @@ public class ConcurrentMergeScheduler extends MergeScheduler {

/** Used for testing */
void setSuppressExceptions() {
if (verbose()) {
message("will suppress merge exceptions");
}
suppressExceptions = true;
}

/** Used for testing */
void clearSuppressExceptions() {
if (verbose()) {
message("will not suppress merge exceptions");
}
suppressExceptions = false;
}
@@ -16,16 +16,12 @@
*/
package org.apache.lucene.index;


import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_ARRAY_HEADER;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_CHAR;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_INT;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_HEADER;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_REF;

import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator;

/** An in-place update to a DocValues field. */
abstract class DocValuesUpdate {
@@ -37,7 +33,7 @@ abstract class DocValuesUpdate {
* String: 2*OBJ_HEADER + 4*INT + PTR + string.length*CHAR
* T: OBJ_HEADER
*/
private static final int RAW_SIZE_IN_BYTES = 8*NUM_BYTES_OBJECT_HEADER + 8*NUM_BYTES_OBJECT_REF + 8*NUM_BYTES_INT;
private static final int RAW_SIZE_IN_BYTES = 8*NUM_BYTES_OBJECT_HEADER + 8*NUM_BYTES_OBJECT_REF + 8*Integer.BYTES;

final DocValuesType type;
final Term term;
@@ -63,9 +59,9 @@ abstract class DocValuesUpdate {

final int sizeInBytes() {
int sizeInBytes = RAW_SIZE_IN_BYTES;
sizeInBytes += term.field.length() * NUM_BYTES_CHAR;
sizeInBytes += term.field.length() * Character.BYTES;
sizeInBytes += term.bytes.bytes.length;
sizeInBytes += field.length() * NUM_BYTES_CHAR;
sizeInBytes += field.length() * Character.BYTES;
sizeInBytes += valueSizeInBytes();
return sizeInBytes;
}
@@ -79,7 +75,7 @@ abstract class DocValuesUpdate {
static final class BinaryDocValuesUpdate extends DocValuesUpdate {

/* Size of BytesRef: 2*INT + ARRAY_HEADER + PTR */
private static final long RAW_VALUE_SIZE_IN_BYTES = NUM_BYTES_ARRAY_HEADER + 2*NUM_BYTES_INT + NUM_BYTES_OBJECT_REF;
private static final long RAW_VALUE_SIZE_IN_BYTES = NUM_BYTES_ARRAY_HEADER + 2*Integer.BYTES + NUM_BYTES_OBJECT_REF;

BinaryDocValuesUpdate(Term term, String field, BytesRef value) {
super(DocValuesType.BINARY, term, field, value);
@@ -101,7 +97,7 @@ abstract class DocValuesUpdate {

@Override
long valueSizeInBytes() {
return RamUsageEstimator.NUM_BYTES_LONG;
return Long.BYTES;
}

}
@@ -16,7 +16,6 @@
*/
package org.apache.lucene.index;


import java.io.IOException;
import java.text.NumberFormat;
import java.util.Collections;
@@ -40,7 +39,6 @@ import org.apache.lucene.util.Counter;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.IntBlockPool;
import org.apache.lucene.util.MutableBits;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.Version;

@@ -576,14 +574,13 @@ class DocumentsWriterPerThread {
@Override
public int[] getIntBlock() {
int[] b = new int[IntBlockPool.INT_BLOCK_SIZE];
bytesUsed.addAndGet(IntBlockPool.INT_BLOCK_SIZE
* RamUsageEstimator.NUM_BYTES_INT);
bytesUsed.addAndGet(IntBlockPool.INT_BLOCK_SIZE * Integer.BYTES);
return b;
}

@Override
public void recycleIntBlocks(int[][] blocks, int offset, int length) {
bytesUsed.addAndGet(-(length * (IntBlockPool.INT_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT)));
bytesUsed.addAndGet(-(length * (IntBlockPool.INT_BLOCK_SIZE * Integer.BYTES)));
}

}
@@ -16,13 +16,11 @@
*/
package org.apache.lucene.index;


import java.io.IOException;

import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator;

// TODO: break into separate freq and prox writers as
// codecs; make separate container (tii/tis/skip/*) that can
@@ -257,15 +255,15 @@ final class FreqProxTermsWriterPerField extends TermsHashPerField {

@Override
int bytesPerPosting() {
int bytes = ParallelPostingsArray.BYTES_PER_POSTING + 2 * RamUsageEstimator.NUM_BYTES_INT;
int bytes = ParallelPostingsArray.BYTES_PER_POSTING + 2 * Integer.BYTES;
if (lastPositions != null) {
bytes += RamUsageEstimator.NUM_BYTES_INT;
bytes += Integer.BYTES;
}
if (lastOffsets != null) {
bytes += RamUsageEstimator.NUM_BYTES_INT;
bytes += Integer.BYTES;
}
if (termFreqs != null) {
bytes += RamUsageEstimator.NUM_BYTES_INT;
bytes += Integer.BYTES;
}

return bytes;
@@ -16,7 +16,6 @@
*/
package org.apache.lucene.index;


import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
@@ -40,7 +39,7 @@ import org.apache.lucene.util.RamUsageEstimator;
class FrozenBufferedUpdates {

/* Query we often undercount (say 24 bytes), plus int. */
final static int BYTES_PER_DEL_QUERY = RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_INT + 24;
final static int BYTES_PER_DEL_QUERY = RamUsageEstimator.NUM_BYTES_OBJECT_REF + Integer.BYTES + 24;

// Terms, in sorted order:
final PrefixCodedTerms terms;
@@ -16,12 +16,10 @@
*/
package org.apache.lucene.index;


import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;

class ParallelPostingsArray {
final static int BYTES_PER_POSTING = 3 * RamUsageEstimator.NUM_BYTES_INT;
final static int BYTES_PER_POSTING = 3 * Integer.BYTES;

final int size;
final int[] textStarts;
@@ -16,7 +16,6 @@
*/
package org.apache.lucene.index;


import java.io.IOException;

import org.apache.lucene.codecs.PointReader;
@@ -25,7 +24,6 @@ import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.ByteBlockPool;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Counter;
import org.apache.lucene.util.RamUsageEstimator;

/** Buffers up pending byte[][] value(s) per doc, then flushes when segment flushes. */
class PointValuesWriter {
@@ -41,7 +39,7 @@ class PointValuesWriter {
this.iwBytesUsed = docWriter.bytesUsed;
this.bytes = new ByteBlockPool(docWriter.byteBlockAllocator);
docIDs = new int[16];
iwBytesUsed.addAndGet(16 * RamUsageEstimator.NUM_BYTES_INT);
iwBytesUsed.addAndGet(16 * Integer.BYTES);
packedValue = new byte[fieldInfo.getPointDimensionCount() * fieldInfo.getPointNumBytes()];
}

@@ -54,7 +52,7 @@ class PointValuesWriter {
}
if (docIDs.length == numDocs) {
docIDs = ArrayUtil.grow(docIDs, numDocs+1);
iwBytesUsed.addAndGet((docIDs.length - numDocs) * RamUsageEstimator.NUM_BYTES_INT);
iwBytesUsed.addAndGet((docIDs.length - numDocs) * Integer.BYTES);
}
bytes.append(value);
docIDs[numDocs] = docID;
@@ -16,7 +16,6 @@
*/
package org.apache.lucene.index;


import java.io.IOException;
import java.util.Objects;

@@ -27,7 +26,6 @@ import org.apache.lucene.store.RAMOutputStream;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.RamUsageEstimator;

/**
* Prefix codes term instances (prefixes are shared)
@@ -45,7 +43,7 @@ public class PrefixCodedTerms implements Accountable {

@Override
public long ramBytesUsed() {
return buffer.ramBytesUsed() + 2 * RamUsageEstimator.NUM_BYTES_LONG;
return buffer.ramBytesUsed() + 2 * Long.BYTES;
}

/** Records del gen for this packet. */
@@ -16,7 +16,6 @@
*/
package org.apache.lucene.index;


import static org.apache.lucene.util.ByteBlockPool.BYTE_BLOCK_SIZE;

import java.io.IOException;
@@ -29,7 +28,6 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefHash.DirectBytesStartArray;
import org.apache.lucene.util.BytesRefHash;
import org.apache.lucene.util.Counter;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.PackedInts;
import org.apache.lucene.util.packed.PackedLongValues;

@@ -93,7 +91,7 @@ class SortedDocValuesWriter extends DocValuesWriter {
// 1. when indexing, when hash is 50% full, rehash() suddenly needs 2*size ints.
// TODO: can this same OOM happen in THPF?
// 2. when flushing, we need 1 int per value (slot in the ordMap).
iwBytesUsed.addAndGet(2 * RamUsageEstimator.NUM_BYTES_INT);
iwBytesUsed.addAndGet(2 * Integer.BYTES);
}

pending.add(termID);
@@ -16,7 +16,6 @@
*/
package org.apache.lucene.index;


import static org.apache.lucene.util.ByteBlockPool.BYTE_BLOCK_SIZE;

import java.io.IOException;
@@ -31,7 +30,6 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefHash.DirectBytesStartArray;
import org.apache.lucene.util.BytesRefHash;
import org.apache.lucene.util.Counter;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.PackedInts;
import org.apache.lucene.util.packed.PackedLongValues;

@@ -125,14 +123,14 @@ class SortedSetDocValuesWriter extends DocValuesWriter {
// 1. when indexing, when hash is 50% full, rehash() suddenly needs 2*size ints.
// TODO: can this same OOM happen in THPF?
// 2. when flushing, we need 1 int per value (slot in the ordMap).
iwBytesUsed.addAndGet(2 * RamUsageEstimator.NUM_BYTES_INT);
iwBytesUsed.addAndGet(2 * Integer.BYTES);
}

if (currentUpto == currentValues.length) {
currentValues = ArrayUtil.grow(currentValues, currentValues.length+1);
// reserve additional space for max # values per-doc
// when flushing, we need an int[] to sort the mapped-ords within the doc
iwBytesUsed.addAndGet((currentValues.length - currentUpto) * 2 * RamUsageEstimator.NUM_BYTES_INT);
iwBytesUsed.addAndGet((currentValues.length - currentUpto) * 2 * Integer.BYTES);
}

currentValues[currentUpto] = termID;
@@ -16,7 +16,6 @@
*/
package org.apache.lucene.index;


import java.io.IOException;

import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
@@ -283,7 +282,7 @@ final class TermVectorsConsumerPerField extends TermsHashPerField {

@Override
int bytesPerPosting() {
return super.bytesPerPosting() + 3 * RamUsageEstimator.NUM_BYTES_INT;
return super.bytesPerPosting() + 3 * Integer.BYTES;
}
}
}
@@ -16,7 +16,6 @@
*/
package org.apache.lucene.search;


import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
@@ -24,7 +23,6 @@ import java.util.List;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;

/**
* Caches all docs, and optionally also scores, coming from
@@ -233,7 +231,7 @@ public abstract class CachingCollector extends FilterCollector {
if (docCount >= maxDocsToCache) {
invalidate();
} else {
final int newLen = Math.min(ArrayUtil.oversize(docCount + 1, RamUsageEstimator.NUM_BYTES_INT), maxDocsToCache);
final int newLen = Math.min(ArrayUtil.oversize(docCount + 1, Integer.BYTES), maxDocsToCache);
grow(newLen);
}
}
@@ -329,9 +327,9 @@ public abstract class CachingCollector extends FilterCollector {
* scores are cached.
*/
public static CachingCollector create(Collector other, boolean cacheScores, double maxRAMMB) {
int bytesPerDoc = RamUsageEstimator.NUM_BYTES_INT;
int bytesPerDoc = Integer.BYTES;
if (cacheScores) {
bytesPerDoc += RamUsageEstimator.NUM_BYTES_FLOAT;
bytesPerDoc += Float.BYTES;
}
final int maxDocsToCache = (int) ((maxRAMMB * 1024 * 1024) / bytesPerDoc);
return create(other, cacheScores, maxDocsToCache);
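The create() overload shown above turns a RAM budget into a document-count cap from the per-doc footprint. An illustrative computation with an assumed 16 MB budget:

    int bytesPerDoc = Integer.BYTES;  // cached docID
    boolean cacheScores = true;
    if (cacheScores) {
      bytesPerDoc += Float.BYTES;     // plus cached score: 8 bytes per doc
    }
    double maxRAMMB = 16.0;
    int maxDocsToCache = (int) ((maxRAMMB * 1024 * 1024) / bytesPerDoc); // 2097152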
@ -1,153 +0,0 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.lucene.search;
|
||||
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Objects;
|
||||
|
||||
import org.apache.lucene.index.FieldInfo;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.LeafReader;
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
import org.apache.lucene.index.PointValues.IntersectVisitor;
import org.apache.lucene.index.PointValues.Relation;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.util.DocIdSetBuilder;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.StringHelper;

/** Searches for single points in fields previously indexed using points
 *  e.g. {@link org.apache.lucene.document.LongPoint}. */

public class ExactPointQuery extends Query {
  final String field;
  final int numDims;
  final byte[][] point;
  final int bytesPerDim;

  public ExactPointQuery(String field, byte[][] point) {
    this.field = field;
    if (point == null) {
      throw new IllegalArgumentException("point must not be null");
    }
    this.point = point;
    this.numDims = point.length;

    int bytesPerDim = -1;
    for(byte[] value : point) {
      if (value == null) {
        throw new IllegalArgumentException("point's dimensional values must not be null");
      }
      if (bytesPerDim == -1) {
        bytesPerDim = value.length;
      } else if (value.length != bytesPerDim) {
        throw new IllegalArgumentException("all dimensions must have same bytes length, but saw " + bytesPerDim + " and " + value.length);
      }
    }
    this.bytesPerDim = bytesPerDim;
  }

  /** Use in the 1D case when you indexed 1D int values using {@link org.apache.lucene.document.IntPoint} */
  public static ExactPointQuery new1DIntExact(String field, int value) {
    return new ExactPointQuery(field, pack(value));
  }

  /** Use in the 1D case when you indexed 1D long values using {@link org.apache.lucene.document.LongPoint} */
  public static ExactPointQuery new1DLongExact(String field, long value) {
    return new ExactPointQuery(field, pack(value));
  }

  /** Use in the 1D case when you indexed 1D float values using {@link org.apache.lucene.document.FloatPoint} */
  public static ExactPointQuery new1DFloatExact(String field, float value) {
    return new ExactPointQuery(field, pack(value));
  }

  /** Use in the 1D case when you indexed 1D double values using {@link org.apache.lucene.document.DoublePoint} */
  public static ExactPointQuery new1DDoubleExact(String field, double value) {
    return new ExactPointQuery(field, pack(value));
  }

  private static byte[][] pack(long value) {
    byte[][] result = new byte[][] {new byte[RamUsageEstimator.NUM_BYTES_LONG]};
    NumericUtils.longToBytes(value, result[0], 0);
    return result;
  }

  private static byte[][] pack(double value) {
    byte[][] result = new byte[][] {new byte[RamUsageEstimator.NUM_BYTES_LONG]};
    NumericUtils.longToBytesDirect(NumericUtils.doubleToSortableLong(value), result[0], 0);
    return result;
  }

  private static byte[][] pack(int value) {
    byte[][] result = new byte[][] {new byte[RamUsageEstimator.NUM_BYTES_INT]};
    NumericUtils.intToBytes(value, result[0], 0);
    return result;
  }

  private static byte[][] pack(float value) {
    byte[][] result = new byte[][] {new byte[RamUsageEstimator.NUM_BYTES_INT]};
    NumericUtils.intToBytesDirect(NumericUtils.floatToSortableInt(value), result[0], 0);
    return result;
  }

  @Override
  public Query rewrite(IndexReader reader) throws IOException {
    boolean[] inclusive = new boolean[] {true};
    return new PointRangeQuery(field, point, inclusive, point, inclusive);
  }

  @Override
  public int hashCode() {
    int hash = super.hashCode();
    hash += Arrays.hashCode(point)^0x14fa55fb;
    hash += numDims^0x14fa55fb;
    hash += Objects.hashCode(bytesPerDim);
    return hash;
  }

  @Override
  public boolean equals(Object other) {
    if (super.equals(other)) {
      final ExactPointQuery q = (ExactPointQuery) other;
      return q.numDims == numDims &&
             q.bytesPerDim == bytesPerDim &&
             Arrays.equals(point, q.point);
    }

    return false;
  }

  @Override
  public String toString(String field) {
    final StringBuilder sb = new StringBuilder();
    sb.append(getClass().getSimpleName());
    sb.append(':');
    if (this.field.equals(field) == false) {
      sb.append("field=");
      sb.append(this.field);
      sb.append(':');
    }

    return sb.append(" point=")
             .append(Arrays.toString(point))
             .toString();
  }
}
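
A usage sketch for the class above (illustrative only, not part of the patch; the field name, value, and the IndexSearcher are hypothetical):

// Illustrative sketch, not part of the patch: match documents whose 1D long
// point field "timestamp" (hypothetical name) equals the given value.
Query q = ExactPointQuery.new1DLongExact("timestamp", 1453939200000L);
TopDocs hits = searcher.search(q, 10); // assumes an existing IndexSearcher named "searcher"
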
@@ -24,18 +24,22 @@ import java.util.Objects;

import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.PointValues.IntersectVisitor;
import org.apache.lucene.index.PointValues.Relation;
import org.apache.lucene.document.BinaryPoint;
import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.DocIdSetBuilder;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.StringHelper;

/** Searches for ranges in fields previously indexed using points e.g.
 *  {@link org.apache.lucene.document.LongPoint}. In a 1D field this is
 *  a simple range query; in a multi-dimensional field it's a box shape. */

// TODO: enhance this and add simple example
public class PointRangeQuery extends Query {
  final String field;
  final int numDims;

@@ -46,16 +50,37 @@ public class PointRangeQuery extends Query {

  // This is null only in the "fully open range" case
  final Integer bytesPerDim;

  public PointRangeQuery(String field,
  /**
   * Expert: create a multidimensional range query for point values.
   * <p>
   * This is for subclasses and works on the underlying binary encoding: to
   * create range queries for lucene's standard {@code Point} types, refer to these factory methods:
   * <ul>
   *   <li>{@link #newIntRange newIntRange()}/{@link #newMultiIntRange newMultiIntRange()} for fields indexed with {@link IntPoint}
   *   <li>{@link #newLongRange newLongRange()}/{@link #newMultiLongRange newMultiLongRange()} for fields indexed with {@link LongPoint}
   *   <li>{@link #newFloatRange newFloatRange()}/{@link #newMultiFloatRange newMultiFloatRange()} for fields indexed with {@link FloatPoint}
   *   <li>{@link #newDoubleRange newDoubleRange()}/{@link #newMultiDoubleRange newMultiDoubleRange()} for fields indexed with {@link DoublePoint}
   *   <li>{@link #newBinaryRange newBinaryRange()}/{@link #newMultiBinaryRange newMultiBinaryRange()} for fields indexed with {@link BinaryPoint}
   * </ul>
   * <p>
   * You can have half-open ranges (which are in fact </&le; or >/&ge; queries)
   * by setting a {@code lowerValue} element or {@code upperValue} element to {@code null}.
   * <p>
   * By setting a dimension's inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will
   * match all documents excluding the bounds; with inclusive on, the boundaries are hits, too.
   *
   * @param field field name. must not be {@code null}.
   * @param lowerPoint lower portion of the range. {@code null} values mean "open" for that dimension.
   * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded.
   * @param upperPoint upper portion of the range. {@code null} values mean "open" for that dimension.
   * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded.
   * @throws IllegalArgumentException if {@code field} is null, or if {@code lowerValue.length != upperValue.length}
   */
  protected PointRangeQuery(String field,
                            byte[][] lowerPoint, boolean[] lowerInclusive,
                            byte[][] upperPoint, boolean[] upperInclusive) {
    checkArgs(field, lowerPoint, upperPoint);
    this.field = field;
    if (lowerPoint == null) {
      throw new IllegalArgumentException("lowerPoint must not be null");
    }
    if (upperPoint == null) {
      throw new IllegalArgumentException("upperPoint must not be null");
    }
    numDims = lowerPoint.length;
    if (upperPoint.length != numDims) {
      throw new IllegalArgumentException("lowerPoint has length=" + numDims + " but upperPoint has different length=" + upperPoint.length);

@@ -91,69 +116,361 @@ public class PointRangeQuery extends Query {
    }
  }

  /** Use in the 1D case when you indexed 1D int values using {@link org.apache.lucene.document.IntPoint} */
  public static PointRangeQuery new1DIntRange(String field, Integer lowerValue, boolean lowerInclusive, Integer upperValue, boolean upperInclusive) {
    return new PointRangeQuery(field, pack(lowerValue), new boolean[] {lowerInclusive}, pack(upperValue), new boolean[] {upperInclusive});
  }

  /** Use in the 1D case when you indexed 1D long values using {@link org.apache.lucene.document.LongPoint} */
  public static PointRangeQuery new1DLongRange(String field, Long lowerValue, boolean lowerInclusive, Long upperValue, boolean upperInclusive) {
    return new PointRangeQuery(field, pack(lowerValue), new boolean[] {lowerInclusive}, pack(upperValue), new boolean[] {upperInclusive});
  }

  /** Use in the 1D case when you indexed 1D float values using {@link org.apache.lucene.document.FloatPoint} */
  public static PointRangeQuery new1DFloatRange(String field, Float lowerValue, boolean lowerInclusive, Float upperValue, boolean upperInclusive) {
    return new PointRangeQuery(field, pack(lowerValue), new boolean[] {lowerInclusive}, pack(upperValue), new boolean[] {upperInclusive});
  }

  /** Use in the 1D case when you indexed 1D double values using {@link org.apache.lucene.document.DoublePoint} */
  public static PointRangeQuery new1DDoubleRange(String field, Double lowerValue, boolean lowerInclusive, Double upperValue, boolean upperInclusive) {
    return new PointRangeQuery(field, pack(lowerValue), new boolean[] {lowerInclusive}, pack(upperValue), new boolean[] {upperInclusive});
  }

  /** Use in the 1D case when you indexed binary values using {@link org.apache.lucene.document.BinaryPoint} */
  public static PointRangeQuery new1DBinaryRange(String field, byte[] lowerValue, boolean lowerInclusive, byte[] upperValue, boolean upperInclusive) {
    return new PointRangeQuery(field, new byte[][] {lowerValue}, new boolean[] {lowerInclusive}, new byte[][] {upperValue}, new boolean[] {upperInclusive});
  }

  private static byte[][] pack(Long value) {
    if (value == null) {
      // OK: open ended range
      return new byte[1][];
    }
    byte[][] result = new byte[][] {new byte[RamUsageEstimator.NUM_BYTES_LONG]};
    NumericUtils.longToBytes(value, result[0], 0);
    return result;
  }

  /** Check preconditions for all factory methods */
  private static void checkArgs(String field, Object lowerPoint, Object upperPoint) {
    if (field == null) {
      throw new IllegalArgumentException("field must not be null");
    }
    if (lowerPoint == null) {
      throw new IllegalArgumentException("lowerPoint must not be null");
    }
    if (upperPoint == null) {
      throw new IllegalArgumentException("upperPoint must not be null");
    }
  }

  private static byte[][] pack(Double value) {
    if (value == null) {
      // OK: open ended range
      return new byte[1][];
    }
    byte[][] result = new byte[][] {new byte[RamUsageEstimator.NUM_BYTES_LONG]};
    NumericUtils.longToBytesDirect(NumericUtils.doubleToSortableLong(value), result[0], 0);
    return result;
  }

  /**
   * Create a range query for matching an exact integer value.
   * <p>
   * This is for simple one-dimension points, for multidimensional points use
   * {@link #newMultiIntRange newMultiIntRange()} instead.
   *
   * @param field field name. must not be {@code null}.
   * @param value exact value
   * @throws IllegalArgumentException if {@code field} is null.
   * @return a query matching documents with this exact value
   */
  public static PointRangeQuery newIntExact(String field, int value) {
    return newIntRange(field, value, true, value, true);
  }

  private static byte[][] pack(Integer value) {
    if (value == null) {
      // OK: open ended range
      return new byte[1][];
    }
    byte[][] result = new byte[][] {new byte[RamUsageEstimator.NUM_BYTES_INT]};
    NumericUtils.intToBytes(value, result[0], 0);
    return result;
  }

  /**
   * Create a range query for integer values indexed with {@link IntPoint}.
   * <p>
   * This is for simple one-dimension ranges, for multidimensional ranges use
   * {@link #newMultiIntRange newMultiIntRange()} instead.
   * <p>
   * You can have half-open ranges (which are in fact </&le; or >/&ge; queries)
   * by setting the {@code lowerValue} or {@code upperValue} to {@code null}.
   * <p>
   * By setting inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will
   * match all documents excluding the bounds; with inclusive on, the boundaries are hits, too.
   *
   * @param field field name. must not be {@code null}.
   * @param lowerValue lower portion of the range. {@code null} means "open".
   * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded.
   * @param upperValue upper portion of the range. {@code null} means "open".
   * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded.
   * @throws IllegalArgumentException if {@code field} is null.
   * @return a query matching documents within this range.
   */
  public static PointRangeQuery newIntRange(String field, Integer lowerValue, boolean lowerInclusive, Integer upperValue, boolean upperInclusive) {
    return newMultiIntRange(field,
                            new Integer[] { lowerValue },
                            new boolean[] { lowerInclusive },
                            new Integer[] { upperValue },
                            new boolean[] { upperInclusive });
  }

  private static byte[][] pack(Float value) {
    if (value == null) {
      // OK: open ended range
      return new byte[1][];
    }
    byte[][] result = new byte[][] {new byte[RamUsageEstimator.NUM_BYTES_INT]};
    NumericUtils.intToBytesDirect(NumericUtils.floatToSortableInt(value), result[0], 0);
    return result;
  }

  /**
   * Create a multidimensional range query for integer values indexed with {@link IntPoint}.
   * <p>
   * You can have half-open ranges (which are in fact </&le; or >/&ge; queries)
   * by setting a {@code lowerValue} element or {@code upperValue} element to {@code null}.
   * <p>
   * By setting a dimension's inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will
   * match all documents excluding the bounds; with inclusive on, the boundaries are hits, too.
   *
   * @param field field name. must not be {@code null}.
   * @param lowerValue lower portion of the range. {@code null} values mean "open" for that dimension.
   * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded.
   * @param upperValue upper portion of the range. {@code null} values mean "open" for that dimension.
   * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded.
   * @throws IllegalArgumentException if {@code field} is null, or if {@code lowerValue.length != upperValue.length}
   * @return a query matching documents within this range.
   */
  public static PointRangeQuery newMultiIntRange(String field, Integer[] lowerValue, boolean lowerInclusive[], Integer[] upperValue, boolean upperInclusive[]) {
    checkArgs(field, lowerValue, upperValue);
    return new PointRangeQuery(field, IntPoint.encode(lowerValue), lowerInclusive, IntPoint.encode(upperValue), upperInclusive) {
      @Override
      protected String toString(byte[] value) {
        return IntPoint.decodeDimension(value, 0).toString();
      }
    };
  }

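A usage sketch for the factory methods above (illustrative, not part of the patch; field names are hypothetical):

// Illustrative sketch, not part of the patch.
// 1D half-open range: 0 <= price < 100 (upper bound excluded via upperInclusive=false):
Query priceRange = PointRangeQuery.newIntRange("price", 0, true, 100, false);
// Fully open lower bound: price < 100 (null means "open" for that end):
Query lessThan = PointRangeQuery.newIntRange("price", null, true, 100, false);
// 2D box over a two-dimension IntPoint field, both bounds inclusive per dimension:
Query box = PointRangeQuery.newMultiIntRange("cell",
    new Integer[] {0, 0}, new boolean[] {true, true},
    new Integer[] {10, 20}, new boolean[] {true, true});
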
  /**
   * Create a range query for matching an exact long value.
   * <p>
   * This is for simple one-dimension points, for multidimensional points use
   * {@link #newMultiLongRange newMultiLongRange()} instead.
   *
   * @param field field name. must not be {@code null}.
   * @param value exact value
   * @throws IllegalArgumentException if {@code field} is null.
   * @return a query matching documents with this exact value
   */
  public static PointRangeQuery newLongExact(String field, long value) {
    return newLongRange(field, value, true, value, true);
  }

  /**
   * Create a range query for long values indexed with {@link LongPoint}.
   * <p>
   * This is for simple one-dimension ranges, for multidimensional ranges use
   * {@link #newMultiLongRange newMultiLongRange()} instead.
   * <p>
   * You can have half-open ranges (which are in fact </&le; or >/&ge; queries)
   * by setting the {@code lowerValue} or {@code upperValue} to {@code null}.
   * <p>
   * By setting inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will
   * match all documents excluding the bounds; with inclusive on, the boundaries are hits, too.
   *
   * @param field field name. must not be {@code null}.
   * @param lowerValue lower portion of the range. {@code null} means "open".
   * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded.
   * @param upperValue upper portion of the range. {@code null} means "open".
   * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded.
   * @throws IllegalArgumentException if {@code field} is null.
   * @return a query matching documents within this range.
   */
  public static PointRangeQuery newLongRange(String field, Long lowerValue, boolean lowerInclusive, Long upperValue, boolean upperInclusive) {
    return newMultiLongRange(field,
                             new Long[] { lowerValue },
                             new boolean[] { lowerInclusive },
                             new Long[] { upperValue },
                             new boolean[] { upperInclusive });
  }

  /**
   * Create a multidimensional range query for long values indexed with {@link LongPoint}.
   * <p>
   * You can have half-open ranges (which are in fact </&le; or >/&ge; queries)
   * by setting a {@code lowerValue} element or {@code upperValue} element to {@code null}.
   * <p>
   * By setting a dimension's inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will
   * match all documents excluding the bounds; with inclusive on, the boundaries are hits, too.
   *
   * @param field field name. must not be {@code null}.
   * @param lowerValue lower portion of the range. {@code null} values mean "open" for that dimension.
   * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded.
   * @param upperValue upper portion of the range. {@code null} values mean "open" for that dimension.
   * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded.
   * @throws IllegalArgumentException if {@code field} is null, or if {@code lowerValue.length != upperValue.length}
   * @return a query matching documents within this range.
   */
  public static PointRangeQuery newMultiLongRange(String field, Long[] lowerValue, boolean lowerInclusive[], Long[] upperValue, boolean upperInclusive[]) {
    checkArgs(field, lowerValue, upperValue);
    return new PointRangeQuery(field, LongPoint.encode(lowerValue), lowerInclusive, LongPoint.encode(upperValue), upperInclusive) {
      @Override
      protected String toString(byte[] value) {
        return LongPoint.decodeDimension(value, 0).toString();
      }
    };
  }

  /**
   * Create a range query for matching an exact float value.
   * <p>
   * This is for simple one-dimension points, for multidimensional points use
   * {@link #newMultiFloatRange newMultiFloatRange()} instead.
   *
   * @param field field name. must not be {@code null}.
   * @param value float value
   * @throws IllegalArgumentException if {@code field} is null.
   * @return a query matching documents with this exact value
   */
  public static PointRangeQuery newFloatExact(String field, float value) {
    return newFloatRange(field, value, true, value, true);
  }

  /**
   * Create a range query for float values indexed with {@link FloatPoint}.
   * <p>
   * This is for simple one-dimension ranges, for multidimensional ranges use
   * {@link #newMultiFloatRange newMultiFloatRange()} instead.
   * <p>
   * You can have half-open ranges (which are in fact </&le; or >/&ge; queries)
   * by setting the {@code lowerValue} or {@code upperValue} to {@code null}.
   * <p>
   * By setting inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will
   * match all documents excluding the bounds; with inclusive on, the boundaries are hits, too.
   *
   * @param field field name. must not be {@code null}.
   * @param lowerValue lower portion of the range. {@code null} means "open".
   * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded.
   * @param upperValue upper portion of the range. {@code null} means "open".
   * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded.
   * @throws IllegalArgumentException if {@code field} is null.
   * @return a query matching documents within this range.
   */
  public static PointRangeQuery newFloatRange(String field, Float lowerValue, boolean lowerInclusive, Float upperValue, boolean upperInclusive) {
    return newMultiFloatRange(field,
                              new Float[] { lowerValue },
                              new boolean[] { lowerInclusive },
                              new Float[] { upperValue },
                              new boolean[] { upperInclusive });
  }

  /**
   * Create a multidimensional range query for float values indexed with {@link FloatPoint}.
   * <p>
   * You can have half-open ranges (which are in fact </&le; or >/&ge; queries)
   * by setting a {@code lowerValue} element or {@code upperValue} element to {@code null}.
   * <p>
   * By setting a dimension's inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will
   * match all documents excluding the bounds; with inclusive on, the boundaries are hits, too.
   *
   * @param field field name. must not be {@code null}.
   * @param lowerValue lower portion of the range. {@code null} values mean "open" for that dimension.
   * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded.
   * @param upperValue upper portion of the range. {@code null} values mean "open" for that dimension.
   * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded.
   * @throws IllegalArgumentException if {@code field} is null, or if {@code lowerValue.length != upperValue.length}
   * @return a query matching documents within this range.
   */
  public static PointRangeQuery newMultiFloatRange(String field, Float[] lowerValue, boolean lowerInclusive[], Float[] upperValue, boolean upperInclusive[]) {
    checkArgs(field, lowerValue, upperValue);
    return new PointRangeQuery(field, FloatPoint.encode(lowerValue), lowerInclusive, FloatPoint.encode(upperValue), upperInclusive) {
      @Override
      protected String toString(byte[] value) {
        return FloatPoint.decodeDimension(value, 0).toString();
      }
    };
  }

  /**
   * Create a range query for matching an exact double value.
   * <p>
   * This is for simple one-dimension points, for multidimensional points use
   * {@link #newMultiDoubleRange newMultiDoubleRange()} instead.
   *
   * @param field field name. must not be {@code null}.
   * @param value double value
   * @throws IllegalArgumentException if {@code field} is null.
   * @return a query matching documents with this exact value
   */
  public static PointRangeQuery newDoubleExact(String field, double value) {
    return newDoubleRange(field, value, true, value, true);
  }

  /**
   * Create a range query for double values indexed with {@link DoublePoint}.
   * <p>
   * This is for simple one-dimension ranges, for multidimensional ranges use
   * {@link #newMultiDoubleRange newMultiDoubleRange()} instead.
   * <p>
   * You can have half-open ranges (which are in fact </&le; or >/&ge; queries)
   * by setting the {@code lowerValue} or {@code upperValue} to {@code null}.
   * <p>
   * By setting inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will
   * match all documents excluding the bounds; with inclusive on, the boundaries are hits, too.
   *
   * @param field field name. must not be {@code null}.
   * @param lowerValue lower portion of the range. {@code null} means "open".
   * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded.
   * @param upperValue upper portion of the range. {@code null} means "open".
   * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded.
   * @throws IllegalArgumentException if {@code field} is null.
   * @return a query matching documents within this range.
   */
  public static PointRangeQuery newDoubleRange(String field, Double lowerValue, boolean lowerInclusive, Double upperValue, boolean upperInclusive) {
    return newMultiDoubleRange(field,
                               new Double[] { lowerValue },
                               new boolean[] { lowerInclusive },
                               new Double[] { upperValue },
                               new boolean[] { upperInclusive });
  }

  /**
   * Create a multidimensional range query for double values indexed with {@link DoublePoint}.
   * <p>
   * You can have half-open ranges (which are in fact </&le; or >/&ge; queries)
   * by setting a {@code lowerValue} element or {@code upperValue} element to {@code null}.
   * <p>
   * By setting a dimension's inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will
   * match all documents excluding the bounds; with inclusive on, the boundaries are hits, too.
   *
   * @param field field name. must not be {@code null}.
   * @param lowerValue lower portion of the range. {@code null} values mean "open" for that dimension.
   * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded.
   * @param upperValue upper portion of the range. {@code null} values mean "open" for that dimension.
   * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded.
   * @throws IllegalArgumentException if {@code field} is null, or if {@code lowerValue.length != upperValue.length}
   * @return a query matching documents within this range.
   */
  public static PointRangeQuery newMultiDoubleRange(String field, Double[] lowerValue, boolean lowerInclusive[], Double[] upperValue, boolean upperInclusive[]) {
    checkArgs(field, lowerValue, upperValue);
    return new PointRangeQuery(field, DoublePoint.encode(lowerValue), lowerInclusive, DoublePoint.encode(upperValue), upperInclusive) {
      @Override
      protected String toString(byte[] value) {
        return DoublePoint.decodeDimension(value, 0).toString();
      }
    };
  }

  /**
   * Create a range query for matching an exact binary value.
   * <p>
   * This is for simple one-dimension points, for multidimensional points use
   * {@link #newMultiBinaryRange newMultiBinaryRange()} instead.
   *
   * @param field field name. must not be {@code null}.
   * @param value binary value
   * @throws IllegalArgumentException if {@code field} is null or {@code value} is null
   * @return a query matching documents with this exact value
   */
  public static PointRangeQuery newBinaryExact(String field, byte[] value) {
    if (value == null) {
      throw new IllegalArgumentException("value cannot be null");
    }
    return newBinaryRange(field, value, true, value, true);
  }

  /**
   * Create a range query for binary values indexed with {@link BinaryPoint}.
   * <p>
   * This is for simple one-dimension ranges, for multidimensional ranges use
   * {@link #newMultiBinaryRange newMultiBinaryRange()} instead.
   * <p>
   * You can have half-open ranges (which are in fact </&le; or >/&ge; queries)
   * by setting the {@code lowerValue} or {@code upperValue} to {@code null}.
   * <p>
   * By setting inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will
   * match all documents excluding the bounds; with inclusive on, the boundaries are hits, too.
   *
   * @param field field name. must not be {@code null}.
   * @param lowerValue lower portion of the range. {@code null} means "open".
   * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded.
   * @param upperValue upper portion of the range. {@code null} means "open".
   * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded.
   * @throws IllegalArgumentException if {@code field} is null.
   * @return a query matching documents within this range.
   */
  public static PointRangeQuery newBinaryRange(String field, byte[] lowerValue, boolean lowerInclusive, byte[] upperValue, boolean upperInclusive) {
    return newMultiBinaryRange(field, new byte[][] {lowerValue}, new boolean[] {lowerInclusive}, new byte[][] {upperValue}, new boolean[] {upperInclusive});
  }

  /**
   * Create a multidimensional range query for binary values indexed with {@link BinaryPoint}.
   * <p>
   * You can have half-open ranges (which are in fact </&le; or >/&ge; queries)
   * by setting a {@code lowerValue} element or {@code upperValue} element to {@code null}.
   * <p>
   * By setting a dimension's inclusive ({@code lowerInclusive} or {@code upperInclusive}) to false, it will
   * match all documents excluding the bounds; with inclusive on, the boundaries are hits, too.
   *
   * @param field field name. must not be {@code null}.
   * @param lowerValue lower portion of the range. {@code null} values mean "open" for that dimension.
   * @param lowerInclusive {@code true} if the lower portion of the range is inclusive, {@code false} if it should be excluded.
   * @param upperValue upper portion of the range. {@code null} values mean "open" for that dimension.
   * @param upperInclusive {@code true} if the upper portion of the range is inclusive, {@code false} if it should be excluded.
   * @throws IllegalArgumentException if {@code field} is null, or if {@code lowerValue.length != upperValue.length}
   * @return a query matching documents within this range.
   */
  public static PointRangeQuery newMultiBinaryRange(String field, byte[][] lowerValue, boolean[] lowerInclusive, byte[][] upperValue, boolean[] upperInclusive) {
    checkArgs(field, lowerValue, upperValue);
    return new PointRangeQuery(field, lowerValue, lowerInclusive, upperValue, upperInclusive);
  }

  @Override

@@ -330,19 +647,67 @@ public class PointRangeQuery extends Query {
  @Override
  public String toString(String field) {
    final StringBuilder sb = new StringBuilder();
    sb.append(getClass().getSimpleName());
    sb.append(':');
    if (this.field.equals(field) == false) {
      sb.append("field=");
      sb.append(this.field);
      sb.append(':');
    }

    return sb.append('[')
             .append(Arrays.toString(lowerPoint))
             .append(" TO ")
             .append(Arrays.toString(upperPoint))
             .append(']')
             .toString();
    // print ourselves as "range per dimension"
    for (int i = 0; i < numDims; i++) {
      if (i > 0) {
        sb.append(',');
      }

      if (lowerInclusive[i]) {
        sb.append('[');
      } else {
        sb.append('{');
      }

      if (lowerPoint[i] == null) {
        sb.append('*');
      } else {
        sb.append(toString(lowerPoint[i]));
      }

      sb.append(" TO ");

      if (upperPoint[i] == null) {
        sb.append('*');
      } else {
        sb.append(toString(upperPoint[i]));
      }

      if (upperInclusive[i]) {
        sb.append(']');
      } else {
        sb.append('}');
      }
    }

    return sb.toString();
  }

  /**
   * Returns a string of a single value in a human-readable format for debugging.
   * This is used by {@link #toString()}.
   *
   * The default implementation encodes the individual byte values.
   *
   * @param value single value, never null
   * @return human readable value for debugging
   */
  protected String toString(byte[] value) {
    assert value != null;
    StringBuilder sb = new StringBuilder();
    sb.append("binary(");
    for (int i = 0; i < value.length; i++) {
      if (i > 0) {
        sb.append(' ');
      }
      sb.append(Integer.toHexString(value[i] & 0xFF));
    }
    sb.append(')');
    return sb.toString();
  }
}

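To illustrate the per-dimension rendering above (hypothetical field and bounds; the output is shown approximately, not verbatim):

// Illustrative sketch, not part of the patch: the per-dimension format prints
// '[' / ']' for inclusive bounds, '{' / '}' for exclusive ones, and '*' for open ends.
Query q = PointRangeQuery.newLongRange("size", 5L, true, null, true);
// q.toString("otherField") renders roughly: "field=size:[5 TO *]"
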
@ -16,7 +16,6 @@
|
|||
*/
|
||||
package org.apache.lucene.search;
|
||||
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
|
@ -168,7 +167,7 @@ public abstract class ScoringRewrite<B> extends TermCollectingRewrite<B> {
|
|||
@Override
|
||||
public int[] init() {
|
||||
final int[] ord = super.init();
|
||||
boost = new float[ArrayUtil.oversize(ord.length, RamUsageEstimator.NUM_BYTES_FLOAT)];
|
||||
boost = new float[ArrayUtil.oversize(ord.length, Float.BYTES)];
|
||||
termState = new TermContext[ArrayUtil.oversize(ord.length, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
|
||||
assert termState.length >= ord.length && boost.length >= ord.length;
|
||||
return ord;
|
||||
|
|
|
@@ -16,7 +16,6 @@
 */
package org.apache.lucene.util;

import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;

@@ -248,7 +247,7 @@ public final class ArrayUtil {
  public static short[] grow(short[] array, int minSize) {
    assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
    if (array.length < minSize) {
      short[] newArray = new short[oversize(minSize, RamUsageEstimator.NUM_BYTES_SHORT)];
      short[] newArray = new short[oversize(minSize, Short.BYTES)];
      System.arraycopy(array, 0, newArray, 0, array.length);
      return newArray;
    } else

@@ -262,7 +261,7 @@ public final class ArrayUtil {
  public static float[] grow(float[] array, int minSize) {
    assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
    if (array.length < minSize) {
      float[] newArray = new float[oversize(minSize, RamUsageEstimator.NUM_BYTES_FLOAT)];
      float[] newArray = new float[oversize(minSize, Float.BYTES)];
      System.arraycopy(array, 0, newArray, 0, array.length);
      return newArray;
    } else

@@ -276,7 +275,7 @@ public final class ArrayUtil {
  public static double[] grow(double[] array, int minSize) {
    assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
    if (array.length < minSize) {
      double[] newArray = new double[oversize(minSize, RamUsageEstimator.NUM_BYTES_DOUBLE)];
      double[] newArray = new double[oversize(minSize, Double.BYTES)];
      System.arraycopy(array, 0, newArray, 0, array.length);
      return newArray;
    } else

@@ -289,7 +288,7 @@ public final class ArrayUtil {
  public static short[] shrink(short[] array, int targetSize) {
    assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?";
    final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_SHORT);
    final int newSize = getShrinkSize(array.length, targetSize, Short.BYTES);
    if (newSize != array.length) {
      short[] newArray = new short[newSize];
      System.arraycopy(array, 0, newArray, 0, newSize);

@@ -301,7 +300,7 @@ public final class ArrayUtil {
  public static int[] grow(int[] array, int minSize) {
    assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
    if (array.length < minSize) {
      int[] newArray = new int[oversize(minSize, RamUsageEstimator.NUM_BYTES_INT)];
      int[] newArray = new int[oversize(minSize, Integer.BYTES)];
      System.arraycopy(array, 0, newArray, 0, array.length);
      return newArray;
    } else

@@ -314,7 +313,7 @@ public final class ArrayUtil {
  public static int[] shrink(int[] array, int targetSize) {
    assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?";
    final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_INT);
    final int newSize = getShrinkSize(array.length, targetSize, Integer.BYTES);
    if (newSize != array.length) {
      int[] newArray = new int[newSize];
      System.arraycopy(array, 0, newArray, 0, newSize);

@@ -326,7 +325,7 @@ public final class ArrayUtil {
  public static long[] grow(long[] array, int minSize) {
    assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
    if (array.length < minSize) {
      long[] newArray = new long[oversize(minSize, RamUsageEstimator.NUM_BYTES_LONG)];
      long[] newArray = new long[oversize(minSize, Long.BYTES)];
      System.arraycopy(array, 0, newArray, 0, array.length);
      return newArray;
    } else

@@ -339,7 +338,7 @@ public final class ArrayUtil {
  public static long[] shrink(long[] array, int targetSize) {
    assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?";
    final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_LONG);
    final int newSize = getShrinkSize(array.length, targetSize, Long.BYTES);
    if (newSize != array.length) {
      long[] newArray = new long[newSize];
      System.arraycopy(array, 0, newArray, 0, newSize);

@@ -401,7 +400,7 @@ public final class ArrayUtil {
  public static char[] grow(char[] array, int minSize) {
    assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
    if (array.length < minSize) {
      char[] newArray = new char[oversize(minSize, RamUsageEstimator.NUM_BYTES_CHAR)];
      char[] newArray = new char[oversize(minSize, Character.BYTES)];
      System.arraycopy(array, 0, newArray, 0, array.length);
      return newArray;
    } else

@@ -414,7 +413,7 @@ public final class ArrayUtil {
  public static char[] shrink(char[] array, int targetSize) {
    assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?";
    final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_CHAR);
    final int newSize = getShrinkSize(array.length, targetSize, Character.BYTES);
    if (newSize != array.length) {
      char[] newArray = new char[newSize];
      System.arraycopy(array, 0, newArray, 0, newSize);

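The grow/shrink methods size their over-allocation in units of the element width, which after this change comes from the JDK's own BYTES constants. A sketch of the intended call pattern (illustrative, not part of the patch):

// Illustrative sketch, not part of the patch: amortized growth of an int[]
// using Integer.BYTES (4) as the per-element width.
int[] buf = new int[16];
buf = ArrayUtil.grow(buf, 1000);                      // capacity >= 1000, padded by oversize()
int padded = ArrayUtil.oversize(1000, Integer.BYTES); // the sizing policy used internally
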
@@ -51,8 +51,7 @@ public final class BytesRefArray {
    this.pool = new ByteBlockPool(new ByteBlockPool.DirectTrackingAllocator(
        bytesUsed));
    pool.nextBuffer();
    bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER
        + RamUsageEstimator.NUM_BYTES_INT);
    bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + Integer.BYTES);
    this.bytesUsed = bytesUsed;
  }

@@ -75,8 +74,7 @@ public final class BytesRefArray {
    if (lastElement >= offsets.length) {
      int oldLen = offsets.length;
      offsets = ArrayUtil.grow(offsets, offsets.length + 1);
      bytesUsed.addAndGet((offsets.length - oldLen)
          * RamUsageEstimator.NUM_BYTES_INT);
      bytesUsed.addAndGet((offsets.length - oldLen) * Integer.BYTES);
    }
    pool.append(bytes);
    offsets[lastElement++] = currentOffset;

@@ -90,7 +90,7 @@ public final class BytesRefHash {
    this.bytesStartArray = bytesStartArray;
    bytesStart = bytesStartArray.init();
    bytesUsed = bytesStartArray.bytesUsed() == null? Counter.newCounter() : bytesStartArray.bytesUsed();
    bytesUsed.addAndGet(hashSize * RamUsageEstimator.NUM_BYTES_INT);
    bytesUsed.addAndGet(hashSize * Integer.BYTES);
  }

  /**

@@ -213,7 +213,7 @@ public final class BytesRefHash {
      newSize /= 2;
    }
    if (newSize != hashSize) {
      bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT * -(hashSize - newSize));
      bytesUsed.addAndGet(Integer.BYTES * -(hashSize - newSize));
      hashSize = newSize;
      ids = new int[hashSize];
      Arrays.fill(ids, -1);

@@ -252,7 +252,7 @@ public final class BytesRefHash {
  public void close() {
    clear(true);
    ids = null;
    bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT * -hashSize);
    bytesUsed.addAndGet(Integer.BYTES * -hashSize);
  }

  /**

@@ -408,7 +408,7 @@ public final class BytesRefHash {
   */
  private void rehash(final int newSize, boolean hashOnData) {
    final int newMask = newSize - 1;
    bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT * (newSize));
    bytesUsed.addAndGet(Integer.BYTES * (newSize));
    final int[] newHash = new int[newSize];
    Arrays.fill(newHash, -1);
    for (int i = 0; i < hashSize; i++) {

@@ -449,7 +449,7 @@ public final class BytesRefHash {
    }

    hashMask = newMask;
    bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT * (-ids.length));
    bytesUsed.addAndGet(Integer.BYTES * (-ids.length));
    ids = newHash;
    hashSize = newSize;
    hashHalfSize = newSize / 2;

@@ -472,7 +472,7 @@ public final class BytesRefHash {

    if (ids == null) {
      ids = new int[hashSize];
      bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT * hashSize);
      bytesUsed.addAndGet(Integer.BYTES * hashSize);
    }
  }

@@ -570,8 +570,7 @@ public final class BytesRefHash {

    @Override
    public int[] init() {
      return bytesStart = new int[ArrayUtil.oversize(initSize,
          RamUsageEstimator.NUM_BYTES_INT)];
      return bytesStart = new int[ArrayUtil.oversize(initSize, Integer.BYTES)];
    }

    @Override

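The pattern throughout BytesRefHash is symmetric accounting: every int[] allocation adds hashSize * Integer.BYTES to the counter, and every release subtracts the same amount. A sketch of that invariant (illustrative, not part of the patch; uses Lucene's Counter):

// Illustrative sketch, not part of the patch: symmetric byte accounting
// for an int[] table, mirroring the addAndGet calls above.
Counter bytesUsed = Counter.newCounter();
int hashSize = 16;
int[] ids = new int[hashSize];
bytesUsed.addAndGet(hashSize * Integer.BYTES);   // allocate: +64 bytes
ids = null;
bytesUsed.addAndGet(-hashSize * Integer.BYTES);  // release: -64 bytes, back to 0
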
@@ -16,7 +16,6 @@
 */
package org.apache.lucene.util;

import java.io.IOException;

import org.apache.lucene.search.DocIdSet;

@@ -68,7 +67,7 @@ public final class DocIdSetBuilder {
  private void growBuffer(int minSize) {
    assert minSize < threshold;
    if (buffer.length < minSize) {
      int nextSize = Math.min(threshold, ArrayUtil.oversize(minSize, RamUsageEstimator.NUM_BYTES_INT));
      int nextSize = Math.min(threshold, ArrayUtil.oversize(minSize, Integer.BYTES));
      int[] newBuffer = new int[nextSize];
      System.arraycopy(buffer, 0, newBuffer, 0, buffer.length);
      buffer = newBuffer;

@@ -155,16 +155,15 @@ public final class NumericUtils {
    return true;
  }

  public static void intToBytes(int x, byte[] dest, int index) {
  public static void intToBytes(int x, byte[] dest, int offset) {
    // Flip the sign bit, so negative ints sort before positive ints correctly:
    x ^= 0x80000000;
    intToBytesDirect(x, dest, index);
    intToBytesDirect(x, dest, offset);
  }

  public static void intToBytesDirect(int x, byte[] dest, int index) {
    // Flip the sign bit, so negative ints sort before positive ints correctly:
    for(int i=0;i<4;i++) {
      dest[4*index+i] = (byte) (x >> 24-i*8);
  public static void intToBytesDirect(int x, byte[] dest, int offset) {
    for (int i = 0; i < 4; i++) {
      dest[offset+i] = (byte) (x >> 24-i*8);
    }
  }

@@ -174,22 +173,21 @@ public final class NumericUtils {
    return x ^ 0x80000000;
  }

  public static int bytesToIntDirect(byte[] src, int index) {
  public static int bytesToIntDirect(byte[] src, int offset) {
    int x = 0;
    for(int i=0;i<4;i++) {
      x |= (src[4*index+i] & 0xff) << (24-i*8);
    for (int i = 0; i < 4; i++) {
      x |= (src[offset+i] & 0xff) << (24-i*8);
    }
    return x;
  }

  public static void longToBytes(long v, byte[] bytes, int dim) {
  public static void longToBytes(long v, byte[] bytes, int offset) {
    // Flip the sign bit so negative longs sort before positive longs:
    v ^= 0x8000000000000000L;
    longToBytesDirect(v, bytes, dim);
    longToBytesDirect(v, bytes, offset);
  }

  public static void longToBytesDirect(long v, byte[] bytes, int dim) {
    int offset = 8 * dim;
  public static void longToBytesDirect(long v, byte[] bytes, int offset) {
    bytes[offset] = (byte) (v >> 56);
    bytes[offset+1] = (byte) (v >> 48);
    bytes[offset+2] = (byte) (v >> 40);

@@ -200,15 +198,14 @@ public final class NumericUtils {
    bytes[offset+7] = (byte) v;
  }

  public static long bytesToLong(byte[] bytes, int index) {
    long v = bytesToLongDirect(bytes, index);
  public static long bytesToLong(byte[] bytes, int offset) {
    long v = bytesToLongDirect(bytes, offset);
    // Flip the sign bit back
    v ^= 0x8000000000000000L;
    return v;
  }

  public static long bytesToLongDirect(byte[] bytes, int index) {
    int offset = 8 * index;
  public static long bytesToLongDirect(byte[] bytes, int offset) {
    long v = ((bytes[offset] & 0xffL) << 56) |
        ((bytes[offset+1] & 0xffL) << 48) |
        ((bytes[offset+2] & 0xffL) << 40) |

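The sign-bit flip in intToBytes/longToBytes is what makes the unsigned, byte-wise order of the encoded array agree with the signed numeric order. A small sketch (illustrative, not part of the patch):

// Illustrative sketch, not part of the patch: after the sign-bit flip,
// unsigned byte-wise comparison agrees with signed int order.
byte[] a = new byte[4], b = new byte[4];
NumericUtils.intToBytes(-1, a, 0);  // -1 ^ 0x80000000 encodes as 0x7F FF FF FF
NumericUtils.intToBytes(0, b, 0);   //  0 ^ 0x80000000 encodes as 0x80 00 00 00
// comparing a and b as unsigned bytes now yields a < b, matching -1 < 0
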
@@ -55,15 +55,6 @@ public final class RamUsageEstimator {
  /** No instantiation. */
  private RamUsageEstimator() {}

  public final static int NUM_BYTES_BOOLEAN = 1;
  public final static int NUM_BYTES_BYTE = 1;
  public final static int NUM_BYTES_CHAR = 2;
  public final static int NUM_BYTES_SHORT = 2;
  public final static int NUM_BYTES_INT = 4;
  public final static int NUM_BYTES_FLOAT = 4;
  public final static int NUM_BYTES_LONG = 8;
  public final static int NUM_BYTES_DOUBLE = 8;

  /**
   * True, iff compressed references (oops) are enabled by this JVM
   */

@@ -95,14 +86,14 @@ public final class RamUsageEstimator {
   */
  private static final Map<Class<?>,Integer> primitiveSizes = new IdentityHashMap<>();
  static {
    primitiveSizes.put(boolean.class, Integer.valueOf(NUM_BYTES_BOOLEAN));
    primitiveSizes.put(byte.class, Integer.valueOf(NUM_BYTES_BYTE));
    primitiveSizes.put(char.class, Integer.valueOf(NUM_BYTES_CHAR));
    primitiveSizes.put(short.class, Integer.valueOf(NUM_BYTES_SHORT));
    primitiveSizes.put(int.class, Integer.valueOf(NUM_BYTES_INT));
    primitiveSizes.put(float.class, Integer.valueOf(NUM_BYTES_FLOAT));
    primitiveSizes.put(double.class, Integer.valueOf(NUM_BYTES_DOUBLE));
    primitiveSizes.put(long.class, Integer.valueOf(NUM_BYTES_LONG));
    primitiveSizes.put(boolean.class, 1);
    primitiveSizes.put(byte.class, 1);
    primitiveSizes.put(char.class, Integer.valueOf(Character.BYTES));
    primitiveSizes.put(short.class, Integer.valueOf(Short.BYTES));
    primitiveSizes.put(int.class, Integer.valueOf(Integer.BYTES));
    primitiveSizes.put(float.class, Integer.valueOf(Float.BYTES));
    primitiveSizes.put(double.class, Integer.valueOf(Double.BYTES));
    primitiveSizes.put(long.class, Integer.valueOf(Long.BYTES));
  }

  /**

@@ -165,7 +156,7 @@ public final class RamUsageEstimator {
      // "best guess" based on reference size:
      NUM_BYTES_OBJECT_HEADER = 8 + NUM_BYTES_OBJECT_REF;
      // array header is NUM_BYTES_OBJECT_HEADER + NUM_BYTES_INT, but aligned (object alignment):
      NUM_BYTES_ARRAY_HEADER = (int) alignObjectSize(NUM_BYTES_OBJECT_HEADER + NUM_BYTES_INT);
      NUM_BYTES_ARRAY_HEADER = (int) alignObjectSize(NUM_BYTES_OBJECT_HEADER + Integer.BYTES);
    } else {
      JVM_IS_HOTSPOT_64BIT = false;
      COMPRESSED_REFS_ENABLED = false;

@@ -173,7 +164,7 @@ public final class RamUsageEstimator {
      NUM_BYTES_OBJECT_REF = 4;
      NUM_BYTES_OBJECT_HEADER = 8;
      // For 32 bit JVMs, no extra alignment of array header:
      NUM_BYTES_ARRAY_HEADER = NUM_BYTES_OBJECT_HEADER + NUM_BYTES_INT;
      NUM_BYTES_ARRAY_HEADER = NUM_BYTES_OBJECT_HEADER + Integer.BYTES;
    }

    // get min/max value of cached Long class instances:

@@ -223,32 +214,32 @@ public final class RamUsageEstimator {

  /** Returns the size in bytes of the char[] object. */
  public static long sizeOf(char[] arr) {
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_CHAR * arr.length);
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) Character.BYTES * arr.length);
  }

  /** Returns the size in bytes of the short[] object. */
  public static long sizeOf(short[] arr) {
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_SHORT * arr.length);
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) Short.BYTES * arr.length);
  }

  /** Returns the size in bytes of the int[] object. */
  public static long sizeOf(int[] arr) {
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_INT * arr.length);
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) Integer.BYTES * arr.length);
  }

  /** Returns the size in bytes of the float[] object. */
  public static long sizeOf(float[] arr) {
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_FLOAT * arr.length);
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) Float.BYTES * arr.length);
  }

  /** Returns the size in bytes of the long[] object. */
  public static long sizeOf(long[] arr) {
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_LONG * arr.length);
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) Long.BYTES * arr.length);
  }

  /** Returns the size in bytes of the double[] object. */
  public static long sizeOf(double[] arr) {
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_DOUBLE * arr.length);
    return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) Double.BYTES * arr.length);
  }

  /** Returns the shallow size in bytes of the Object[] object. */

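The replacement constants come from the JDK itself: since Java 8, each boxed primitive class exposes a BYTES constant, so the local NUM_BYTES_* values removed above are redundant (boolean is the exception, accounted as a literal 1):

// The JDK constants used as replacements (all fixed by the Java language spec):
// Character.BYTES == 2, Short.BYTES == 2, Integer.BYTES == 4,
// Float.BYTES == 4, Long.BYTES == 8, Double.BYTES == 8, Byte.BYTES == 1.
// boolean has no BYTES constant, hence the literal 1 in the map above.
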
@@ -78,7 +78,7 @@ public final class RecyclingIntBlockAllocator extends Allocator {
  @Override
  public int[] getIntBlock() {
    if (freeBlocks == 0) {
      bytesUsed.addAndGet(blockSize*RamUsageEstimator.NUM_BYTES_INT);
      bytesUsed.addAndGet(blockSize*Integer.BYTES);
      return new int[blockSize];
    }
    final int[] b = freeByteBlocks[--freeBlocks];

@@ -104,7 +104,7 @@ public final class RecyclingIntBlockAllocator extends Allocator {
    for (int i = stop; i < end; i++) {
      blocks[i] = null;
    }
    bytesUsed.addAndGet(-(end - stop) * (blockSize * RamUsageEstimator.NUM_BYTES_INT));
    bytesUsed.addAndGet(-(end - stop) * (blockSize * Integer.BYTES));
    assert bytesUsed.get() >= 0;
  }

@@ -150,7 +150,7 @@ public final class RecyclingIntBlockAllocator extends Allocator {
    while (freeBlocks > stop) {
      freeByteBlocks[--freeBlocks] = null;
    }
    bytesUsed.addAndGet(-count*blockSize* RamUsageEstimator.NUM_BYTES_INT);
    bytesUsed.addAndGet(-count*blockSize*Integer.BYTES);
    assert bytesUsed.get() >= 0;
    return count;
  }

@@ -149,7 +149,7 @@ public class SentinelIntSet {
  /** Return the memory footprint of this class in bytes. */
  public long ramBytesUsed() {
    return RamUsageEstimator.alignObjectSize(
          RamUsageEstimator.NUM_BYTES_INT * 3
          Integer.BYTES * 3
        + RamUsageEstimator.NUM_BYTES_OBJECT_REF)
        + RamUsageEstimator.sizeOf(keys);
  }

@@ -893,7 +893,7 @@ public class Automaton implements Accountable {
    return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.sizeOf(states) + RamUsageEstimator.sizeOf(transitions) +
        RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + (isAccept.size() / 8) + RamUsageEstimator.NUM_BYTES_OBJECT_REF +
        2 * RamUsageEstimator.NUM_BYTES_OBJECT_REF +
        3 * RamUsageEstimator.NUM_BYTES_INT +
        RamUsageEstimator.NUM_BYTES_BOOLEAN;
        3 * Integer.BYTES +
        1;
  }
}

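For the constant tail of the estimate above, the replacement is byte-for-byte identical: three int fields at Integer.BYTES each, plus one boolean accounted as a single byte:

// 3 * Integer.BYTES + 1  ==  3 * 4 + 1  ==  13 bytes,
// the same total as the old 3 * NUM_BYTES_INT + NUM_BYTES_BOOLEAN.
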
@@ -16,11 +16,9 @@
 */
package org.apache.lucene.util.automaton;

import java.util.TreeMap;
import java.util.Map;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;

// Just holds a set of int[] states, plus a corresponding
// int[] count per state.  Used by

@@ -135,7 +133,7 @@ final class SortedIntSet {
  public void computeHash() {
    if (useTreeMap) {
      if (map.size() > values.length) {
        final int size = ArrayUtil.oversize(map.size(), RamUsageEstimator.NUM_BYTES_INT);
        final int size = ArrayUtil.oversize(map.size(), Integer.BYTES);
        values = new int[size];
        counts = new int[size];
      }

@@ -16,7 +16,6 @@
 */
package org.apache.lucene.util.bkd;

import java.io.IOException;
import java.util.Arrays;

@@ -27,7 +26,6 @@ import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.StringHelper;

/** Handles intersection of a multi-dimensional shape in byte[] space with a block KD-tree previously written with {@link BKDWriter}.

@@ -415,7 +413,7 @@ public class BKDReader implements Accountable {
  @Override
  public long ramBytesUsed() {
    return splitPackedValues.length +
        leafBlockFPs.length * RamUsageEstimator.NUM_BYTES_LONG;
        leafBlockFPs.length * Long.BYTES;
  }

  public byte[] getMinPackedValue() {

@@ -16,7 +16,6 @@
 */
package org.apache.lucene.util.bkd;

import java.io.Closeable;
import java.io.EOFException;
import java.io.IOException;

@@ -40,9 +39,7 @@ import org.apache.lucene.util.IntroSorter;
import org.apache.lucene.util.LongBitSet;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.OfflineSorter;
import org.apache.lucene.util.OfflineSorter.ByteSequencesWriter;
import org.apache.lucene.util.PriorityQueue;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.StringHelper;

// TODO

@@ -152,7 +149,7 @@ public class BKDWriter implements Closeable {
    maxPackedValue = new byte[packedBytesLength];

    // dimensional values (numDims * bytesPerDim) + ord (long) + docID (int)
    bytesPerDoc = packedBytesLength + RamUsageEstimator.NUM_BYTES_LONG + RamUsageEstimator.NUM_BYTES_INT;
    bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES;

    // As we recurse, we compute temporary partitions of the data, halving the
    // number of points at each recursion.  Once there are few enough points,

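A worked example of the bytesPerDoc arithmetic above (hypothetical dimensions, not part of the patch):

// Illustrative sketch, not part of the patch: per-document record size for a
// hypothetical 2-dimension field with 4 bytes per dimension:
int numDims = 2, bytesPerDim = 4;
int packedBytesLength = numDims * bytesPerDim;                    // 8 bytes of point data
int bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES; // 8 + 8 + 4 = 20 bytes
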
@@ -16,12 +16,10 @@
 */
package org.apache.lucene.util.bkd;

import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;

final class HeapPointWriter implements PointWriter {
  int[] docIDs;

@@ -94,7 +92,7 @@ final class HeapPointWriter implements PointWriter {
    assert closed == false;
    assert packedValue.length == packedBytesLength;
    if (ords.length == nextWrite) {
      int nextSize = Math.min(maxSize, ArrayUtil.oversize(nextWrite+1, RamUsageEstimator.NUM_BYTES_INT));
      int nextSize = Math.min(maxSize, ArrayUtil.oversize(nextWrite+1, Integer.BYTES));
      assert nextSize > nextWrite: "nextSize=" + nextSize + " vs nextWrite=" + nextWrite;
      ords = growExact(ords, nextSize);
      docIDs = growExact(docIDs, nextSize);

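Note that the growth above is capped: oversize() proposes an amortized larger size, and Math.min clamps it to maxSize so a nearly full writer never allocates past its fixed capacity. A sketch of that clamp (illustrative, not part of the patch):

// Illustrative sketch, not part of the patch: amortized growth clamped to a cap.
int maxSize = 1000, nextWrite = 990;
int nextSize = Math.min(maxSize, ArrayUtil.oversize(nextWrite + 1, Integer.BYTES));
// oversize() may suggest more than 1000 slots, but nextSize is clamped to 1000 here.
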
@@ -16,15 +16,12 @@
 */
package org.apache.lucene.util.bkd;

import java.io.EOFException;
import java.io.IOException;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.RamUsageEstimator;

/** Reads points from disk in a fixed-width format, previously written with {@link OfflinePointWriter}. */
final class OfflinePointReader implements PointReader {

@@ -41,7 +38,7 @@ final class OfflinePointReader implements PointReader {

  private OfflinePointReader(IndexInput in, int packedBytesLength, long start, long length) throws IOException {
    this.in = in;
    bytesPerDoc = packedBytesLength + RamUsageEstimator.NUM_BYTES_LONG + RamUsageEstimator.NUM_BYTES_INT;
    bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES;
    long seekFP = start * bytesPerDoc;
    in.seek(seekFP);
    this.countLeft = length;

@@ -16,13 +16,11 @@
  */
 package org.apache.lucene.util.bkd;

-
 import java.io.IOException;

 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexOutput;
-import org.apache.lucene.util.RamUsageEstimator;

 /** Writes points to disk in a fixed-width format. */
 final class OfflinePointWriter implements PointWriter {
@@ -38,7 +36,7 @@ final class OfflinePointWriter implements PointWriter {
     this.out = tempDir.createTempOutput(tempFileNamePrefix, "bkd", IOContext.DEFAULT);
     this.tempDir = tempDir;
     this.packedBytesLength = packedBytesLength;
-    bytesPerDoc = packedBytesLength + RamUsageEstimator.NUM_BYTES_LONG + RamUsageEstimator.NUM_BYTES_INT;
+    bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES;
   }

   /** Initializes on an already written/closed file, just so consumers can use {@link #getReader} to read the file. */
@@ -46,7 +44,7 @@ final class OfflinePointWriter implements PointWriter {
     this.out = out;
     this.tempDir = tempDir;
     this.packedBytesLength = packedBytesLength;
-    bytesPerDoc = packedBytesLength + RamUsageEstimator.NUM_BYTES_LONG + RamUsageEstimator.NUM_BYTES_INT;
+    bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES;
     this.count = count;
     closed = true;
   }
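Both offline classes derive the same bytesPerDoc because they share one fixed-width record layout: the packed dimensional values, then the ord as a long, then the docID as an int. That is what lets the reader seek straight to a record. A worked example under assumed sizes:

    int packedBytesLength = 8;                                         // assumed: one 8-byte dimension
    int bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES;  // 20 bytes per record
    long start = 1_000;
    long seekFP = start * bytesPerDoc;  // record 1000 begins at byte 20,000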
@@ -101,8 +101,8 @@ abstract class AbstractPagedMutable<T extends AbstractPagedMutable<T>> extends L
   protected long baseRamBytesUsed() {
     return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
         + RamUsageEstimator.NUM_BYTES_OBJECT_REF
-        + RamUsageEstimator.NUM_BYTES_LONG
-        + 3 * RamUsageEstimator.NUM_BYTES_INT;
+        + Long.BYTES
+        + 3 * Integer.BYTES;
   }

   @Override
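baseRamBytesUsed() hand-tallies the shallow size of the object: header, references, then primitive fields. A rough worked tally in the same style, assuming a 16-byte object header and 8-byte references (both JVM-dependent, which is exactly why header and reference sizes stay in RamUsageEstimator while the primitive sizes can move to Java's constants):

    long base = 16                   // object header (assumed)
              + 8                    // one object reference (assumed)
              + Long.BYTES           // one long field
              + 3 * Integer.BYTES;   // three int fields
    // => 16 + 8 + 8 + 12 = 44 bytes before alignment padding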
@@ -62,7 +62,7 @@ final class Direct16 extends PackedInts.MutableImpl {
   public long ramBytesUsed() {
     return RamUsageEstimator.alignObjectSize(
         RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
-        + 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue
+        + 2 * Integer.BYTES // valueCount,bitsPerValue
         + RamUsageEstimator.NUM_BYTES_OBJECT_REF) // values ref
         + RamUsageEstimator.sizeOf(values);
   }

@@ -62,7 +62,7 @@ final class Direct32 extends PackedInts.MutableImpl {
   public long ramBytesUsed() {
     return RamUsageEstimator.alignObjectSize(
         RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
-        + 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue
+        + 2 * Integer.BYTES // valueCount,bitsPerValue
         + RamUsageEstimator.NUM_BYTES_OBJECT_REF) // values ref
         + RamUsageEstimator.sizeOf(values);
   }

@@ -57,7 +57,7 @@ final class Direct64 extends PackedInts.MutableImpl {
   public long ramBytesUsed() {
     return RamUsageEstimator.alignObjectSize(
         RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
-        + 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue
+        + 2 * Integer.BYTES // valueCount,bitsPerValue
         + RamUsageEstimator.NUM_BYTES_OBJECT_REF) // values ref
         + RamUsageEstimator.sizeOf(values);
   }

@@ -60,7 +60,7 @@ final class Direct8 extends PackedInts.MutableImpl {
   public long ramBytesUsed() {
     return RamUsageEstimator.alignObjectSize(
         RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
-        + 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue
+        + 2 * Integer.BYTES // valueCount,bitsPerValue
         + RamUsageEstimator.NUM_BYTES_OBJECT_REF) // values ref
         + RamUsageEstimator.sizeOf(values);
   }
@@ -131,8 +131,8 @@ public class GrowableWriter extends PackedInts.Mutable {
     return RamUsageEstimator.alignObjectSize(
         RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
         + RamUsageEstimator.NUM_BYTES_OBJECT_REF
-        + RamUsageEstimator.NUM_BYTES_LONG
-        + RamUsageEstimator.NUM_BYTES_FLOAT)
+        + Long.BYTES
+        + Float.BYTES)
         + current.ramBytesUsed();
   }

@@ -112,7 +112,7 @@ final class Packed16ThreeBlocks extends PackedInts.MutableImpl {
   public long ramBytesUsed() {
     return RamUsageEstimator.alignObjectSize(
         RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
-        + 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue
+        + 2 * Integer.BYTES // valueCount,bitsPerValue
         + RamUsageEstimator.NUM_BYTES_OBJECT_REF) // blocks ref
         + RamUsageEstimator.sizeOf(blocks);
   }

@@ -246,8 +246,8 @@ class Packed64 extends PackedInts.MutableImpl {
   public long ramBytesUsed() {
     return RamUsageEstimator.alignObjectSize(
         RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
-        + 3 * RamUsageEstimator.NUM_BYTES_INT // bpvMinusBlockSize,valueCount,bitsPerValue
-        + RamUsageEstimator.NUM_BYTES_LONG // maskRight
+        + 3 * Integer.BYTES // bpvMinusBlockSize,valueCount,bitsPerValue
+        + Long.BYTES // maskRight
         + RamUsageEstimator.NUM_BYTES_OBJECT_REF) // blocks ref
         + RamUsageEstimator.sizeOf(blocks);
   }

@@ -61,7 +61,7 @@ abstract class Packed64SingleBlock extends PackedInts.MutableImpl {
   public long ramBytesUsed() {
     return RamUsageEstimator.alignObjectSize(
         RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
-        + 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue
+        + 2 * Integer.BYTES // valueCount,bitsPerValue
         + RamUsageEstimator.NUM_BYTES_OBJECT_REF) // blocks ref
         + RamUsageEstimator.sizeOf(blocks);
   }

@@ -110,7 +110,7 @@ final class Packed8ThreeBlocks extends PackedInts.MutableImpl {
   public long ramBytesUsed() {
     return RamUsageEstimator.alignObjectSize(
         RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
-        + 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue
+        + 2 * Integer.BYTES // valueCount,bitsPerValue
         + RamUsageEstimator.NUM_BYTES_OBJECT_REF) // blocks ref
         + RamUsageEstimator.sizeOf(blocks);
   }

@@ -681,7 +681,7 @@ public class PackedInts {

     @Override
     public long ramBytesUsed() {
-      return RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_INT);
+      return RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + Integer.BYTES);
     }
   }

@@ -16,8 +16,6 @@
  */
 package org.apache.lucene.util.packed;

-
-import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.lucene.util.packed.PackedInts.Mutable;

 /**
@@ -65,7 +63,7 @@ public final class PagedGrowableWriter extends AbstractPagedMutable<PagedGrowabl

   @Override
   protected long baseRamBytesUsed() {
-    return super.baseRamBytesUsed() + RamUsageEstimator.NUM_BYTES_FLOAT;
+    return super.baseRamBytesUsed() + Float.BYTES;
   }

 }
@@ -74,6 +74,9 @@ public class TestMergeSchedulerExternal extends LuceneTestCase {
     @Override
     protected void handleMergeException(Directory dir, Throwable t) {
       excCalled = true;
+      if (infoStream.isEnabled("IW")) {
+        infoStream.message("IW", "TEST: now handleMergeException");
+      }
     }

     @Override
@@ -1352,7 +1352,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase {
         max = x;
       }

-      TopDocs hits = s.search(PointRangeQuery.new1DLongRange("number", min, true, max, true), 100);
+      TopDocs hits = s.search(PointRangeQuery.newLongRange("number", min, true, max, true), 100);
       for(ScoreDoc scoreDoc : hits.scoreDocs) {
         long value = Long.parseLong(s.doc(scoreDoc.doc).get("text").split(" ")[1]);
         assertTrue(value >= min);
@@ -22,7 +22,6 @@ import java.util.List;
 import org.apache.lucene.util.Counter;
 import org.apache.lucene.util.IntBlockPool;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.RamUsageEstimator;

 /**
  * tests basic {@link IntBlockPool} functionality
@@ -53,8 +52,7 @@ public class TestIntBlockPool extends LuceneTestCase {
       assertEquals(0, bytesUsed.get());
     } else {
       pool.reset(true, true);
-      assertEquals(IntBlockPool.INT_BLOCK_SIZE
-          * RamUsageEstimator.NUM_BYTES_INT, bytesUsed.get());
+      assertEquals(IntBlockPool.INT_BLOCK_SIZE * Integer.BYTES, bytesUsed.get());
     }
   }
 }
@@ -98,8 +96,7 @@ public class TestIntBlockPool extends LuceneTestCase {
       assertEquals(0, bytesUsed.get());
     } else {
       pool.reset(true, true);
-      assertEquals(IntBlockPool.INT_BLOCK_SIZE
-          * RamUsageEstimator.NUM_BYTES_INT, bytesUsed.get());
+      assertEquals(IntBlockPool.INT_BLOCK_SIZE * Integer.BYTES, bytesUsed.get());
     }
   }
 }
@@ -118,14 +115,14 @@ public class TestIntBlockPool extends LuceneTestCase {

     @Override
     public int[] getIntBlock() {
-      bytesUsed.addAndGet(blockSize * RamUsageEstimator.NUM_BYTES_INT);
+      bytesUsed.addAndGet(blockSize * Integer.BYTES);
       return new int[blockSize];
     }

     @Override
     public void recycleIntBlocks(int[][] blocks, int start, int end) {
       bytesUsed
-          .addAndGet(-((end - start) * blockSize * RamUsageEstimator.NUM_BYTES_INT));
+          .addAndGet(-((end - start) * blockSize * Integer.BYTES));
     }

   }
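The allocator under test meters pool memory in whole int blocks: each block costs blockSize * Integer.BYTES, and recycling credits the same amount back. A minimal standalone sketch of that bookkeeping, assuming an 8192-slot block (the stock IntBlockPool.INT_BLOCK_SIZE is an assumption here; check the real constant):

    import java.util.concurrent.atomic.AtomicLong;

    class CountingIntBlockAllocator {
      static final int BLOCK_SIZE = 8192;             // assumed block size
      final AtomicLong bytesUsed = new AtomicLong();

      int[] newBlock() {
        bytesUsed.addAndGet((long) BLOCK_SIZE * Integer.BYTES);  // 8192 * 4 = 32768
        return new int[BLOCK_SIZE];
      }

      void recycle(int blockCount) {
        bytesUsed.addAndGet(-(long) blockCount * BLOCK_SIZE * Integer.BYTES);
      }
    }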
@@ -56,7 +56,6 @@ import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.index.SegmentWriteState;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
@@ -305,7 +304,7 @@ public class TestPointQueries extends LuceneTestCase {
     }

     if (random().nextBoolean()) {
-      query = PointRangeQuery.new1DLongRange("sn_value", lower, includeLower, upper, includeUpper);
+      query = PointRangeQuery.newLongRange("sn_value", lower, includeLower, upper, includeUpper);
     } else {
       byte[] lowerBytes;
       if (lower == null) {
@@ -321,7 +320,7 @@ public class TestPointQueries extends LuceneTestCase {
         upperBytes = new byte[8];
         NumericUtils.longToBytes(upper, upperBytes, 0);
       }
-      query = PointRangeQuery.new1DBinaryRange("ss_value", lowerBytes, includeLower, upperBytes, includeUpper);
+      query = PointRangeQuery.newBinaryRange("ss_value", lowerBytes, includeLower, upperBytes, includeUpper);
     }

     if (VERBOSE) {
@@ -737,9 +736,9 @@ public class TestPointQueries extends LuceneTestCase {

     IndexSearcher s = newSearcher(r);

-    assertEquals(1, s.count(PointRangeQuery.new1DLongRange("value", Long.MIN_VALUE, true, 0L, true)));
-    assertEquals(1, s.count(PointRangeQuery.new1DLongRange("value", 0L, true, Long.MAX_VALUE, true)));
-    assertEquals(2, s.count(PointRangeQuery.new1DLongRange("value", Long.MIN_VALUE, true, Long.MAX_VALUE, true)));
+    assertEquals(1, s.count(PointRangeQuery.newLongRange("value", Long.MIN_VALUE, true, 0L, true)));
+    assertEquals(1, s.count(PointRangeQuery.newLongRange("value", 0L, true, Long.MAX_VALUE, true)));
+    assertEquals(2, s.count(PointRangeQuery.newLongRange("value", Long.MIN_VALUE, true, Long.MAX_VALUE, true)));

     IOUtils.close(r, w, dir);
   }
@@ -775,47 +774,47 @@ public class TestPointQueries extends LuceneTestCase {

     IndexSearcher s = newSearcher(r);

-    assertEquals(1, s.count(PointRangeQuery.new1DBinaryRange("value",
+    assertEquals(1, s.count(PointRangeQuery.newBinaryRange("value",
         toUTF8("aaa"),
         true,
         toUTF8("bbb"),
         true)));
-    assertEquals(1, s.count(PointRangeQuery.new1DBinaryRange("value",
+    assertEquals(1, s.count(PointRangeQuery.newBinaryRange("value",
        toUTF8("c", 3),
        true,
        toUTF8("e", 3),
        true)));
-    assertEquals(2, s.count(PointRangeQuery.new1DBinaryRange("value",
+    assertEquals(2, s.count(PointRangeQuery.newBinaryRange("value",
        toUTF8("a", 3),
        true,
        toUTF8("z", 3),
        true)));
-    assertEquals(1, s.count(PointRangeQuery.new1DBinaryRange("value",
+    assertEquals(1, s.count(PointRangeQuery.newBinaryRange("value",
        null,
        true,
        toUTF8("abc"),
        true)));
-    assertEquals(1, s.count(PointRangeQuery.new1DBinaryRange("value",
+    assertEquals(1, s.count(PointRangeQuery.newBinaryRange("value",
        toUTF8("a", 3),
        true,
        toUTF8("abc"),
        true)));
-    assertEquals(0, s.count(PointRangeQuery.new1DBinaryRange("value",
+    assertEquals(0, s.count(PointRangeQuery.newBinaryRange("value",
        toUTF8("a", 3),
        true,
        toUTF8("abc"),
        false)));
-    assertEquals(1, s.count(PointRangeQuery.new1DBinaryRange("value",
+    assertEquals(1, s.count(PointRangeQuery.newBinaryRange("value",
        toUTF8("def"),
        true,
        null,
        false)));
-    assertEquals(1, s.count(PointRangeQuery.new1DBinaryRange("value",
+    assertEquals(1, s.count(PointRangeQuery.newBinaryRange("value",
        toUTF8(("def")),
        true,
        toUTF8("z", 3),
        true)));
-    assertEquals(0, s.count(PointRangeQuery.new1DBinaryRange("value",
+    assertEquals(0, s.count(PointRangeQuery.newBinaryRange("value",
        toUTF8("def"),
        false,
        toUTF8("z", 3),
@@ -840,12 +839,12 @@ public class TestPointQueries extends LuceneTestCase {

     IndexSearcher s = newSearcher(r);

-    assertEquals(2, s.count(PointRangeQuery.new1DLongRange("value", Long.MIN_VALUE, true, Long.MAX_VALUE, true)));
-    assertEquals(1, s.count(PointRangeQuery.new1DLongRange("value", Long.MIN_VALUE, true, Long.MAX_VALUE, false)));
-    assertEquals(1, s.count(PointRangeQuery.new1DLongRange("value", Long.MIN_VALUE, false, Long.MAX_VALUE, true)));
-    assertEquals(0, s.count(PointRangeQuery.new1DLongRange("value", Long.MIN_VALUE, false, Long.MAX_VALUE, false)));
+    assertEquals(2, s.count(PointRangeQuery.newLongRange("value", Long.MIN_VALUE, true, Long.MAX_VALUE, true)));
+    assertEquals(1, s.count(PointRangeQuery.newLongRange("value", Long.MIN_VALUE, true, Long.MAX_VALUE, false)));
+    assertEquals(1, s.count(PointRangeQuery.newLongRange("value", Long.MIN_VALUE, false, Long.MAX_VALUE, true)));
+    assertEquals(0, s.count(PointRangeQuery.newLongRange("value", Long.MIN_VALUE, false, Long.MAX_VALUE, false)));

-    assertEquals(2, s.count(PointRangeQuery.new1DBinaryRange("value", (byte[]) null, true, null, true)));
+    assertEquals(2, s.count(PointRangeQuery.newBinaryRange("value", (byte[]) null, true, null, true)));

     IOUtils.close(r, w, dir);
   }
@@ -867,12 +866,12 @@ public class TestPointQueries extends LuceneTestCase {
     // We can't wrap with "exotic" readers because the query must see the RangeTreeDVFormat:
     IndexSearcher s = newSearcher(r, false);

-    assertEquals(2, s.count(PointRangeQuery.new1DLongRange("value", Long.MIN_VALUE, true, Long.MAX_VALUE, true)));
-    assertEquals(1, s.count(PointRangeQuery.new1DLongRange("value", Long.MIN_VALUE, true, Long.MAX_VALUE, false)));
-    assertEquals(1, s.count(PointRangeQuery.new1DLongRange("value", Long.MIN_VALUE, false, Long.MAX_VALUE, true)));
-    assertEquals(0, s.count(PointRangeQuery.new1DLongRange("value", Long.MIN_VALUE, false, Long.MAX_VALUE, false)));
+    assertEquals(2, s.count(PointRangeQuery.newLongRange("value", Long.MIN_VALUE, true, Long.MAX_VALUE, true)));
+    assertEquals(1, s.count(PointRangeQuery.newLongRange("value", Long.MIN_VALUE, true, Long.MAX_VALUE, false)));
+    assertEquals(1, s.count(PointRangeQuery.newLongRange("value", Long.MIN_VALUE, false, Long.MAX_VALUE, true)));
+    assertEquals(0, s.count(PointRangeQuery.newLongRange("value", Long.MIN_VALUE, false, Long.MAX_VALUE, false)));

-    assertEquals(2, s.count(PointRangeQuery.new1DLongRange("value", (Long) null, true, null, true)));
+    assertEquals(2, s.count(PointRangeQuery.newLongRange("value", (Long) null, true, null, true)));

     IOUtils.close(r, w, dir);
   }
@@ -892,9 +891,9 @@ public class TestPointQueries extends LuceneTestCase {
     IndexReader r = w.getReader();

     IndexSearcher s = newSearcher(r);
-    assertEquals(0, s.count(PointRangeQuery.new1DBinaryRange("value", toUTF8("m"), true, toUTF8("n"), false)));
+    assertEquals(0, s.count(PointRangeQuery.newBinaryRange("value", toUTF8("m"), true, toUTF8("n"), false)));

-    assertEquals(2, s.count(PointRangeQuery.new1DBinaryRange("value", (byte[]) null, true, null, true)));
+    assertEquals(2, s.count(PointRangeQuery.newBinaryRange("value", (byte[]) null, true, null, true)));

     IOUtils.close(r, w, dir);
   }
@@ -914,7 +913,7 @@ public class TestPointQueries extends LuceneTestCase {
     IndexReader r = w.getReader();

     IndexSearcher s = new IndexSearcher(r);
-    assertEquals(0, s.count(PointRangeQuery.new1DLongRange("value", 17L, true, 13L, false)));
+    assertEquals(0, s.count(PointRangeQuery.newLongRange("value", 17L, true, 13L, false)));

     IOUtils.close(r, w, dir);
   }
@@ -929,7 +928,7 @@ public class TestPointQueries extends LuceneTestCase {
     IndexReader r = w.getReader();

     IndexSearcher s = newSearcher(r);
-    assertEquals(0, s.count(PointRangeQuery.new1DLongRange("value", 17L, true, 13L, false)));
+    assertEquals(0, s.count(PointRangeQuery.newLongRange("value", 17L, true, 13L, false)));

     IOUtils.close(r, w, dir);
   }
@@ -1045,7 +1044,7 @@ public class TestPointQueries extends LuceneTestCase {
     }
   }

-  public void testExactPointQuery() throws Exception {
+  public void testExactPoints() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig();
     iwc.setCodec(getCodec());
@@ -1069,19 +1068,50 @@ public class TestPointQueries extends LuceneTestCase {

     IndexReader r = DirectoryReader.open(w);
     IndexSearcher s = newSearcher(r);
-    assertEquals(1, s.count(ExactPointQuery.new1DIntExact("int", 42)));
-    assertEquals(0, s.count(ExactPointQuery.new1DIntExact("int", 41)));
+    assertEquals(1, s.count(PointRangeQuery.newIntExact("int", 42)));
+    assertEquals(0, s.count(PointRangeQuery.newIntExact("int", 41)));

-    assertEquals(1, s.count(ExactPointQuery.new1DLongExact("long", 5L)));
-    assertEquals(0, s.count(ExactPointQuery.new1DLongExact("long", -1L)));
+    assertEquals(1, s.count(PointRangeQuery.newLongExact("long", 5L)));
+    assertEquals(0, s.count(PointRangeQuery.newLongExact("long", -1L)));

-    assertEquals(1, s.count(ExactPointQuery.new1DFloatExact("float", 2.0f)));
-    assertEquals(0, s.count(ExactPointQuery.new1DFloatExact("float", 1.0f)));
+    assertEquals(1, s.count(PointRangeQuery.newFloatExact("float", 2.0f)));
+    assertEquals(0, s.count(PointRangeQuery.newFloatExact("float", 1.0f)));

-    assertEquals(1, s.count(ExactPointQuery.new1DDoubleExact("double", 1.0)));
-    assertEquals(0, s.count(ExactPointQuery.new1DDoubleExact("double", 2.0)));
+    assertEquals(1, s.count(PointRangeQuery.newDoubleExact("double", 1.0)));
+    assertEquals(0, s.count(PointRangeQuery.newDoubleExact("double", 2.0)));
     w.close();
     r.close();
     dir.close();
   }

+  public void testToString() throws Exception {
+
+    // ints
+    assertEquals("field:[1 TO 2}", PointRangeQuery.newIntRange("field", 1, true, 2, false).toString());
+    assertEquals("field:{-2 TO 1]", PointRangeQuery.newIntRange("field", -2, false, 1, true).toString());
+    assertEquals("field:[* TO 2}", PointRangeQuery.newIntRange("field", null, true, 2, false).toString());
+
+    // longs
+    assertEquals("field:[1099511627776 TO 2199023255552}", PointRangeQuery.newLongRange("field", 1L<<40, true, 1L<<41, false).toString());
+    assertEquals("field:{-5 TO 6]", PointRangeQuery.newLongRange("field", -5L, false, 6L, true).toString());
+    assertEquals("field:[* TO 2}", PointRangeQuery.newLongRange("field", null, true, 2L, false).toString());
+
+    // floats
+    assertEquals("field:[1.3 TO 2.5}", PointRangeQuery.newFloatRange("field", 1.3F, true, 2.5F, false).toString());
+    assertEquals("field:{-2.9 TO 1.0]", PointRangeQuery.newFloatRange("field", -2.9F, false, 1.0F, true).toString());
+    assertEquals("field:{-2.9 TO *]", PointRangeQuery.newFloatRange("field", -2.9F, false, null, true).toString());
+
+    // doubles
+    assertEquals("field:[1.3 TO 2.5}", PointRangeQuery.newDoubleRange("field", 1.3, true, 2.5, false).toString());
+    assertEquals("field:{-2.9 TO 1.0]", PointRangeQuery.newDoubleRange("field", -2.9, false, 1.0, true).toString());
+    assertEquals("field:{-2.9 TO *]", PointRangeQuery.newDoubleRange("field", -2.9, false, null, true).toString());
+
+    // n-dimensional double
+    assertEquals("field:[1.3 TO 2.5},{-2.9 TO 1.0]", PointRangeQuery.newMultiDoubleRange("field",
+        new Double[] { 1.3, -2.9 },
+        new boolean[] { true, false },
+        new Double[] { 2.5, 1.0 },
+        new boolean[] { false, true }).toString());
+
+  }
 }
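The test churn above is the visible edge of an API rename: the 1-D range factories drop their new1D prefix, and the ExactPointQuery factories fold into PointRangeQuery as new*Exact methods. A usage sketch against this snapshot's API, with method names taken only from the calls in this diff (field names are examples):

    // org.apache.lucene.search.Query, built from the renamed factories:
    Query range = PointRangeQuery.newLongRange("timestamp", 0L, true, 100L, false);   // [0 TO 100}
    Query open  = PointRangeQuery.newLongRange("timestamp", null, true, 100L, false); // null bound = open end
    Query exact = PointRangeQuery.newIntExact("count", 42);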
@@ -26,7 +26,7 @@ public class TestUsageTrackingFilterCachingPolicy extends LuceneTestCase {

   public void testCostlyFilter() {
     assertTrue(UsageTrackingQueryCachingPolicy.isCostly(new PrefixQuery(new Term("field", "prefix"))));
-    assertTrue(UsageTrackingQueryCachingPolicy.isCostly(PointRangeQuery.new1DIntRange("intField", 1, true, 1000, true)));
+    assertTrue(UsageTrackingQueryCachingPolicy.isCostly(PointRangeQuery.newIntRange("intField", 1, true, 1000, true)));
     assertFalse(UsageTrackingQueryCachingPolicy.isCostly(new TermQuery(new Term("field", "value"))));
   }

@@ -140,7 +140,7 @@ public class TestBKD extends LuceneTestCase {
           if (values[dim] > maxValue[dim]) {
             maxValue[dim] = values[dim];
           }
-          NumericUtils.intToBytes(values[dim], scratch, dim);
+          NumericUtils.intToBytes(values[dim], scratch, dim * Integer.BYTES);
           if (VERBOSE) {
             System.out.println(" " + dim + " -> " + values[dim]);
           }
@@ -161,8 +161,8 @@ public class TestBKD extends LuceneTestCase {
       byte[] minPackedValue = r.getMinPackedValue();
       byte[] maxPackedValue = r.getMaxPackedValue();
       for(int dim=0;dim<numDims;dim++) {
-        assertEquals(minValue[dim], NumericUtils.bytesToInt(minPackedValue, dim));
-        assertEquals(maxValue[dim], NumericUtils.bytesToInt(maxPackedValue, dim));
+        assertEquals(minValue[dim], NumericUtils.bytesToInt(minPackedValue, dim * Integer.BYTES));
+        assertEquals(maxValue[dim], NumericUtils.bytesToInt(maxPackedValue, dim * Integer.BYTES));
       }

       int iters = atLeast(100);
@@ -196,7 +196,7 @@ public class TestBKD extends LuceneTestCase {
           public void visit(int docID, byte[] packedValue) {
             //System.out.println("visit check docID=" + docID);
             for(int dim=0;dim<numDims;dim++) {
-              int x = NumericUtils.bytesToInt(packedValue, dim);
+              int x = NumericUtils.bytesToInt(packedValue, dim * Integer.BYTES);
               if (x < queryMin[dim] || x > queryMax[dim]) {
                 //System.out.println("  no");
                 return;
@@ -211,8 +211,8 @@ public class TestBKD extends LuceneTestCase {
           public Relation compare(byte[] minPacked, byte[] maxPacked) {
             boolean crosses = false;
             for(int dim=0;dim<numDims;dim++) {
-              int min = NumericUtils.bytesToInt(minPacked, dim);
-              int max = NumericUtils.bytesToInt(maxPacked, dim);
+              int min = NumericUtils.bytesToInt(minPacked, dim * Integer.BYTES);
+              int max = NumericUtils.bytesToInt(maxPacked, dim * Integer.BYTES);
              assert max >= min;

              if (max < queryMin[dim] || min > queryMax[dim]) {
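Note the semantic shift at these call sites: the third argument of NumericUtils.intToBytes/bytesToInt is now a byte offset, so dimension dim starts at dim * Integer.BYTES rather than being addressed by a bare index. The LatLonPoint, Geo3DPoint, and PointIn*Query changes further down follow the same convention. A self-contained sketch, with a hypothetical big-endian helper standing in for NumericUtils:

    // Hypothetical stand-in for NumericUtils.intToBytes (big-endian assumed):
    static void writeIntBE(int value, byte[] dest, int byteOffset) {
      for (int i = 0; i < Integer.BYTES; i++) {
        dest[byteOffset + i] = (byte) (value >>> (Integer.SIZE - 8 - 8 * i));
      }
    }

    // Packing numDims = 3 ints: dim 0 at bytes 0-3, dim 1 at 4-7, dim 2 at 8-11:
    byte[] scratch = new byte[3 * Integer.BYTES];
    for (int dim = 0; dim < 3; dim++) {
      writeIntBE(100 + dim, scratch, dim * Integer.BYTES);
    }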
@@ -181,7 +181,7 @@ public class DistanceFacetsExample implements Closeable {
     BooleanQuery.Builder f = new BooleanQuery.Builder();

     // Add latitude range filter:
-    f.add(PointRangeQuery.new1DDoubleRange("latitude", Math.toDegrees(minLat), true, Math.toDegrees(maxLat), true),
+    f.add(PointRangeQuery.newDoubleRange("latitude", Math.toDegrees(minLat), true, Math.toDegrees(maxLat), true),
           BooleanClause.Occur.FILTER);

     // Add longitude range filter:
@@ -189,13 +189,13 @@ public class DistanceFacetsExample implements Closeable {
       // The bounding box crosses the international date
       // line:
       BooleanQuery.Builder lonF = new BooleanQuery.Builder();
-      lonF.add(PointRangeQuery.new1DDoubleRange("longitude", Math.toDegrees(minLng), true, null, true),
+      lonF.add(PointRangeQuery.newDoubleRange("longitude", Math.toDegrees(minLng), true, null, true),
               BooleanClause.Occur.SHOULD);
-      lonF.add(PointRangeQuery.new1DDoubleRange("longitude", null, true, Math.toDegrees(maxLng), true),
+      lonF.add(PointRangeQuery.newDoubleRange("longitude", null, true, Math.toDegrees(maxLng), true),
               BooleanClause.Occur.SHOULD);
       f.add(lonF.build(), BooleanClause.Occur.MUST);
     } else {
-      f.add(PointRangeQuery.new1DDoubleRange("longitude", Math.toDegrees(minLng), true, Math.toDegrees(maxLng), true),
+      f.add(PointRangeQuery.newDoubleRange("longitude", Math.toDegrees(minLng), true, Math.toDegrees(maxLng), true),
            BooleanClause.Occur.FILTER);
     }

@@ -107,7 +107,7 @@ public class RangeFacetsExample implements Closeable {
     // documents ("browse only"):
     DrillDownQuery q = new DrillDownQuery(getConfig());

-    q.add("timestamp", PointRangeQuery.new1DLongRange("timestamp", range.min, range.minInclusive, range.max, range.maxInclusive));
+    q.add("timestamp", PointRangeQuery.newLongRange("timestamp", range.min, range.minInclusive, range.max, range.maxInclusive));

     return searcher.search(q, 10);
   }
@@ -281,7 +281,7 @@ public class TestRangeFacetCounts extends FacetTestCase {

     // Third search, drill down on "less than or equal to 10":
     ddq = new DrillDownQuery(config);
-    ddq.add("field", PointRangeQuery.new1DLongRange("field", 0L, true, 10L, true));
+    ddq.add("field", PointRangeQuery.newLongRange("field", 0L, true, 10L, true));
     dsr = ds.search(null, ddq, 10);

     assertEquals(11, dsr.hits.totalHits);
@@ -461,9 +461,9 @@ public class TestRangeFacetCounts extends FacetTestCase {
     Query fastMatchQuery;
     if (random().nextBoolean()) {
       if (random().nextBoolean()) {
-        fastMatchQuery = PointRangeQuery.new1DLongRange("field", minValue, true, maxValue, true);
+        fastMatchQuery = PointRangeQuery.newLongRange("field", minValue, true, maxValue, true);
       } else {
-        fastMatchQuery = PointRangeQuery.new1DLongRange("field", minAcceptedValue, true, maxAcceptedValue, true);
+        fastMatchQuery = PointRangeQuery.newLongRange("field", minAcceptedValue, true, maxAcceptedValue, true);
       }
     } else {
       fastMatchQuery = null;
@@ -485,7 +485,7 @@ public class TestRangeFacetCounts extends FacetTestCase {
     // Test drill-down:
     DrillDownQuery ddq = new DrillDownQuery(config);
     if (random().nextBoolean()) {
-      ddq.add("field", PointRangeQuery.new1DLongRange("field", range.min, range.minInclusive, range.max, range.maxInclusive));
+      ddq.add("field", PointRangeQuery.newLongRange("field", range.min, range.minInclusive, range.max, range.maxInclusive));
     } else {
       ddq.add("field", range.getQuery(fastMatchQuery, vs));
     }
@@ -616,9 +616,9 @@ public class TestRangeFacetCounts extends FacetTestCase {
     Query fastMatchQuery;
     if (random().nextBoolean()) {
       if (random().nextBoolean()) {
-        fastMatchQuery = PointRangeQuery.new1DFloatRange("field", minValue, true, maxValue, true);
+        fastMatchQuery = PointRangeQuery.newFloatRange("field", minValue, true, maxValue, true);
       } else {
-        fastMatchQuery = PointRangeQuery.new1DFloatRange("field", minAcceptedValue, true, maxAcceptedValue, true);
+        fastMatchQuery = PointRangeQuery.newFloatRange("field", minAcceptedValue, true, maxAcceptedValue, true);
       }
     } else {
       fastMatchQuery = null;
@@ -640,7 +640,7 @@ public class TestRangeFacetCounts extends FacetTestCase {
     // Test drill-down:
     DrillDownQuery ddq = new DrillDownQuery(config);
     if (random().nextBoolean()) {
-      ddq.add("field", PointRangeQuery.new1DFloatRange("field", (float) range.min, range.minInclusive, (float) range.max, range.maxInclusive));
+      ddq.add("field", PointRangeQuery.newFloatRange("field", (float) range.min, range.minInclusive, (float) range.max, range.maxInclusive));
     } else {
       ddq.add("field", range.getQuery(fastMatchQuery, vs));
     }
@@ -755,9 +755,9 @@ public class TestRangeFacetCounts extends FacetTestCase {
     Query fastMatchFilter;
     if (random().nextBoolean()) {
       if (random().nextBoolean()) {
-        fastMatchFilter = PointRangeQuery.new1DDoubleRange("field", minValue, true, maxValue, true);
+        fastMatchFilter = PointRangeQuery.newDoubleRange("field", minValue, true, maxValue, true);
       } else {
-        fastMatchFilter = PointRangeQuery.new1DDoubleRange("field", minAcceptedValue, true, maxAcceptedValue, true);
+        fastMatchFilter = PointRangeQuery.newDoubleRange("field", minAcceptedValue, true, maxAcceptedValue, true);
       }
     } else {
       fastMatchFilter = null;
@@ -779,7 +779,7 @@ public class TestRangeFacetCounts extends FacetTestCase {
     // Test drill-down:
     DrillDownQuery ddq = new DrillDownQuery(config);
     if (random().nextBoolean()) {
-      ddq.add("field", PointRangeQuery.new1DDoubleRange("field", range.min, range.minInclusive, range.max, range.maxInclusive));
+      ddq.add("field", PointRangeQuery.newDoubleRange("field", range.min, range.minInclusive, range.max, range.maxInclusive));
     } else {
       ddq.add("field", range.getQuery(fastMatchFilter, vs));
     }
@@ -584,7 +584,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte

   public void testDimensionalRangeQuery() throws Exception {
     // doesn't currently highlight, but make sure it doesn't cause exception either
-    query = PointRangeQuery.new1DIntRange(NUMERIC_FIELD_NAME, 2, true, 6, true);
+    query = PointRangeQuery.newIntRange(NUMERIC_FIELD_NAME, 2, true, 6, true);
     searcher = newSearcher(reader);
     hits = searcher.search(query, 100);
     int maxNumFragmentsRequired = 2;
@@ -213,9 +213,9 @@ org.bouncycastle.version = 1.45
 /org.carrot2.attributes/attributes-binder = 1.2.3
 /org.carrot2.shaded/carrot2-guava = 18.0

-/org.carrot2/carrot2-mini = 3.10.4
+/org.carrot2/carrot2-mini = 3.12.0

-org.carrot2.morfologik.version = 2.0.1
+org.carrot2.morfologik.version = 2.1.0
 /org.carrot2/morfologik-fsa = ${org.carrot2.morfologik.version}
 /org.carrot2/morfologik-polish = ${org.carrot2.morfologik.version}
 /org.carrot2/morfologik-stemming = ${org.carrot2.morfologik.version}
@@ -1 +0,0 @@
-23b4c04bb74f80e77573dc3ab84c8b4203f68d50
@@ -0,0 +1 @@
+88e5993f73c102f378c711f6e47221b7a9e22d25
@@ -1,6 +1,6 @@

 Copyright (c) 2006 Dawid Weiss
-Copyright (c) 2007-2015 Dawid Weiss, Marcin Miłkowski
+Copyright (c) 2007-2016 Dawid Weiss, Marcin Miłkowski
 All rights reserved.

 Redistribution and use in source and binary forms, with or without modification,
@@ -1 +0,0 @@
-b35034de153a79d0afeeeee2ff883d548a178961
@@ -0,0 +1 @@
+d5a9c7721bce2ef17444abbe25ac2e65bfaa181f
@@ -1,18 +1,20 @@
 BSD-licensed dictionary of Polish (Morfologik)

-Morfologik Polish dictionary.
-Version: 2.0 PoliMorf
-Copyright (c) 2013, Marcin Miłkowski
+VERSION: 2.1 PoliMorf
+BUILD: 2016-02-13 19:37:50+01:00
+GIT: 6e63b53
+
+Copyright (c) 2016, Marcin Miłkowski
 All rights reserved.

 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions are met:

 1. Redistributions of source code must retain the above copyright notice, this
-   list of conditions and the following disclaimer.
+   list of conditions and the following disclaimer.
 2. Redistributions in binary form must reproduce the above copyright notice,
-   this list of conditions and the following disclaimer in the documentation
-   and/or other materials provided with the distribution.
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.

 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
@@ -23,4 +25,4 @@ ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -1 +0,0 @@
-df9434b431bbed20ded67ede439c7dfb1e29e9f8
@@ -0,0 +1 @@
+94167b64752138a246cc33cbf1a3b0bfe5274b7c
@@ -1,6 +1,6 @@

 Copyright (c) 2006 Dawid Weiss
-Copyright (c) 2007-2015 Dawid Weiss, Marcin Miłkowski
+Copyright (c) 2007-2016 Dawid Weiss, Marcin Miłkowski
 All rights reserved.

 Redistribution and use in source and binary forms, with or without modification,
@@ -225,8 +225,8 @@ public class MemoryIndex {
     this.storePayloads = storePayloads;
     this.bytesUsed = Counter.newCounter();
     final int maxBufferedByteBlocks = (int)((maxReusedBytes/2) / ByteBlockPool.BYTE_BLOCK_SIZE );
-    final int maxBufferedIntBlocks = (int) ((maxReusedBytes - (maxBufferedByteBlocks*ByteBlockPool.BYTE_BLOCK_SIZE))/(IntBlockPool.INT_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT));
-    assert (maxBufferedByteBlocks * ByteBlockPool.BYTE_BLOCK_SIZE) + (maxBufferedIntBlocks * IntBlockPool.INT_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT) <= maxReusedBytes;
+    final int maxBufferedIntBlocks = (int) ((maxReusedBytes - (maxBufferedByteBlocks*ByteBlockPool.BYTE_BLOCK_SIZE))/(IntBlockPool.INT_BLOCK_SIZE * Integer.BYTES));
+    assert (maxBufferedByteBlocks * ByteBlockPool.BYTE_BLOCK_SIZE) + (maxBufferedIntBlocks * IntBlockPool.INT_BLOCK_SIZE * Integer.BYTES) <= maxReusedBytes;
     byteBlockPool = new ByteBlockPool(new RecyclingByteBlockAllocator(ByteBlockPool.BYTE_BLOCK_SIZE, maxBufferedByteBlocks, bytesUsed));
     intBlockPool = new IntBlockPool(new RecyclingIntBlockAllocator(IntBlockPool.INT_BLOCK_SIZE, maxBufferedIntBlocks, bytesUsed));
     postingsWriter = new SliceWriter(intBlockPool);
@@ -1216,9 +1216,9 @@ public class MemoryIndex {
       @Override
       public int[] init() {
         final int[] ord = super.init();
-        start = new int[ArrayUtil.oversize(ord.length, RamUsageEstimator.NUM_BYTES_INT)];
-        end = new int[ArrayUtil.oversize(ord.length, RamUsageEstimator.NUM_BYTES_INT)];
-        freq = new int[ArrayUtil.oversize(ord.length, RamUsageEstimator.NUM_BYTES_INT)];
+        start = new int[ArrayUtil.oversize(ord.length, Integer.BYTES)];
+        end = new int[ArrayUtil.oversize(ord.length, Integer.BYTES)];
+        freq = new int[ArrayUtil.oversize(ord.length, Integer.BYTES)];
         assert start.length >= ord.length;
         assert end.length >= ord.length;
         assert freq.length >= ord.length;
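The block-budget split above gives half of maxReusedBytes to byte blocks and spends what remains on int blocks, with the assert guaranteeing the two pools never exceed the budget. A worked example, assuming the stock block sizes (BYTE_BLOCK_SIZE = 1 << 15 and INT_BLOCK_SIZE = 1 << 13 are assumptions here; read them off the real constants):

    long maxReusedBytes = 1 << 20;   // 1 MiB budget (example)
    int byteBlockSize = 1 << 15;     // assumed ByteBlockPool.BYTE_BLOCK_SIZE
    int intBlockSize  = 1 << 13;     // assumed IntBlockPool.INT_BLOCK_SIZE
    int maxByteBlocks = (int) ((maxReusedBytes / 2) / byteBlockSize);  // 16
    int maxIntBlocks  = (int) ((maxReusedBytes - maxByteBlocks * (long) byteBlockSize)
                               / (intBlockSize * Integer.BYTES));      // 16
    // 16 * 32768 + 16 * 8192 * 4 = 1,048,576 bytes: exactly the budget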
@@ -481,7 +481,7 @@ class FieldCacheImpl implements FieldCache {

     @Override
     public long ramBytesUsed() {
-      return values.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_LONG;
+      return values.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_OBJECT_REF + Long.BYTES;
     }
   }

@@ -599,7 +599,7 @@ class FieldCacheImpl implements FieldCache {
         termOrdToBytesOffset.ramBytesUsed() +
         docToTermOrd.ramBytesUsed() +
         3*RamUsageEstimator.NUM_BYTES_OBJECT_REF +
-        RamUsageEstimator.NUM_BYTES_INT;
+        Integer.BYTES;
     }

     @Override
@@ -24,7 +24,7 @@ import org.apache.lucene.spatial.util.GeoUtils;
 public class LatLonPoint extends Field {
   public static final FieldType TYPE = new FieldType();
   static {
-    TYPE.setDimensions(2, 4);
+    TYPE.setDimensions(2, Integer.BYTES);
     TYPE.freeze();
   }

@@ -45,7 +45,7 @@ public class LatLonPoint extends Field {
     }
     byte[] bytes = new byte[8];
     NumericUtils.intToBytes(encodeLat(lat), bytes, 0);
-    NumericUtils.intToBytes(encodeLon(lon), bytes, 1);
+    NumericUtils.intToBytes(encodeLon(lon), bytes, Integer.BYTES);
     fieldsData = new BytesRef(bytes);
   }

@@ -124,7 +124,7 @@ public class PointInPolygonQuery extends Query {
           public void visit(int docID, byte[] packedValue) {
             assert packedValue.length == 8;
             double lat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(packedValue, 0));
-            double lon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(packedValue, 1));
+            double lon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(packedValue, Integer.BYTES));
             if (GeoRelationUtils.pointInPolygon(polyLons, polyLats, lat, lon)) {
               hitCount[0]++;
               result.add(docID);
@@ -134,9 +134,9 @@ public class PointInPolygonQuery extends Query {
           @Override
           public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
             double cellMinLat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(minPackedValue, 0));
-            double cellMinLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(minPackedValue, 1));
+            double cellMinLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(minPackedValue, Integer.BYTES));
             double cellMaxLat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(maxPackedValue, 0));
-            double cellMaxLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(maxPackedValue, 1));
+            double cellMaxLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(maxPackedValue, Integer.BYTES));

             if (cellMinLat <= minLat && cellMaxLat >= maxLat && cellMinLon <= minLon && cellMaxLon >= maxLon) {
               // Cell fully encloses the query
@@ -98,7 +98,7 @@ public class PointInRectQuery extends Query {
           public void visit(int docID, byte[] packedValue) {
             assert packedValue.length == 8;
             double lat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(packedValue, 0));
-            double lon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(packedValue, 1));
+            double lon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(packedValue, Integer.BYTES));
             if (lat >= minLat && lat <= maxLat && lon >= minLon && lon <= maxLon) {
               hitCount[0]++;
               result.add(docID);
@@ -108,9 +108,9 @@ public class PointInRectQuery extends Query {
           @Override
           public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
             double cellMinLat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(minPackedValue, 0));
-            double cellMinLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(minPackedValue, 1));
+            double cellMinLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(minPackedValue, Integer.BYTES));
             double cellMaxLat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(maxPackedValue, 0));
-            double cellMaxLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(maxPackedValue, 1));
+            double cellMaxLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(maxPackedValue, Integer.BYTES));

             if (minLat <= cellMinLat && maxLat >= cellMaxLat && minLon <= cellMinLon && maxLon >= cellMaxLon) {
               return Relation.CELL_INSIDE_QUERY;
@@ -57,7 +57,7 @@ public class TestDocValuesRangeQuery extends LuceneTestCase {
       iw.addDocument(doc);
     }
     if (random().nextBoolean()) {
-      iw.deleteDocuments(PointRangeQuery.new1DLongRange("idx", 0L, true, 10L, true));
+      iw.deleteDocuments(PointRangeQuery.newLongRange("idx", 0L, true, 10L, true));
     }
     iw.commit();
     final IndexReader reader = iw.getReader();
@@ -69,7 +69,7 @@ public class TestDocValuesRangeQuery extends LuceneTestCase {
       final Long max = random().nextBoolean() ? null : TestUtil.nextLong(random(), -100, 1000);
       final boolean minInclusive = random().nextBoolean();
       final boolean maxInclusive = random().nextBoolean();
-      final Query q1 = PointRangeQuery.new1DLongRange("idx", min, minInclusive, max, maxInclusive);
+      final Query q1 = PointRangeQuery.newLongRange("idx", min, minInclusive, max, maxInclusive);
       final Query q2 = DocValuesRangeQuery.newLongRange("dv", min, max, minInclusive, maxInclusive);
       assertSameMatches(searcher, q1, q2, false);
     }
@@ -83,7 +83,7 @@ public class TestDocValuesRangeQuery extends LuceneTestCase {
     if (l == null) {
       return null;
     } else {
-      byte[] bytes = new byte[RamUsageEstimator.NUM_BYTES_LONG];
+      byte[] bytes = new byte[Long.BYTES];
       NumericUtils.longToBytes(l, bytes, 0);
       return new BytesRef(bytes);
     }
@@ -185,7 +185,7 @@ public class TestDocValuesRangeQuery extends LuceneTestCase {
       iw.addDocument(doc);
     }
     if (random().nextBoolean()) {
-      iw.deleteDocuments(PointRangeQuery.new1DLongRange("idx", 0L, true, 10L, true));
+      iw.deleteDocuments(PointRangeQuery.newLongRange("idx", 0L, true, 10L, true));
     }
     iw.commit();
     final IndexReader reader = iw.getReader();
@@ -199,7 +199,7 @@ public class TestDocValuesRangeQuery extends LuceneTestCase {
       final boolean maxInclusive = random().nextBoolean();

       BooleanQuery.Builder ref = new BooleanQuery.Builder();
-      ref.add(PointRangeQuery.new1DLongRange("idx", min, minInclusive, max, maxInclusive), Occur.FILTER);
+      ref.add(PointRangeQuery.newLongRange("idx", min, minInclusive, max, maxInclusive), Occur.FILTER);
       ref.add(new TermQuery(new Term("f", "a")), Occur.MUST);

       BooleanQuery.Builder bq1 = new BooleanQuery.Builder();
@@ -353,7 +353,7 @@ public class ContainsPrefixTreeQuery extends AbstractPrefixTreeQuery {
     public long ramBytesUsed() {
       return RamUsageEstimator.alignObjectSize(
             RamUsageEstimator.NUM_BYTES_OBJECT_REF
-            + RamUsageEstimator.NUM_BYTES_INT)
+            + Integer.BYTES)
           + intSet.ramBytesUsed();
     }

@@ -32,7 +32,7 @@ public final class Geo3DPoint extends Field {
   /** Indexing {@link FieldType}. */
   public static final FieldType TYPE = new FieldType();
   static {
-    TYPE.setDimensions(3, RamUsageEstimator.NUM_BYTES_INT);
+    TYPE.setDimensions(3, Integer.BYTES);
     TYPE.freeze();
   }

@@ -61,8 +61,8 @@ public final class Geo3DPoint extends Field {
   private void fillFieldsData(double planetMax, double x, double y, double z) {
     byte[] bytes = new byte[12];
     NumericUtils.intToBytes(Geo3DUtil.encodeValue(planetMax, x), bytes, 0);
-    NumericUtils.intToBytes(Geo3DUtil.encodeValue(planetMax, y), bytes, 1);
-    NumericUtils.intToBytes(Geo3DUtil.encodeValue(planetMax, z), bytes, 2);
+    NumericUtils.intToBytes(Geo3DUtil.encodeValue(planetMax, y), bytes, Integer.BYTES);
+    NumericUtils.intToBytes(Geo3DUtil.encodeValue(planetMax, z), bytes, 2 * Integer.BYTES);
     fieldsData = new BytesRef(bytes);
   }
 }
@@ -106,8 +106,8 @@ public class PointInGeo3DShapeQuery extends Query {
           public void visit(int docID, byte[] packedValue) {
             assert packedValue.length == 12;
             double x = Geo3DUtil.decodeValueCenter(planetMax, NumericUtils.bytesToInt(packedValue, 0));
-            double y = Geo3DUtil.decodeValueCenter(planetMax, NumericUtils.bytesToInt(packedValue, 1));
-            double z = Geo3DUtil.decodeValueCenter(planetMax, NumericUtils.bytesToInt(packedValue, 2));
+            double y = Geo3DUtil.decodeValueCenter(planetMax, NumericUtils.bytesToInt(packedValue, 1 * Integer.BYTES));
+            double z = Geo3DUtil.decodeValueCenter(planetMax, NumericUtils.bytesToInt(packedValue, 2 * Integer.BYTES));
             if (shape.isWithin(x, y, z)) {
               result.add(docID);
               hitCount[0]++;
@@ -122,10 +122,10 @@ public class PointInGeo3DShapeQuery extends Query {
             // a Math.round from double to long, so e.g. 1.4 -> 1, and -1.4 -> -1:
             double xMin = Geo3DUtil.decodeValueMin(planetMax, NumericUtils.bytesToInt(minPackedValue, 0));
             double xMax = Geo3DUtil.decodeValueMax(planetMax, NumericUtils.bytesToInt(maxPackedValue, 0));
-            double yMin = Geo3DUtil.decodeValueMin(planetMax, NumericUtils.bytesToInt(minPackedValue, 1));
-            double yMax = Geo3DUtil.decodeValueMax(planetMax, NumericUtils.bytesToInt(maxPackedValue, 1));
-            double zMin = Geo3DUtil.decodeValueMin(planetMax, NumericUtils.bytesToInt(minPackedValue, 2));
-            double zMax = Geo3DUtil.decodeValueMax(planetMax, NumericUtils.bytesToInt(maxPackedValue, 2));
+            double yMin = Geo3DUtil.decodeValueMin(planetMax, NumericUtils.bytesToInt(minPackedValue, 1 * Integer.BYTES));
+            double yMax = Geo3DUtil.decodeValueMax(planetMax, NumericUtils.bytesToInt(maxPackedValue, 1 * Integer.BYTES));
+            double zMin = Geo3DUtil.decodeValueMin(planetMax, NumericUtils.bytesToInt(minPackedValue, 2 * Integer.BYTES));
+            double zMax = Geo3DUtil.decodeValueMax(planetMax, NumericUtils.bytesToInt(maxPackedValue, 2 * Integer.BYTES));

             //System.out.println("  compare: x=" + cellXMin + "-" + cellXMax + " y=" + cellYMin + "-" + cellYMax + " z=" + cellZMin + "-" + cellZMax);
             assert xMin <= xMax;
@@ -56,7 +56,7 @@ public class TernaryTreeNode {
       mem += hiKid.sizeInBytes();
     }
     if (token != null) {
-      mem += RamUsageEstimator.shallowSizeOf(token) + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + RamUsageEstimator.NUM_BYTES_CHAR * token.length();
+      mem += RamUsageEstimator.shallowSizeOf(token) + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + Character.BYTES * token.length();
     }
     mem += RamUsageEstimator.shallowSizeOf(val);
     return mem;
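As a rough check of the node estimate above: for a token like "lucene" the char payload is Character.BYTES * 6 = 12 bytes, on top of the String's shallow size and the backing array header (both JVM-dependent; the numbers below are assumptions, not measurements):

    String token = "lucene";
    long shallowString = 16;   // assumed shallow size of the String object
    long arrayHeader   = 16;   // assumed RamUsageEstimator.NUM_BYTES_ARRAY_HEADER
    long estimate = shallowString + arrayHeader + (long) Character.BYTES * token.length();
    // => 16 + 16 + 12 = 44 bytes attributed to this node's token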