LUCENE-7042: more cleanup for Point encodings

Robert Muir 2016-02-22 19:32:28 -05:00
parent d6105334a0
commit 9ca1a19b81
72 changed files with 221 additions and 322 deletions
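
The change is mechanical but wide: wherever the code sized a primitive, the duplicated RamUsageEstimator.NUM_BYTES_* constants are replaced by the JDK's own Character.BYTES, Integer.BYTES, Long.BYTES, and so on, and the Point field classes switch their encode/decode helpers to offset-based signatures. A minimal sketch of the sizing pattern (a hypothetical GrowDemo class, not a file from this commit):

import org.apache.lucene.util.ArrayUtil;

class GrowDemo {
  static char[] grow(char[] buffer, int minSize) {
    if (buffer.length < minSize) {
      // before this commit: ArrayUtil.oversize(minSize, RamUsageEstimator.NUM_BYTES_CHAR)
      // Character.BYTES carries the same value (2 bytes per char)
      char[] next = new char[ArrayUtil.oversize(minSize, Character.BYTES)];
      System.arraycopy(buffer, 0, next, 0, buffer.length);
      return next;
    }
    return buffer;
  }
}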

View File

@ -48,7 +48,6 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.FileInputStream;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_CHAR;
import org.apache.lucene.util.ArrayUtil;
/**
@ -453,7 +452,7 @@ class PorterStemmer
public boolean stem(char[] wordBuffer, int offset, int wordLen) {
reset();
if (b.length < wordLen) {
b = new char[ArrayUtil.oversize(wordLen, NUM_BYTES_CHAR)];
b = new char[ArrayUtil.oversize(wordLen, Character.BYTES)];
}
System.arraycopy(wordBuffer, offset, b, 0, wordLen);
i = wordLen;

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.analysis.miscellaneous;
import java.io.IOException;
import org.apache.lucene.analysis.TokenFilter;
@ -24,7 +23,6 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;
/**
* This class converts alphabetic, numeric, and symbolic Unicode characters
@ -142,7 +140,7 @@ public final class ASCIIFoldingFilter extends TokenFilter {
// Worst-case length required:
final int maxSizeNeeded = 4 * length;
if (output.length < maxSizeNeeded) {
output = new char[ArrayUtil.oversize(maxSizeNeeded, RamUsageEstimator.NUM_BYTES_CHAR)];
output = new char[ArrayUtil.oversize(maxSizeNeeded, Character.BYTES)];
}
outputPos = foldToASCII(input, 0, output, 0, length);

View File

@ -28,7 +28,6 @@ import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.InPlaceMergeSorter;
import org.apache.lucene.util.RamUsageEstimator;
import java.io.IOException;
import java.util.Arrays;
@ -429,7 +428,7 @@ public final class WordDelimiterFilter extends TokenFilter {
savedType = typeAttribute.type();
if (savedBuffer.length < termAttribute.length()) {
savedBuffer = new char[ArrayUtil.oversize(termAttribute.length(), RamUsageEstimator.NUM_BYTES_CHAR)];
savedBuffer = new char[ArrayUtil.oversize(termAttribute.length(), Character.BYTES)];
}
System.arraycopy(termAttribute.buffer(), 0, savedBuffer, 0, termAttribute.length());

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.analysis.synonym;
import java.io.IOException;
import java.util.Arrays;
@ -31,11 +30,9 @@ import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.UnicodeUtil;
import org.apache.lucene.util.fst.FST;
/**
@ -207,12 +204,12 @@ public final class SynonymFilter extends TokenFilter {
outputs = Arrays.copyOf(outputs, ArrayUtil.oversize(1+count, RamUsageEstimator.NUM_BYTES_OBJECT_REF));
}
if (count == endOffsets.length) {
final int[] next = new int[ArrayUtil.oversize(1+count, RamUsageEstimator.NUM_BYTES_INT)];
final int[] next = new int[ArrayUtil.oversize(1+count, Integer.BYTES)];
System.arraycopy(endOffsets, 0, next, 0, count);
endOffsets = next;
}
if (count == posLengths.length) {
final int[] next = new int[ArrayUtil.oversize(1+count, RamUsageEstimator.NUM_BYTES_INT)];
final int[] next = new int[ArrayUtil.oversize(1+count, Integer.BYTES)];
System.arraycopy(posLengths, 0, next, 0, count);
posLengths = next;
}

View File

@ -16,12 +16,10 @@
*/
package org.apache.lucene.analysis.util;
import java.io.IOException;
import java.io.Reader;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;
/** Acts like a forever growing char[] as you read
* characters into it from the provided reader, but
@ -71,7 +69,7 @@ public final class RollingCharBuffer {
}
if (count == buffer.length) {
// Grow
final char[] newBuffer = new char[ArrayUtil.oversize(1+count, RamUsageEstimator.NUM_BYTES_CHAR)];
final char[] newBuffer = new char[ArrayUtil.oversize(1+count, Character.BYTES)];
//System.out.println(Thread.currentThread().getName() + ": cb grow " + newBuffer.length);
System.arraycopy(buffer, nextWrite, newBuffer, 0, buffer.length - nextWrite);
System.arraycopy(buffer, 0, newBuffer, buffer.length - nextWrite, nextWrite);

View File

@ -29,11 +29,9 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.tartarus.snowball;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;
/**
* This is the rev 502 of the Snowball SVN trunk,
@ -397,7 +395,7 @@ public abstract class SnowballProgram {
final int newLength = limit + adjustment;
//resize if necessary
if (newLength > current.length) {
char newBuffer[] = new char[ArrayUtil.oversize(newLength, RamUsageEstimator.NUM_BYTES_CHAR)];
char newBuffer[] = new char[ArrayUtil.oversize(newLength, Character.BYTES)];
System.arraycopy(current, 0, newBuffer, 0, limit);
current = newBuffer;
}

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.analysis.ja;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
@ -1053,7 +1052,7 @@ public final class JapaneseTokenizer extends Tokenizer {
assert baseOffset <= lastOffset;
int size = lastOffset - baseOffset + 1;
if (rootCapacity < size) {
int oversize = ArrayUtil.oversize(size, RamUsageEstimator.NUM_BYTES_INT);
int oversize = ArrayUtil.oversize(size, Integer.BYTES);
lRoot = new int[oversize];
rRoot = new int[oversize];
rootCapacity = oversize;
@ -1067,7 +1066,7 @@ public final class JapaneseTokenizer extends Tokenizer {
// Reserve at least N nodes.
private void reserve(int n) {
if (capacity < n) {
int oversize = ArrayUtil.oversize(n, RamUsageEstimator.NUM_BYTES_INT);
int oversize = ArrayUtil.oversize(n, Integer.BYTES);
nodeDicType = new Type[oversize];
nodeWordID = new int[oversize];
nodeMark = new int[oversize];

View File

@ -537,7 +537,7 @@ class Lucene50DocValuesProducer extends DocValuesProducer implements Closeable {
addresses = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, bytes.count+1, false);
if (!merging) {
addressInstances.put(field.name, addresses);
ramBytesUsed.addAndGet(addresses.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
ramBytesUsed.addAndGet(addresses.ramBytesUsed() + Integer.BYTES);
}
}
return addresses;
@ -577,7 +577,7 @@ class Lucene50DocValuesProducer extends DocValuesProducer implements Closeable {
addresses = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, size, false);
if (!merging) {
addressInstances.put(field.name, addresses);
ramBytesUsed.addAndGet(addresses.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
ramBytesUsed.addAndGet(addresses.ramBytesUsed() + Integer.BYTES);
}
}
return addresses;
@ -662,7 +662,7 @@ class Lucene50DocValuesProducer extends DocValuesProducer implements Closeable {
instance = MonotonicBlockPackedReader.of(data, entry.packedIntsVersion, entry.blockSize, entry.count+1, false);
if (!merging) {
ordIndexInstances.put(field.name, instance);
ramBytesUsed.addAndGet(instance.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
ramBytesUsed.addAndGet(instance.ramBytesUsed() + Integer.BYTES);
}
}
return instance;

View File

@ -228,6 +228,6 @@ final class FSTOrdsOutputs extends Outputs<FSTOrdsOutputs.Output> {
@Override
public long ramBytesUsed(Output output) {
return 2 * RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2 * RamUsageEstimator.NUM_BYTES_LONG + 2 * RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 2 * RamUsageEstimator.NUM_BYTES_INT + output.bytes.length;
return 2 * RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2 * Long.BYTES + 2 * RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 2 * Integer.BYTES + output.bytes.length;
}
}

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.codecs.bloom;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
@ -45,10 +44,8 @@ import org.apache.lucene.store.DataOutput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.automaton.CompiledAutomaton;
/**
@ -380,7 +377,7 @@ public final class BloomFilteringPostingsFormat extends PostingsFormat {
public long ramBytesUsed() {
long sizeInBytes = ((delegateFieldsProducer!=null) ? delegateFieldsProducer.ramBytesUsed() : 0);
for(Map.Entry<String,FuzzySet> entry: bloomsByFieldName.entrySet()) {
sizeInBytes += entry.getKey().length() * RamUsageEstimator.NUM_BYTES_CHAR;
sizeInBytes += entry.getKey().length() * Character.BYTES;
sizeInBytes += entry.getValue().ramBytesUsed();
}
return sizeInBytes;

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.codecs.memory;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
@ -154,7 +153,7 @@ public final class DirectPostingsFormat extends PostingsFormat {
public long ramBytesUsed() {
long sizeInBytes = 0;
for(Map.Entry<String,DirectField> entry: fields.entrySet()) {
sizeInBytes += entry.getKey().length() * RamUsageEstimator.NUM_BYTES_CHAR;
sizeInBytes += entry.getKey().length() * Character.BYTES;
sizeInBytes += entry.getValue().ramBytesUsed();
}
return sizeInBytes;

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.codecs.memory;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
@ -50,12 +49,10 @@ import org.apache.lucene.store.RAMOutputStream;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.IntsRefBuilder;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.fst.Builder;
import org.apache.lucene.util.fst.ByteSequenceOutputs;
import org.apache.lucene.util.fst.BytesRefFSTEnum;
@ -1016,7 +1013,7 @@ public final class MemoryPostingsFormat extends PostingsFormat {
public long ramBytesUsed() {
long sizeInBytes = 0;
for(Map.Entry<String,TermsReader> entry: fields.entrySet()) {
sizeInBytes += (entry.getKey().length() * RamUsageEstimator.NUM_BYTES_CHAR);
sizeInBytes += (entry.getKey().length() * Character.BYTES);
sizeInBytes += entry.getValue().ramBytesUsed();
}
return sizeInBytes;

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.analysis.tokenattributes;
import java.nio.CharBuffer;
import org.apache.lucene.util.ArrayUtil;
@ -24,13 +23,12 @@ import org.apache.lucene.util.AttributeImpl;
import org.apache.lucene.util.AttributeReflector;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.RamUsageEstimator;
/** Default implementation of {@link CharTermAttribute}. */
public class CharTermAttributeImpl extends AttributeImpl implements CharTermAttribute, TermToBytesRefAttribute, Cloneable {
private static int MIN_BUFFER_SIZE = 10;
private char[] termBuffer = new char[ArrayUtil.oversize(MIN_BUFFER_SIZE, RamUsageEstimator.NUM_BYTES_CHAR)];
private char[] termBuffer = new char[ArrayUtil.oversize(MIN_BUFFER_SIZE, Character.BYTES)];
private int termLength = 0;
/** May be used by subclasses to convert to different charsets / encodings for implementing {@link #getBytesRef()}. */
@ -56,7 +54,7 @@ public class CharTermAttributeImpl extends AttributeImpl implements CharTermAttr
if(termBuffer.length < newSize){
// Not big enough; create a new array with slight
// over allocation and preserve content
final char[] newCharBuffer = new char[ArrayUtil.oversize(newSize, RamUsageEstimator.NUM_BYTES_CHAR)];
final char[] newCharBuffer = new char[ArrayUtil.oversize(newSize, Character.BYTES)];
System.arraycopy(termBuffer, 0, newCharBuffer, 0, termBuffer.length);
termBuffer = newCharBuffer;
}
@ -67,7 +65,7 @@ public class CharTermAttributeImpl extends AttributeImpl implements CharTermAttr
if(termBuffer.length < newSize){
// Not big enough; create a new array with slight
// over allocation:
termBuffer = new char[ArrayUtil.oversize(newSize, RamUsageEstimator.NUM_BYTES_CHAR)];
termBuffer = new char[ArrayUtil.oversize(newSize, Character.BYTES)];
}
}

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.codecs.lucene54;
import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
@ -753,7 +752,7 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
addresses = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, size, false);
if (!merging) {
addressInstances.put(field.name, addresses);
ramBytesUsed.addAndGet(addresses.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
ramBytesUsed.addAndGet(addresses.ramBytesUsed() + Integer.BYTES);
}
}
return addresses;

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.codecs.perfield;
import java.io.Closeable;
import java.io.IOException;
import java.util.Collection;
@ -44,7 +43,6 @@ import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.RamUsageEstimator;
/**
* Enables per field docvalues support.
@ -324,8 +322,7 @@ public abstract class PerFieldDocValuesFormat extends DocValuesFormat {
public long ramBytesUsed() {
long size = 0;
for (Map.Entry<String,DocValuesProducer> entry : formats.entrySet()) {
size += (entry.getKey().length() * RamUsageEstimator.NUM_BYTES_CHAR) +
entry.getValue().ramBytesUsed();
size += (entry.getKey().length() * Character.BYTES) + entry.getValue().ramBytesUsed();
}
return size;
}

View File

@ -16,10 +16,8 @@
*/
package org.apache.lucene.document;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.RamUsageEstimator;
/** A double field that is indexed dimensionally such that finding
* all documents within an N-dimensional shape or range at search time is
@ -30,7 +28,7 @@ public final class DoublePoint extends Field {
private static FieldType getType(int numDims) {
FieldType type = new FieldType();
type.setDimensions(numDims, RamUsageEstimator.NUM_BYTES_LONG);
type.setDimensions(numDims, Double.BYTES);
type.freeze();
return type;
}
@ -59,8 +57,8 @@ public final class DoublePoint extends Field {
throw new IllegalStateException("this field (name=" + name + ") uses " + type.pointDimensionCount() + " dimensions; cannot convert to a single numeric value");
}
BytesRef bytes = (BytesRef) fieldsData;
assert bytes.length == RamUsageEstimator.NUM_BYTES_LONG;
return NumericUtils.sortableLongToDouble(NumericUtils.bytesToLongDirect(bytes.bytes, bytes.offset));
assert bytes.length == Double.BYTES;
return decodeDimension(bytes.bytes, bytes.offset);
}
private static BytesRef pack(double... point) {
@ -70,10 +68,10 @@ public final class DoublePoint extends Field {
if (point.length == 0) {
throw new IllegalArgumentException("point cannot be 0 dimensions");
}
byte[] packed = new byte[point.length * RamUsageEstimator.NUM_BYTES_LONG];
byte[] packed = new byte[point.length * Double.BYTES];
for(int dim=0;dim<point.length;dim++) {
NumericUtils.longToBytesDirect(NumericUtils.doubleToSortableLong(point[dim]), packed, dim);
for (int dim = 0; dim < point.length; dim++) {
encodeDimension(point[dim], packed, dim * Double.BYTES);
}
return new BytesRef(packed);
@ -91,28 +89,26 @@ public final class DoublePoint extends Field {
}
// public helper methods (e.g. for queries)
// TODO: try to rectify with pack() above, which works on a single concatenated array...
/** Encode n-dimensional double point into binary encoding */
public static byte[][] encode(Double value[]) {
byte[][] encoded = new byte[value.length][];
for (int i = 0; i < value.length; i++) {
if (value[i] != null) {
encoded[i] = encodeDimension(value[i]);
encoded[i] = new byte[Double.BYTES];
encodeDimension(value[i], encoded[i], 0);
}
}
return encoded;
}
/** Encode single double dimension */
public static byte[] encodeDimension(Double value) {
byte encoded[] = new byte[Long.BYTES];
NumericUtils.longToBytesDirect(NumericUtils.doubleToSortableLong(value), encoded, 0);
return encoded;
public static void encodeDimension(Double value, byte dest[], int offset) {
NumericUtils.longToBytesDirect(NumericUtils.doubleToSortableLong(value), dest, offset);
}
/** Decode single double value */
public static Double decodeDimension(byte value[]) {
return NumericUtils.sortableLongToDouble(NumericUtils.bytesToLongDirect(value, 0));
/** Decode single double dimension */
public static Double decodeDimension(byte value[], int offset) {
return NumericUtils.sortableLongToDouble(NumericUtils.bytesToLongDirect(value, offset));
}
}
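
The encode/decode helpers change shape here: encodeDimension no longer allocates a fresh byte[] per value but writes into a caller-provided array at a byte offset, and decodeDimension takes a matching offset, so pack() can fill one concatenated array without copying. A round-trip sketch against the new signatures (illustrative only):

import org.apache.lucene.document.DoublePoint;

class RoundTripDemo {
  public static void main(String[] args) {
    byte[] dest = new byte[2 * Double.BYTES];               // room for two dimensions
    DoublePoint.encodeDimension(1.5, dest, 0);              // dimension 0 at offset 0
    DoublePoint.encodeDimension(-2.25, dest, Double.BYTES); // dimension 1 at offset 8
    System.out.println(DoublePoint.decodeDimension(dest, Double.BYTES)); // -2.25
  }
}

FloatPoint, IntPoint, and LongPoint below get the same treatment with Float.BYTES, Integer.BYTES, and Long.BYTES respectively.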

View File

@ -16,10 +16,8 @@
*/
package org.apache.lucene.document;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.RamUsageEstimator;
/** A field that is indexed dimensionally such that finding
* all documents within an N-dimensional shape or range at search time is
@ -30,7 +28,7 @@ public final class FloatPoint extends Field {
private static FieldType getType(int numDims) {
FieldType type = new FieldType();
type.setDimensions(numDims, RamUsageEstimator.NUM_BYTES_INT);
type.setDimensions(numDims, Float.BYTES);
type.freeze();
return type;
}
@ -59,8 +57,8 @@ public final class FloatPoint extends Field {
throw new IllegalStateException("this field (name=" + name + ") uses " + type.pointDimensionCount() + " dimensions; cannot convert to a single numeric value");
}
BytesRef bytes = (BytesRef) fieldsData;
assert bytes.length == RamUsageEstimator.NUM_BYTES_INT;
return NumericUtils.sortableIntToFloat(NumericUtils.bytesToIntDirect(bytes.bytes, bytes.offset));
assert bytes.length == Float.BYTES;
return decodeDimension(bytes.bytes, bytes.offset);
}
private static BytesRef pack(float... point) {
@ -70,10 +68,10 @@ public final class FloatPoint extends Field {
if (point.length == 0) {
throw new IllegalArgumentException("point cannot be 0 dimensions");
}
byte[] packed = new byte[point.length * RamUsageEstimator.NUM_BYTES_INT];
byte[] packed = new byte[point.length * Float.BYTES];
for(int dim=0;dim<point.length;dim++) {
NumericUtils.intToBytesDirect(NumericUtils.floatToSortableInt(point[dim]), packed, dim);
for (int dim = 0; dim < point.length; dim++) {
encodeDimension(point[dim], packed, dim * Float.BYTES);
}
return new BytesRef(packed);
@ -91,28 +89,26 @@ public final class FloatPoint extends Field {
}
// public helper methods (e.g. for queries)
// TODO: try to rectify with pack() above, which works on a single concatenated array...
/** Encode n-dimensional float values into binary encoding */
public static byte[][] encode(Float value[]) {
byte[][] encoded = new byte[value.length][];
for (int i = 0; i < value.length; i++) {
if (value[i] != null) {
encoded[i] = encodeDimension(value[i]);
encoded[i] = new byte[Float.BYTES];
encodeDimension(value[i], encoded[i], 0);
}
}
return encoded;
}
/** Encode single float dimension */
public static byte[] encodeDimension(Float value) {
byte encoded[] = new byte[Integer.BYTES];
NumericUtils.intToBytesDirect(NumericUtils.floatToSortableInt(value), encoded, 0);
return encoded;
public static void encodeDimension(Float value, byte dest[], int offset) {
NumericUtils.intToBytesDirect(NumericUtils.floatToSortableInt(value), dest, offset);
}
/** Decode single float dimension */
public static Float decodeDimension(byte value[]) {
return NumericUtils.sortableIntToFloat(NumericUtils.bytesToIntDirect(value, 0));
public static Float decodeDimension(byte value[], int offset) {
return NumericUtils.sortableIntToFloat(NumericUtils.bytesToIntDirect(value, offset));
}
}

View File

@ -16,10 +16,8 @@
*/
package org.apache.lucene.document;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.RamUsageEstimator;
/** An int field that is indexed dimensionally such that finding
* all documents within an N-dimensional shape or range at search time is
@ -30,7 +28,7 @@ public final class IntPoint extends Field {
private static FieldType getType(int numDims) {
FieldType type = new FieldType();
type.setDimensions(numDims, RamUsageEstimator.NUM_BYTES_INT);
type.setDimensions(numDims, Integer.BYTES);
type.freeze();
return type;
}
@ -59,8 +57,8 @@ public final class IntPoint extends Field {
throw new IllegalStateException("this field (name=" + name + ") uses " + type.pointDimensionCount() + " dimensions; cannot convert to a single numeric value");
}
BytesRef bytes = (BytesRef) fieldsData;
assert bytes.length == RamUsageEstimator.NUM_BYTES_INT;
return NumericUtils.bytesToInt(bytes.bytes, bytes.offset);
assert bytes.length == Integer.BYTES;
return decodeDimension(bytes.bytes, bytes.offset);
}
private static BytesRef pack(int... point) {
@ -70,10 +68,10 @@ public final class IntPoint extends Field {
if (point.length == 0) {
throw new IllegalArgumentException("point cannot be 0 dimensions");
}
byte[] packed = new byte[point.length * RamUsageEstimator.NUM_BYTES_INT];
byte[] packed = new byte[point.length * Integer.BYTES];
for(int dim=0;dim<point.length;dim++) {
NumericUtils.intToBytes(point[dim], packed, dim);
for (int dim = 0; dim < point.length; dim++) {
encodeDimension(point[dim], packed, dim * Integer.BYTES);
}
return new BytesRef(packed);
@ -91,28 +89,26 @@ public final class IntPoint extends Field {
}
// public helper methods (e.g. for queries)
// TODO: try to rectify with pack() above, which works on a single concatenated array...
/** Encode n-dimensional integer values into binary encoding */
public static byte[][] encode(Integer value[]) {
byte[][] encoded = new byte[value.length][];
for (int i = 0; i < value.length; i++) {
if (value[i] != null) {
encoded[i] = encodeDimension(value[i]);
encoded[i] = new byte[Integer.BYTES];
encodeDimension(value[i], encoded[i], 0);
}
}
return encoded;
}
/** Encode single integer dimension */
public static byte[] encodeDimension(Integer value) {
byte encoded[] = new byte[Integer.BYTES];
NumericUtils.intToBytes(value, encoded, 0);
return encoded;
public static void encodeDimension(Integer value, byte dest[], int offset) {
NumericUtils.intToBytes(value, dest, offset);
}
/** Decode single integer dimension */
public static Integer decodeDimension(byte value[]) {
return NumericUtils.bytesToInt(value, 0);
public static Integer decodeDimension(byte value[], int offset) {
return NumericUtils.bytesToInt(value, offset);
}
}

View File

@ -16,10 +16,8 @@
*/
package org.apache.lucene.document;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.RamUsageEstimator;
/** A long field that is indexed dimensionally such that finding
* all documents within an N-dimensional shape or range at search time is
@ -30,7 +28,7 @@ public final class LongPoint extends Field {
private static FieldType getType(int numDims) {
FieldType type = new FieldType();
type.setDimensions(numDims, RamUsageEstimator.NUM_BYTES_LONG);
type.setDimensions(numDims, Long.BYTES);
type.freeze();
return type;
}
@ -59,8 +57,8 @@ public final class LongPoint extends Field {
throw new IllegalStateException("this field (name=" + name + ") uses " + type.pointDimensionCount() + " dimensions; cannot convert to a single numeric value");
}
BytesRef bytes = (BytesRef) fieldsData;
assert bytes.length == RamUsageEstimator.NUM_BYTES_LONG;
return NumericUtils.bytesToLong(bytes.bytes, bytes.offset);
assert bytes.length == Long.BYTES;
return decodeDimension(bytes.bytes, bytes.offset);
}
private static BytesRef pack(long... point) {
@ -70,10 +68,10 @@ public final class LongPoint extends Field {
if (point.length == 0) {
throw new IllegalArgumentException("point cannot be 0 dimensions");
}
byte[] packed = new byte[point.length * RamUsageEstimator.NUM_BYTES_LONG];
byte[] packed = new byte[point.length * Long.BYTES];
for(int dim=0;dim<point.length;dim++) {
NumericUtils.longToBytes(point[dim], packed, dim);
for (int dim = 0; dim < point.length; dim++) {
encodeDimension(point[dim], packed, dim * Long.BYTES);
}
return new BytesRef(packed);
@ -91,28 +89,26 @@ public final class LongPoint extends Field {
}
// public helper methods (e.g. for queries)
// TODO: try to rectify with pack() above, which works on a single concatenated array...
/** Encode n-dimensional long values into binary encoding */
public static byte[][] encode(Long value[]) {
byte[][] encoded = new byte[value.length][];
for (int i = 0; i < value.length; i++) {
if (value[i] != null) {
encoded[i] = encodeDimension(value[i]);
encoded[i] = new byte[Long.BYTES];
encodeDimension(value[i], encoded[i], 0);
}
}
return encoded;
}
/** Encode single long dimension */
public static byte[] encodeDimension(Long value) {
byte encoded[] = new byte[Long.BYTES];
NumericUtils.longToBytes(value, encoded, 0);
return encoded;
public static void encodeDimension(Long value, byte dest[], int offset) {
NumericUtils.longToBytes(value, dest, offset);
}
/** Decode single long dimension */
public static Long decodeDimension(byte value[]) {
return NumericUtils.bytesToLong(value, 0);
public static Long decodeDimension(byte value[], int offset) {
return NumericUtils.bytesToLong(value, offset);
}
}

View File

@ -52,19 +52,19 @@ class BufferedUpdates {
Term's text is String (OBJ_HEADER + 4*INT + POINTER +
OBJ_HEADER + string.length*CHAR). Integer is
OBJ_HEADER + INT. */
final static int BYTES_PER_DEL_TERM = 9*RamUsageEstimator.NUM_BYTES_OBJECT_REF + 7*RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 10*RamUsageEstimator.NUM_BYTES_INT;
final static int BYTES_PER_DEL_TERM = 9*RamUsageEstimator.NUM_BYTES_OBJECT_REF + 7*RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 10*Integer.BYTES;
/* Rough logic: del docIDs are List<Integer>. Say list
allocates ~2X size (2*POINTER). Integer is OBJ_HEADER
+ int */
final static int BYTES_PER_DEL_DOCID = 2*RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_INT;
final static int BYTES_PER_DEL_DOCID = 2*RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + Integer.BYTES;
/* Rough logic: HashMap has an array[Entry] w/ varying
load factor (say 2 * POINTER). Entry is object w/
Query key, Integer val, int hash, Entry next
(OBJ_HEADER + 3*POINTER + INT). Query we often
undercount (say 24 bytes). Integer is OBJ_HEADER + INT. */
final static int BYTES_PER_DEL_QUERY = 5*RamUsageEstimator.NUM_BYTES_OBJECT_REF + 2*RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2*RamUsageEstimator.NUM_BYTES_INT + 24;
final static int BYTES_PER_DEL_QUERY = 5*RamUsageEstimator.NUM_BYTES_OBJECT_REF + 2*RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2*Integer.BYTES + 24;
/* Rough logic: NumericUpdate calculates its actual size,
* including the update Term and DV field (String). The
@ -82,7 +82,7 @@ class BufferedUpdates {
*/
final static int BYTES_PER_NUMERIC_FIELD_ENTRY =
7*RamUsageEstimator.NUM_BYTES_OBJECT_REF + 3*RamUsageEstimator.NUM_BYTES_OBJECT_HEADER +
RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 5*RamUsageEstimator.NUM_BYTES_INT + RamUsageEstimator.NUM_BYTES_FLOAT;
RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 5*Integer.BYTES + Float.BYTES;
/* Rough logic: Incremented when we see another Term for an already updated
* field.
@ -93,7 +93,7 @@ class BufferedUpdates {
* Term (key) is counted only as POINTER.
* NumericUpdate (val) counts its own size and isn't accounted for here.
*/
final static int BYTES_PER_NUMERIC_UPDATE_ENTRY = 7*RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_INT;
final static int BYTES_PER_NUMERIC_UPDATE_ENTRY = 7*RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + Integer.BYTES;
/* Rough logic: BinaryUpdate calculates its actual size,
* including the update Term and DV field (String). The
@ -111,7 +111,7 @@ class BufferedUpdates {
*/
final static int BYTES_PER_BINARY_FIELD_ENTRY =
7*RamUsageEstimator.NUM_BYTES_OBJECT_REF + 3*RamUsageEstimator.NUM_BYTES_OBJECT_HEADER +
RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 5*RamUsageEstimator.NUM_BYTES_INT + RamUsageEstimator.NUM_BYTES_FLOAT;
RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 5*Integer.BYTES + Float.BYTES;
/* Rough logic: Incremented when we see another Term for an already updated
* field.
@ -122,7 +122,7 @@ class BufferedUpdates {
* Term (key) is counted only as POINTER.
* BinaryUpdate (val) counts its own size and isn't accounted for here.
*/
final static int BYTES_PER_BINARY_UPDATE_ENTRY = 7*RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_INT;
final static int BYTES_PER_BINARY_UPDATE_ENTRY = 7*RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + Integer.BYTES;
final AtomicInteger numTermDeletes = new AtomicInteger();
final AtomicInteger numNumericUpdates = new AtomicInteger();
@ -226,7 +226,7 @@ class BufferedUpdates {
// is done to respect IndexWriterConfig.setMaxBufferedDeleteTerms.
numTermDeletes.incrementAndGet();
if (current == null) {
bytesUsed.addAndGet(BYTES_PER_DEL_TERM + term.bytes.length + (RamUsageEstimator.NUM_BYTES_CHAR * term.field().length()));
bytesUsed.addAndGet(BYTES_PER_DEL_TERM + term.bytes.length + (Character.BYTES * term.field().length()));
}
}
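
To make the accounting concrete (assuming a 64-bit JVM with compressed oops, where NUM_BYTES_OBJECT_REF is 4 and NUM_BYTES_OBJECT_HEADER is 8 + 4 = 12, per RamUsageEstimator below): BYTES_PER_DEL_DOCID works out to 2*4 + 12 + Integer.BYTES = 24 bytes per buffered deleted docID. The totals are unchanged by this commit, since Integer.BYTES and the removed NUM_BYTES_INT are both 4.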

View File

@ -16,16 +16,12 @@
*/
package org.apache.lucene.index;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_ARRAY_HEADER;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_CHAR;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_INT;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_HEADER;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_REF;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator;
/** An in-place update to a DocValues field. */
abstract class DocValuesUpdate {
@ -37,7 +33,7 @@ abstract class DocValuesUpdate {
* String: 2*OBJ_HEADER + 4*INT + PTR + string.length*CHAR
* T: OBJ_HEADER
*/
private static final int RAW_SIZE_IN_BYTES = 8*NUM_BYTES_OBJECT_HEADER + 8*NUM_BYTES_OBJECT_REF + 8*NUM_BYTES_INT;
private static final int RAW_SIZE_IN_BYTES = 8*NUM_BYTES_OBJECT_HEADER + 8*NUM_BYTES_OBJECT_REF + 8*Integer.BYTES;
final DocValuesType type;
final Term term;
@ -63,9 +59,9 @@ abstract class DocValuesUpdate {
final int sizeInBytes() {
int sizeInBytes = RAW_SIZE_IN_BYTES;
sizeInBytes += term.field.length() * NUM_BYTES_CHAR;
sizeInBytes += term.field.length() * Character.BYTES;
sizeInBytes += term.bytes.bytes.length;
sizeInBytes += field.length() * NUM_BYTES_CHAR;
sizeInBytes += field.length() * Character.BYTES;
sizeInBytes += valueSizeInBytes();
return sizeInBytes;
}
@ -79,7 +75,7 @@ abstract class DocValuesUpdate {
static final class BinaryDocValuesUpdate extends DocValuesUpdate {
/* Size of BytesRef: 2*INT + ARRAY_HEADER + PTR */
private static final long RAW_VALUE_SIZE_IN_BYTES = NUM_BYTES_ARRAY_HEADER + 2*NUM_BYTES_INT + NUM_BYTES_OBJECT_REF;
private static final long RAW_VALUE_SIZE_IN_BYTES = NUM_BYTES_ARRAY_HEADER + 2*Integer.BYTES + NUM_BYTES_OBJECT_REF;
BinaryDocValuesUpdate(Term term, String field, BytesRef value) {
super(DocValuesType.BINARY, term, field, value);
@ -101,7 +97,7 @@ abstract class DocValuesUpdate {
@Override
long valueSizeInBytes() {
return RamUsageEstimator.NUM_BYTES_LONG;
return Long.BYTES;
}
}

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.index;
import java.io.IOException;
import java.text.NumberFormat;
import java.util.Collections;
@ -40,7 +39,6 @@ import org.apache.lucene.util.Counter;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.IntBlockPool;
import org.apache.lucene.util.MutableBits;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.Version;
@ -576,14 +574,13 @@ class DocumentsWriterPerThread {
@Override
public int[] getIntBlock() {
int[] b = new int[IntBlockPool.INT_BLOCK_SIZE];
bytesUsed.addAndGet(IntBlockPool.INT_BLOCK_SIZE
* RamUsageEstimator.NUM_BYTES_INT);
bytesUsed.addAndGet(IntBlockPool.INT_BLOCK_SIZE * Integer.BYTES);
return b;
}
@Override
public void recycleIntBlocks(int[][] blocks, int offset, int length) {
bytesUsed.addAndGet(-(length * (IntBlockPool.INT_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT)));
bytesUsed.addAndGet(-(length * (IntBlockPool.INT_BLOCK_SIZE * Integer.BYTES)));
}
}

View File

@ -16,13 +16,11 @@
*/
package org.apache.lucene.index;
import java.io.IOException;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator;
// TODO: break into separate freq and prox writers as
// codecs; make separate container (tii/tis/skip/*) that can
@ -257,15 +255,15 @@ final class FreqProxTermsWriterPerField extends TermsHashPerField {
@Override
int bytesPerPosting() {
int bytes = ParallelPostingsArray.BYTES_PER_POSTING + 2 * RamUsageEstimator.NUM_BYTES_INT;
int bytes = ParallelPostingsArray.BYTES_PER_POSTING + 2 * Integer.BYTES;
if (lastPositions != null) {
bytes += RamUsageEstimator.NUM_BYTES_INT;
bytes += Integer.BYTES;
}
if (lastOffsets != null) {
bytes += RamUsageEstimator.NUM_BYTES_INT;
bytes += Integer.BYTES;
}
if (termFreqs != null) {
bytes += RamUsageEstimator.NUM_BYTES_INT;
bytes += Integer.BYTES;
}
return bytes;

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.index;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
@ -40,7 +39,7 @@ import org.apache.lucene.util.RamUsageEstimator;
class FrozenBufferedUpdates {
/* Query we often undercount (say 24 bytes), plus int. */
final static int BYTES_PER_DEL_QUERY = RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_INT + 24;
final static int BYTES_PER_DEL_QUERY = RamUsageEstimator.NUM_BYTES_OBJECT_REF + Integer.BYTES + 24;
// Terms, in sorted order:
final PrefixCodedTerms terms;

View File

@ -16,12 +16,10 @@
*/
package org.apache.lucene.index;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;
class ParallelPostingsArray {
final static int BYTES_PER_POSTING = 3 * RamUsageEstimator.NUM_BYTES_INT;
final static int BYTES_PER_POSTING = 3 * Integer.BYTES;
final int size;
final int[] textStarts;

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.index;
import java.io.IOException;
import org.apache.lucene.codecs.PointReader;
@ -25,7 +24,6 @@ import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.ByteBlockPool;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Counter;
import org.apache.lucene.util.RamUsageEstimator;
/** Buffers up pending byte[][] value(s) per doc, then flushes when segment flushes. */
class PointValuesWriter {
@ -41,7 +39,7 @@ class PointValuesWriter {
this.iwBytesUsed = docWriter.bytesUsed;
this.bytes = new ByteBlockPool(docWriter.byteBlockAllocator);
docIDs = new int[16];
iwBytesUsed.addAndGet(16 * RamUsageEstimator.NUM_BYTES_INT);
iwBytesUsed.addAndGet(16 * Integer.BYTES);
packedValue = new byte[fieldInfo.getPointDimensionCount() * fieldInfo.getPointNumBytes()];
}
@ -54,7 +52,7 @@ class PointValuesWriter {
}
if (docIDs.length == numDocs) {
docIDs = ArrayUtil.grow(docIDs, numDocs+1);
iwBytesUsed.addAndGet((docIDs.length - numDocs) * RamUsageEstimator.NUM_BYTES_INT);
iwBytesUsed.addAndGet((docIDs.length - numDocs) * Integer.BYTES);
}
bytes.append(value);
docIDs[numDocs] = docID;

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.index;
import java.io.IOException;
import java.util.Objects;
@ -27,7 +26,6 @@ import org.apache.lucene.store.RAMOutputStream;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.RamUsageEstimator;
/**
* Prefix codes term instances (prefixes are shared)
@ -45,7 +43,7 @@ public class PrefixCodedTerms implements Accountable {
@Override
public long ramBytesUsed() {
return buffer.ramBytesUsed() + 2 * RamUsageEstimator.NUM_BYTES_LONG;
return buffer.ramBytesUsed() + 2 * Long.BYTES;
}
/** Records del gen for this packet. */

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.index;
import static org.apache.lucene.util.ByteBlockPool.BYTE_BLOCK_SIZE;
import java.io.IOException;
@ -29,7 +28,6 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefHash.DirectBytesStartArray;
import org.apache.lucene.util.BytesRefHash;
import org.apache.lucene.util.Counter;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.PackedInts;
import org.apache.lucene.util.packed.PackedLongValues;
@ -93,7 +91,7 @@ class SortedDocValuesWriter extends DocValuesWriter {
// 1. when indexing, when hash is 50% full, rehash() suddenly needs 2*size ints.
// TODO: can this same OOM happen in THPF?
// 2. when flushing, we need 1 int per value (slot in the ordMap).
iwBytesUsed.addAndGet(2 * RamUsageEstimator.NUM_BYTES_INT);
iwBytesUsed.addAndGet(2 * Integer.BYTES);
}
pending.add(termID);

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.index;
import static org.apache.lucene.util.ByteBlockPool.BYTE_BLOCK_SIZE;
import java.io.IOException;
@ -31,7 +30,6 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefHash.DirectBytesStartArray;
import org.apache.lucene.util.BytesRefHash;
import org.apache.lucene.util.Counter;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.PackedInts;
import org.apache.lucene.util.packed.PackedLongValues;
@ -125,14 +123,14 @@ class SortedSetDocValuesWriter extends DocValuesWriter {
// 1. when indexing, when hash is 50% full, rehash() suddenly needs 2*size ints.
// TODO: can this same OOM happen in THPF?
// 2. when flushing, we need 1 int per value (slot in the ordMap).
iwBytesUsed.addAndGet(2 * RamUsageEstimator.NUM_BYTES_INT);
iwBytesUsed.addAndGet(2 * Integer.BYTES);
}
if (currentUpto == currentValues.length) {
currentValues = ArrayUtil.grow(currentValues, currentValues.length+1);
// reserve additional space for max # values per-doc
// when flushing, we need an int[] to sort the mapped-ords within the doc
iwBytesUsed.addAndGet((currentValues.length - currentUpto) * 2 * RamUsageEstimator.NUM_BYTES_INT);
iwBytesUsed.addAndGet((currentValues.length - currentUpto) * 2 * Integer.BYTES);
}
currentValues[currentUpto] = termID;

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.index;
import java.io.IOException;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
@ -283,7 +282,7 @@ final class TermVectorsConsumerPerField extends TermsHashPerField {
@Override
int bytesPerPosting() {
return super.bytesPerPosting() + 3 * RamUsageEstimator.NUM_BYTES_INT;
return super.bytesPerPosting() + 3 * Integer.BYTES;
}
}
}

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.search;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
@ -24,7 +23,6 @@ import java.util.List;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;
/**
* Caches all docs, and optionally also scores, coming from
@ -233,7 +231,7 @@ public abstract class CachingCollector extends FilterCollector {
if (docCount >= maxDocsToCache) {
invalidate();
} else {
final int newLen = Math.min(ArrayUtil.oversize(docCount + 1, RamUsageEstimator.NUM_BYTES_INT), maxDocsToCache);
final int newLen = Math.min(ArrayUtil.oversize(docCount + 1, Integer.BYTES), maxDocsToCache);
grow(newLen);
}
}
@ -329,9 +327,9 @@ public abstract class CachingCollector extends FilterCollector {
* scores are cached.
*/
public static CachingCollector create(Collector other, boolean cacheScores, double maxRAMMB) {
int bytesPerDoc = RamUsageEstimator.NUM_BYTES_INT;
int bytesPerDoc = Integer.BYTES;
if (cacheScores) {
bytesPerDoc += RamUsageEstimator.NUM_BYTES_FLOAT;
bytesPerDoc += Float.BYTES;
}
final int maxDocsToCache = (int) ((maxRAMMB * 1024 * 1024) / bytesPerDoc);
return create(other, cacheScores, maxDocsToCache);

View File

@ -194,7 +194,7 @@ public class PointRangeQuery extends Query {
return new PointRangeQuery(field, IntPoint.encode(lowerValue), lowerInclusive, IntPoint.encode(upperValue), upperInclusive) {
@Override
protected String toString(byte[] value) {
return IntPoint.decodeDimension(value).toString();
return IntPoint.decodeDimension(value, 0).toString();
}
};
}
@ -264,7 +264,7 @@ public class PointRangeQuery extends Query {
return new PointRangeQuery(field, LongPoint.encode(lowerValue), lowerInclusive, LongPoint.encode(upperValue), upperInclusive) {
@Override
protected String toString(byte[] value) {
return LongPoint.decodeDimension(value).toString();
return LongPoint.decodeDimension(value, 0).toString();
}
};
}
@ -334,7 +334,7 @@ public class PointRangeQuery extends Query {
return new PointRangeQuery(field, FloatPoint.encode(lowerValue), lowerInclusive, FloatPoint.encode(upperValue), upperInclusive) {
@Override
protected String toString(byte[] value) {
return FloatPoint.decodeDimension(value).toString();
return FloatPoint.decodeDimension(value, 0).toString();
}
};
}
@ -404,7 +404,7 @@ public class PointRangeQuery extends Query {
return new PointRangeQuery(field, DoublePoint.encode(lowerValue), lowerInclusive, DoublePoint.encode(upperValue), upperInclusive) {
@Override
protected String toString(byte[] value) {
return DoublePoint.decodeDimension(value).toString();
return DoublePoint.decodeDimension(value, 0).toString();
}
};
}

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.search;
import java.io.IOException;
import org.apache.lucene.index.IndexReader;
@ -168,7 +167,7 @@ public abstract class ScoringRewrite<B> extends TermCollectingRewrite<B> {
@Override
public int[] init() {
final int[] ord = super.init();
boost = new float[ArrayUtil.oversize(ord.length, RamUsageEstimator.NUM_BYTES_FLOAT)];
boost = new float[ArrayUtil.oversize(ord.length, Float.BYTES)];
termState = new TermContext[ArrayUtil.oversize(ord.length, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
assert termState.length >= ord.length && boost.length >= ord.length;
return ord;

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.util;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
@ -248,7 +247,7 @@ public final class ArrayUtil {
public static short[] grow(short[] array, int minSize) {
assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
if (array.length < minSize) {
short[] newArray = new short[oversize(minSize, RamUsageEstimator.NUM_BYTES_SHORT)];
short[] newArray = new short[oversize(minSize, Short.BYTES)];
System.arraycopy(array, 0, newArray, 0, array.length);
return newArray;
} else
@ -262,7 +261,7 @@ public final class ArrayUtil {
public static float[] grow(float[] array, int minSize) {
assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
if (array.length < minSize) {
float[] newArray = new float[oversize(minSize, RamUsageEstimator.NUM_BYTES_FLOAT)];
float[] newArray = new float[oversize(minSize, Float.BYTES)];
System.arraycopy(array, 0, newArray, 0, array.length);
return newArray;
} else
@ -276,7 +275,7 @@ public final class ArrayUtil {
public static double[] grow(double[] array, int minSize) {
assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
if (array.length < minSize) {
double[] newArray = new double[oversize(minSize, RamUsageEstimator.NUM_BYTES_DOUBLE)];
double[] newArray = new double[oversize(minSize, Double.BYTES)];
System.arraycopy(array, 0, newArray, 0, array.length);
return newArray;
} else
@ -289,7 +288,7 @@ public final class ArrayUtil {
public static short[] shrink(short[] array, int targetSize) {
assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?";
final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_SHORT);
final int newSize = getShrinkSize(array.length, targetSize, Short.BYTES);
if (newSize != array.length) {
short[] newArray = new short[newSize];
System.arraycopy(array, 0, newArray, 0, newSize);
@ -301,7 +300,7 @@ public final class ArrayUtil {
public static int[] grow(int[] array, int minSize) {
assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
if (array.length < minSize) {
int[] newArray = new int[oversize(minSize, RamUsageEstimator.NUM_BYTES_INT)];
int[] newArray = new int[oversize(minSize, Integer.BYTES)];
System.arraycopy(array, 0, newArray, 0, array.length);
return newArray;
} else
@ -314,7 +313,7 @@ public final class ArrayUtil {
public static int[] shrink(int[] array, int targetSize) {
assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?";
final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_INT);
final int newSize = getShrinkSize(array.length, targetSize, Integer.BYTES);
if (newSize != array.length) {
int[] newArray = new int[newSize];
System.arraycopy(array, 0, newArray, 0, newSize);
@ -326,7 +325,7 @@ public final class ArrayUtil {
public static long[] grow(long[] array, int minSize) {
assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
if (array.length < minSize) {
long[] newArray = new long[oversize(minSize, RamUsageEstimator.NUM_BYTES_LONG)];
long[] newArray = new long[oversize(minSize, Long.BYTES)];
System.arraycopy(array, 0, newArray, 0, array.length);
return newArray;
} else
@ -339,7 +338,7 @@ public final class ArrayUtil {
public static long[] shrink(long[] array, int targetSize) {
assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?";
final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_LONG);
final int newSize = getShrinkSize(array.length, targetSize, Long.BYTES);
if (newSize != array.length) {
long[] newArray = new long[newSize];
System.arraycopy(array, 0, newArray, 0, newSize);
@ -401,7 +400,7 @@ public final class ArrayUtil {
public static char[] grow(char[] array, int minSize) {
assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?";
if (array.length < minSize) {
char[] newArray = new char[oversize(minSize, RamUsageEstimator.NUM_BYTES_CHAR)];
char[] newArray = new char[oversize(minSize, Character.BYTES)];
System.arraycopy(array, 0, newArray, 0, array.length);
return newArray;
} else
@ -414,7 +413,7 @@ public final class ArrayUtil {
public static char[] shrink(char[] array, int targetSize) {
assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?";
final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_CHAR);
final int newSize = getShrinkSize(array.length, targetSize, Character.BYTES);
if (newSize != array.length) {
char[] newArray = new char[newSize];
System.arraycopy(array, 0, newArray, 0, newSize);

View File

@ -51,8 +51,7 @@ public final class BytesRefArray {
this.pool = new ByteBlockPool(new ByteBlockPool.DirectTrackingAllocator(
bytesUsed));
pool.nextBuffer();
bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER
+ RamUsageEstimator.NUM_BYTES_INT);
bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + Integer.BYTES);
this.bytesUsed = bytesUsed;
}
@ -75,8 +74,7 @@ public final class BytesRefArray {
if (lastElement >= offsets.length) {
int oldLen = offsets.length;
offsets = ArrayUtil.grow(offsets, offsets.length + 1);
bytesUsed.addAndGet((offsets.length - oldLen)
* RamUsageEstimator.NUM_BYTES_INT);
bytesUsed.addAndGet((offsets.length - oldLen) * Integer.BYTES);
}
pool.append(bytes);
offsets[lastElement++] = currentOffset;

View File

@ -90,7 +90,7 @@ public final class BytesRefHash {
this.bytesStartArray = bytesStartArray;
bytesStart = bytesStartArray.init();
bytesUsed = bytesStartArray.bytesUsed() == null? Counter.newCounter() : bytesStartArray.bytesUsed();
bytesUsed.addAndGet(hashSize * RamUsageEstimator.NUM_BYTES_INT);
bytesUsed.addAndGet(hashSize * Integer.BYTES);
}
/**
@ -213,7 +213,7 @@ public final class BytesRefHash {
newSize /= 2;
}
if (newSize != hashSize) {
bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT * -(hashSize - newSize));
bytesUsed.addAndGet(Integer.BYTES * -(hashSize - newSize));
hashSize = newSize;
ids = new int[hashSize];
Arrays.fill(ids, -1);
@ -252,7 +252,7 @@ public final class BytesRefHash {
public void close() {
clear(true);
ids = null;
bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT * -hashSize);
bytesUsed.addAndGet(Integer.BYTES * -hashSize);
}
/**
@ -408,7 +408,7 @@ public final class BytesRefHash {
*/
private void rehash(final int newSize, boolean hashOnData) {
final int newMask = newSize - 1;
bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT * (newSize));
bytesUsed.addAndGet(Integer.BYTES * (newSize));
final int[] newHash = new int[newSize];
Arrays.fill(newHash, -1);
for (int i = 0; i < hashSize; i++) {
@ -449,7 +449,7 @@ public final class BytesRefHash {
}
hashMask = newMask;
bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT * (-ids.length));
bytesUsed.addAndGet(Integer.BYTES * (-ids.length));
ids = newHash;
hashSize = newSize;
hashHalfSize = newSize / 2;
@ -472,7 +472,7 @@ public final class BytesRefHash {
if (ids == null) {
ids = new int[hashSize];
bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT * hashSize);
bytesUsed.addAndGet(Integer.BYTES * hashSize);
}
}
@ -570,8 +570,7 @@ public final class BytesRefHash {
@Override
public int[] init() {
return bytesStart = new int[ArrayUtil.oversize(initSize,
RamUsageEstimator.NUM_BYTES_INT)];
return bytesStart = new int[ArrayUtil.oversize(initSize, Integer.BYTES)];
}
@Override

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.util;
import java.io.IOException;
import org.apache.lucene.search.DocIdSet;
@ -68,7 +67,7 @@ public final class DocIdSetBuilder {
private void growBuffer(int minSize) {
assert minSize < threshold;
if (buffer.length < minSize) {
int nextSize = Math.min(threshold, ArrayUtil.oversize(minSize, RamUsageEstimator.NUM_BYTES_INT));
int nextSize = Math.min(threshold, ArrayUtil.oversize(minSize, Integer.BYTES));
int[] newBuffer = new int[nextSize];
System.arraycopy(buffer, 0, newBuffer, 0, buffer.length);
buffer = newBuffer;

View File

@ -155,16 +155,15 @@ public final class NumericUtils {
return true;
}
public static void intToBytes(int x, byte[] dest, int index) {
public static void intToBytes(int x, byte[] dest, int offset) {
// Flip the sign bit, so negative ints sort before positive ints correctly:
x ^= 0x80000000;
intToBytesDirect(x, dest, index);
intToBytesDirect(x, dest, offset);
}
public static void intToBytesDirect(int x, byte[] dest, int index) {
// Flip the sign bit, so negative ints sort before positive ints correctly:
for(int i=0;i<4;i++) {
dest[4*index+i] = (byte) (x >> 24-i*8);
public static void intToBytesDirect(int x, byte[] dest, int offset) {
for (int i = 0; i < 4; i++) {
dest[offset+i] = (byte) (x >> 24-i*8);
}
}
@ -174,22 +173,21 @@ public final class NumericUtils {
return x ^ 0x80000000;
}
public static int bytesToIntDirect(byte[] src, int index) {
public static int bytesToIntDirect(byte[] src, int offset) {
int x = 0;
for(int i=0;i<4;i++) {
x |= (src[4*index+i] & 0xff) << (24-i*8);
for (int i = 0; i < 4; i++) {
x |= (src[offset+i] & 0xff) << (24-i*8);
}
return x;
}
public static void longToBytes(long v, byte[] bytes, int dim) {
public static void longToBytes(long v, byte[] bytes, int offset) {
// Flip the sign bit so negative longs sort before positive longs:
v ^= 0x8000000000000000L;
longToBytesDirect(v, bytes, dim);
longToBytesDirect(v, bytes, offset);
}
public static void longToBytesDirect(long v, byte[] bytes, int dim) {
int offset = 8 * dim;
public static void longToBytesDirect(long v, byte[] bytes, int offset) {
bytes[offset] = (byte) (v >> 56);
bytes[offset+1] = (byte) (v >> 48);
bytes[offset+2] = (byte) (v >> 40);
@ -200,15 +198,14 @@ public final class NumericUtils {
bytes[offset+7] = (byte) v;
}
public static long bytesToLong(byte[] bytes, int index) {
long v = bytesToLongDirect(bytes, index);
public static long bytesToLong(byte[] bytes, int offset) {
long v = bytesToLongDirect(bytes, offset);
// Flip the sign bit back
v ^= 0x8000000000000000L;
return v;
}
public static long bytesToLongDirect(byte[] bytes, int index) {
int offset = 8 * index;
public static long bytesToLongDirect(byte[] bytes, int offset) {
long v = ((bytes[offset] & 0xffL) << 56) |
((bytes[offset+1] & 0xffL) << 48) |
((bytes[offset+2] & 0xffL) << 40) |
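
Note the semantic change in the Direct variants above: they previously took a dimension index and computed the byte offset internally (offset = 8 * dim for longs), whereas now the caller passes the byte offset itself, which is why the Point classes multiply by Long.BYTES and friends. A round-trip sketch against the new signatures (illustrative only):

import org.apache.lucene.util.NumericUtils;

class OffsetDemo {
  public static void main(String[] args) {
    byte[] packed = new byte[2 * Long.BYTES];
    // the old call for dimension 1 was longToBytesDirect(42L, packed, 1), writing at byte 8*1;
    // now the caller computes that offset explicitly:
    NumericUtils.longToBytesDirect(42L, packed, 1 * Long.BYTES);
    System.out.println(NumericUtils.bytesToLongDirect(packed, Long.BYTES)); // 42
  }
}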

View File

@ -55,15 +55,6 @@ public final class RamUsageEstimator {
/** No instantiation. */
private RamUsageEstimator() {}
public final static int NUM_BYTES_BOOLEAN = 1;
public final static int NUM_BYTES_BYTE = 1;
public final static int NUM_BYTES_CHAR = 2;
public final static int NUM_BYTES_SHORT = 2;
public final static int NUM_BYTES_INT = 4;
public final static int NUM_BYTES_FLOAT = 4;
public final static int NUM_BYTES_LONG = 8;
public final static int NUM_BYTES_DOUBLE = 8;
/**
* True, iff compressed references (oops) are enabled by this JVM
*/
@ -95,14 +86,14 @@ public final class RamUsageEstimator {
*/
private static final Map<Class<?>,Integer> primitiveSizes = new IdentityHashMap<>();
static {
primitiveSizes.put(boolean.class, Integer.valueOf(NUM_BYTES_BOOLEAN));
primitiveSizes.put(byte.class, Integer.valueOf(NUM_BYTES_BYTE));
primitiveSizes.put(char.class, Integer.valueOf(NUM_BYTES_CHAR));
primitiveSizes.put(short.class, Integer.valueOf(NUM_BYTES_SHORT));
primitiveSizes.put(int.class, Integer.valueOf(NUM_BYTES_INT));
primitiveSizes.put(float.class, Integer.valueOf(NUM_BYTES_FLOAT));
primitiveSizes.put(double.class, Integer.valueOf(NUM_BYTES_DOUBLE));
primitiveSizes.put(long.class, Integer.valueOf(NUM_BYTES_LONG));
primitiveSizes.put(boolean.class, 1);
primitiveSizes.put(byte.class, 1);
primitiveSizes.put(char.class, Integer.valueOf(Character.BYTES));
primitiveSizes.put(short.class, Integer.valueOf(Short.BYTES));
primitiveSizes.put(int.class, Integer.valueOf(Integer.BYTES));
primitiveSizes.put(float.class, Integer.valueOf(Float.BYTES));
primitiveSizes.put(double.class, Integer.valueOf(Double.BYTES));
primitiveSizes.put(long.class, Integer.valueOf(Long.BYTES));
}
/**
@ -165,7 +156,7 @@ public final class RamUsageEstimator {
// "best guess" based on reference size:
NUM_BYTES_OBJECT_HEADER = 8 + NUM_BYTES_OBJECT_REF;
// array header is NUM_BYTES_OBJECT_HEADER + NUM_BYTES_INT, but aligned (object alignment):
NUM_BYTES_ARRAY_HEADER = (int) alignObjectSize(NUM_BYTES_OBJECT_HEADER + NUM_BYTES_INT);
NUM_BYTES_ARRAY_HEADER = (int) alignObjectSize(NUM_BYTES_OBJECT_HEADER + Integer.BYTES);
} else {
JVM_IS_HOTSPOT_64BIT = false;
COMPRESSED_REFS_ENABLED = false;
@ -173,7 +164,7 @@ public final class RamUsageEstimator {
NUM_BYTES_OBJECT_REF = 4;
NUM_BYTES_OBJECT_HEADER = 8;
// For 32 bit JVMs, no extra alignment of array header:
NUM_BYTES_ARRAY_HEADER = NUM_BYTES_OBJECT_HEADER + NUM_BYTES_INT;
NUM_BYTES_ARRAY_HEADER = NUM_BYTES_OBJECT_HEADER + Integer.BYTES;
}
// get min/max value of cached Long class instances:
@ -223,32 +214,32 @@ public final class RamUsageEstimator {
/** Returns the size in bytes of the char[] object. */
public static long sizeOf(char[] arr) {
return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_CHAR * arr.length);
return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) Character.BYTES * arr.length);
}
/** Returns the size in bytes of the short[] object. */
public static long sizeOf(short[] arr) {
return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_SHORT * arr.length);
return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) Short.BYTES * arr.length);
}
/** Returns the size in bytes of the int[] object. */
public static long sizeOf(int[] arr) {
return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_INT * arr.length);
return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) Integer.BYTES * arr.length);
}
/** Returns the size in bytes of the float[] object. */
public static long sizeOf(float[] arr) {
return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_FLOAT * arr.length);
return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) Float.BYTES * arr.length);
}
/** Returns the size in bytes of the long[] object. */
public static long sizeOf(long[] arr) {
return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_LONG * arr.length);
return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) Long.BYTES * arr.length);
}
/** Returns the size in bytes of the double[] object. */
public static long sizeOf(double[] arr) {
return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_DOUBLE * arr.length);
return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) Double.BYTES * arr.length);
}
/** Returns the shallow size in bytes of the Object[] object. */
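Taken together, these overloads all instantiate one formula: alignObjectSize(NUM_BYTES_ARRAY_HEADER + elementBytes * length). Below is a minimal standalone sketch, assuming a 16-byte array header and 8-byte object alignment (typical 64-bit HotSpot values; the real class detects both at runtime rather than assuming them):

    // Hypothetical sketch of the shared array-sizing formula; header size and
    // alignment are assumptions here, not probed from the JVM as in Lucene.
    public class ArraySizeSketch {
      static final long ASSUMED_ARRAY_HEADER = 16;
      static long alignObjectSize(long size) {
        return (size + 7) & ~7L; // round up to the next 8-byte boundary
      }
      static long sizeOfCharArray(int length) {
        return alignObjectSize(ASSUMED_ARRAY_HEADER + (long) Character.BYTES * length);
      }
      public static void main(String[] args) {
        System.out.println(sizeOfCharArray(10)); // 16 + 2*10 = 36, aligned up to 40
      }
    }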

View File

@ -78,7 +78,7 @@ public final class RecyclingIntBlockAllocator extends Allocator {
@Override
public int[] getIntBlock() {
if (freeBlocks == 0) {
bytesUsed.addAndGet(blockSize*RamUsageEstimator.NUM_BYTES_INT);
bytesUsed.addAndGet(blockSize*Integer.BYTES);
return new int[blockSize];
}
final int[] b = freeByteBlocks[--freeBlocks];
@ -104,7 +104,7 @@ public final class RecyclingIntBlockAllocator extends Allocator {
for (int i = stop; i < end; i++) {
blocks[i] = null;
}
bytesUsed.addAndGet(-(end - stop) * (blockSize * RamUsageEstimator.NUM_BYTES_INT));
bytesUsed.addAndGet(-(end - stop) * (blockSize * Integer.BYTES));
assert bytesUsed.get() >= 0;
}
@ -150,7 +150,7 @@ public final class RecyclingIntBlockAllocator extends Allocator {
while (freeBlocks > stop) {
freeByteBlocks[--freeBlocks] = null;
}
bytesUsed.addAndGet(-count*blockSize* RamUsageEstimator.NUM_BYTES_INT);
bytesUsed.addAndGet(-count*blockSize*Integer.BYTES);
assert bytesUsed.get() >= 0;
return count;
}
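All three hunks above track the same quantity, blockSize * Integer.BYTES per int block. A tiny bookkeeping sketch, with 8192 as an assumed block size:

    import java.util.concurrent.atomic.AtomicLong;

    // Sketch of the allocator's byte accounting; the block size is an assumption.
    public class BlockAccountingSketch {
      public static void main(String[] args) {
        AtomicLong bytesUsed = new AtomicLong();
        int blockSize = 8192;
        bytesUsed.addAndGet((long) blockSize * Integer.BYTES);  // hand out one block: +32768
        bytesUsed.addAndGet(-(long) blockSize * Integer.BYTES); // discard it again
        System.out.println(bytesUsed.get()); // 0
      }
    }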

View File

@ -149,7 +149,7 @@ public class SentinelIntSet {
/** Return the memory footprint of this class in bytes. */
public long ramBytesUsed() {
return RamUsageEstimator.alignObjectSize(
RamUsageEstimator.NUM_BYTES_INT * 3
Integer.BYTES * 3
+ RamUsageEstimator.NUM_BYTES_OBJECT_REF)
+ RamUsageEstimator.sizeOf(keys);
}

View File

@ -893,7 +893,7 @@ public class Automaton implements Accountable {
return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.sizeOf(states) + RamUsageEstimator.sizeOf(transitions) +
RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + (isAccept.size() / 8) + RamUsageEstimator.NUM_BYTES_OBJECT_REF +
2 * RamUsageEstimator.NUM_BYTES_OBJECT_REF +
3 * RamUsageEstimator.NUM_BYTES_INT +
RamUsageEstimator.NUM_BYTES_BOOLEAN;
3 * Integer.BYTES +
1;
}
}

View File

@ -16,11 +16,9 @@
*/
package org.apache.lucene.util.automaton;
import java.util.TreeMap;
import java.util.Map;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;
// Just holds a set of int[] states, plus a corresponding
// int[] count per state. Used by
@ -135,7 +133,7 @@ final class SortedIntSet {
public void computeHash() {
if (useTreeMap) {
if (map.size() > values.length) {
final int size = ArrayUtil.oversize(map.size(), RamUsageEstimator.NUM_BYTES_INT);
final int size = ArrayUtil.oversize(map.size(), Integer.BYTES);
values = new int[size];
counts = new int[size];
}
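ArrayUtil.oversize takes the element width so its growth heuristic can overallocate in allocation-friendly chunks; Integer.BYTES is a drop-in replacement for the removed constant. A usage sketch (assumes lucene-core on the classpath):

    import org.apache.lucene.util.ArrayUtil;

    // Grow a pair of parallel int[] arrays to a common oversized length,
    // dropping old contents, as computeHash does above.
    public class OversizeSketch {
      public static void main(String[] args) {
        int[] values = new int[4];
        int[] counts = new int[4];
        int needed = 10;
        if (needed > values.length) {
          final int size = ArrayUtil.oversize(needed, Integer.BYTES);
          values = new int[size];
          counts = new int[size];
        }
        System.out.println(values.length + " " + counts.length); // both >= 10, equal
      }
    }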

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.util.bkd;
import java.io.IOException;
import java.util.Arrays;
@ -27,7 +26,6 @@ import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.StringHelper;
/** Handles intersection of a multi-dimensional shape in byte[] space with a block KD-tree previously written with {@link BKDWriter}.
@ -415,7 +413,7 @@ public class BKDReader implements Accountable {
@Override
public long ramBytesUsed() {
return splitPackedValues.length +
leafBlockFPs.length * RamUsageEstimator.NUM_BYTES_LONG;
leafBlockFPs.length * Long.BYTES;
}
public byte[] getMinPackedValue() {

View File

@ -16,7 +16,6 @@
*/
package org.apache.lucene.util.bkd;
import java.io.Closeable;
import java.io.EOFException;
import java.io.IOException;
@ -40,9 +39,7 @@ import org.apache.lucene.util.IntroSorter;
import org.apache.lucene.util.LongBitSet;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.OfflineSorter;
import org.apache.lucene.util.OfflineSorter.ByteSequencesWriter;
import org.apache.lucene.util.PriorityQueue;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.StringHelper;
// TODO
@ -152,7 +149,7 @@ public class BKDWriter implements Closeable {
maxPackedValue = new byte[packedBytesLength];
// dimensional values (numDims * bytesPerDim) + ord (long) + docID (int)
bytesPerDoc = packedBytesLength + RamUsageEstimator.NUM_BYTES_LONG + RamUsageEstimator.NUM_BYTES_INT;
bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES;
// As we recurse, we compute temporary partitions of the data, halving the
// number of points at each recursion. Once there are few enough points,
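A worked instance of the bytesPerDoc formula, assuming a two-dimensional point of 4-byte values:

    // Assumed shape: numDims = 2, bytesPerDim = Integer.BYTES.
    public class BytesPerDocSketch {
      public static void main(String[] args) {
        int packedBytesLength = 2 * Integer.BYTES;                        // 8 bytes of values
        int bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES; // 8 + 8 + 4
        System.out.println(bytesPerDoc); // 20 bytes per document
      }
    }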

View File

@ -16,12 +16,10 @@
*/
package org.apache.lucene.util.bkd;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;
final class HeapPointWriter implements PointWriter {
int[] docIDs;
@ -94,7 +92,7 @@ final class HeapPointWriter implements PointWriter {
assert closed == false;
assert packedValue.length == packedBytesLength;
if (ords.length == nextWrite) {
int nextSize = Math.min(maxSize, ArrayUtil.oversize(nextWrite+1, RamUsageEstimator.NUM_BYTES_INT));
int nextSize = Math.min(maxSize, ArrayUtil.oversize(nextWrite+1, Integer.BYTES));
assert nextSize > nextWrite: "nextSize=" + nextSize + " vs nextWrite=" + nextWrite;
ords = growExact(ords, nextSize);
docIDs = growExact(docIDs, nextSize);

View File

@ -16,15 +16,12 @@
*/
package org.apache.lucene.util.bkd;
import java.io.EOFException;
import java.io.IOException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.RamUsageEstimator;
/** Reads points from disk in a fixed-width format, previously written with {@link OfflinePointWriter}. */
final class OfflinePointReader implements PointReader {
@ -41,7 +38,7 @@ final class OfflinePointReader implements PointReader {
private OfflinePointReader(IndexInput in, int packedBytesLength, long start, long length) throws IOException {
this.in = in;
bytesPerDoc = packedBytesLength + RamUsageEstimator.NUM_BYTES_LONG + RamUsageEstimator.NUM_BYTES_INT;
bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES;
long seekFP = start * bytesPerDoc;
in.seek(seekFP);
this.countLeft = length;

View File

@ -16,13 +16,11 @@
*/
package org.apache.lucene.util.bkd;
import java.io.IOException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.RamUsageEstimator;
/** Writes points to disk in a fixed-width format. */
final class OfflinePointWriter implements PointWriter {
@ -38,7 +36,7 @@ final class OfflinePointWriter implements PointWriter {
this.out = tempDir.createTempOutput(tempFileNamePrefix, "bkd", IOContext.DEFAULT);
this.tempDir = tempDir;
this.packedBytesLength = packedBytesLength;
bytesPerDoc = packedBytesLength + RamUsageEstimator.NUM_BYTES_LONG + RamUsageEstimator.NUM_BYTES_INT;
bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES;
}
/** Initializes on an already written/closed file, just so consumers can use {@link #getReader} to read the file. */
@ -46,7 +44,7 @@ final class OfflinePointWriter implements PointWriter {
this.out = out;
this.tempDir = tempDir;
this.packedBytesLength = packedBytesLength;
bytesPerDoc = packedBytesLength + RamUsageEstimator.NUM_BYTES_LONG + RamUsageEstimator.NUM_BYTES_INT;
bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES;
this.count = count;
closed = true;
}

View File

@ -101,8 +101,8 @@ abstract class AbstractPagedMutable<T extends AbstractPagedMutable<T>> extends L
protected long baseRamBytesUsed() {
return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
+ RamUsageEstimator.NUM_BYTES_OBJECT_REF
+ RamUsageEstimator.NUM_BYTES_LONG
+ 3 * RamUsageEstimator.NUM_BYTES_INT;
+ Long.BYTES
+ 3 * Integer.BYTES;
}
@Override

View File

@ -62,7 +62,7 @@ final class Direct16 extends PackedInts.MutableImpl {
public long ramBytesUsed() {
return RamUsageEstimator.alignObjectSize(
RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
+ 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue
+ 2 * Integer.BYTES // valueCount,bitsPerValue
+ RamUsageEstimator.NUM_BYTES_OBJECT_REF) // values ref
+ RamUsageEstimator.sizeOf(values);
}
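The shallow part of this estimate (shared by the near-identical Direct32, Direct64 and Direct8 versions below) is a constant. Assuming a 12-byte object header and 4-byte compressed references, common 64-bit HotSpot values that RamUsageEstimator detects rather than hard-codes:

    // align(12 + 2 * Integer.BYTES + 4) = align(24) = 24 bytes of shallow size;
    // the full estimate adds RamUsageEstimator.sizeOf(values) on top.
    public class ShallowSizeSketch {
      public static void main(String[] args) {
        long shallow = ((12 + 2 * Integer.BYTES + 4) + 7) & ~7L;
        System.out.println(shallow); // 24
      }
    }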

View File

@ -62,7 +62,7 @@ final class Direct32 extends PackedInts.MutableImpl {
public long ramBytesUsed() {
return RamUsageEstimator.alignObjectSize(
RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
+ 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue
+ 2 * Integer.BYTES // valueCount,bitsPerValue
+ RamUsageEstimator.NUM_BYTES_OBJECT_REF) // values ref
+ RamUsageEstimator.sizeOf(values);
}

View File

@ -57,7 +57,7 @@ final class Direct64 extends PackedInts.MutableImpl {
public long ramBytesUsed() {
return RamUsageEstimator.alignObjectSize(
RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
+ 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue
+ 2 * Integer.BYTES // valueCount,bitsPerValue
+ RamUsageEstimator.NUM_BYTES_OBJECT_REF) // values ref
+ RamUsageEstimator.sizeOf(values);
}

View File

@ -60,7 +60,7 @@ final class Direct8 extends PackedInts.MutableImpl {
public long ramBytesUsed() {
return RamUsageEstimator.alignObjectSize(
RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
+ 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue
+ 2 * Integer.BYTES // valueCount,bitsPerValue
+ RamUsageEstimator.NUM_BYTES_OBJECT_REF) // values ref
+ RamUsageEstimator.sizeOf(values);
}

View File

@ -131,8 +131,8 @@ public class GrowableWriter extends PackedInts.Mutable {
return RamUsageEstimator.alignObjectSize(
RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
+ RamUsageEstimator.NUM_BYTES_OBJECT_REF
+ RamUsageEstimator.NUM_BYTES_LONG
+ RamUsageEstimator.NUM_BYTES_FLOAT)
+ Long.BYTES
+ Float.BYTES)
+ current.ramBytesUsed();
}

View File

@ -112,7 +112,7 @@ final class Packed16ThreeBlocks extends PackedInts.MutableImpl {
public long ramBytesUsed() {
return RamUsageEstimator.alignObjectSize(
RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
+ 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue
+ 2 * Integer.BYTES // valueCount,bitsPerValue
+ RamUsageEstimator.NUM_BYTES_OBJECT_REF) // blocks ref
+ RamUsageEstimator.sizeOf(blocks);
}

View File

@ -246,8 +246,8 @@ class Packed64 extends PackedInts.MutableImpl {
public long ramBytesUsed() {
return RamUsageEstimator.alignObjectSize(
RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
+ 3 * RamUsageEstimator.NUM_BYTES_INT // bpvMinusBlockSize,valueCount,bitsPerValue
+ RamUsageEstimator.NUM_BYTES_LONG // maskRight
+ 3 * Integer.BYTES // bpvMinusBlockSize,valueCount,bitsPerValue
+ Long.BYTES // maskRight
+ RamUsageEstimator.NUM_BYTES_OBJECT_REF) // blocks ref
+ RamUsageEstimator.sizeOf(blocks);
}

View File

@ -61,7 +61,7 @@ abstract class Packed64SingleBlock extends PackedInts.MutableImpl {
public long ramBytesUsed() {
return RamUsageEstimator.alignObjectSize(
RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
+ 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue
+ 2 * Integer.BYTES // valueCount,bitsPerValue
+ RamUsageEstimator.NUM_BYTES_OBJECT_REF) // blocks ref
+ RamUsageEstimator.sizeOf(blocks);
}

View File

@ -110,7 +110,7 @@ final class Packed8ThreeBlocks extends PackedInts.MutableImpl {
public long ramBytesUsed() {
return RamUsageEstimator.alignObjectSize(
RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
+ 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue
+ 2 * Integer.BYTES // valueCount,bitsPerValue
+ RamUsageEstimator.NUM_BYTES_OBJECT_REF) // blocks ref
+ RamUsageEstimator.sizeOf(blocks);
}

View File

@ -681,7 +681,7 @@ public class PackedInts {
@Override
public long ramBytesUsed() {
return RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_INT);
return RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + Integer.BYTES);
}
}

View File

@ -16,8 +16,6 @@
*/
package org.apache.lucene.util.packed;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.PackedInts.Mutable;
/**
@ -65,7 +63,7 @@ public final class PagedGrowableWriter extends AbstractPagedMutable<PagedGrowabl
@Override
protected long baseRamBytesUsed() {
return super.baseRamBytesUsed() + RamUsageEstimator.NUM_BYTES_FLOAT;
return super.baseRamBytesUsed() + Float.BYTES;
}
}

View File

@ -22,7 +22,6 @@ import java.util.List;
import org.apache.lucene.util.Counter;
import org.apache.lucene.util.IntBlockPool;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.RamUsageEstimator;
/**
* tests basic {@link IntBlockPool} functionality
@ -53,8 +52,7 @@ public class TestIntBlockPool extends LuceneTestCase {
assertEquals(0, bytesUsed.get());
} else {
pool.reset(true, true);
assertEquals(IntBlockPool.INT_BLOCK_SIZE
* RamUsageEstimator.NUM_BYTES_INT, bytesUsed.get());
assertEquals(IntBlockPool.INT_BLOCK_SIZE * Integer.BYTES, bytesUsed.get());
}
}
}
@ -98,8 +96,7 @@ public class TestIntBlockPool extends LuceneTestCase {
assertEquals(0, bytesUsed.get());
} else {
pool.reset(true, true);
assertEquals(IntBlockPool.INT_BLOCK_SIZE
* RamUsageEstimator.NUM_BYTES_INT, bytesUsed.get());
assertEquals(IntBlockPool.INT_BLOCK_SIZE * Integer.BYTES, bytesUsed.get());
}
}
}
@ -118,14 +115,14 @@ public class TestIntBlockPool extends LuceneTestCase {
@Override
public int[] getIntBlock() {
bytesUsed.addAndGet(blockSize * RamUsageEstimator.NUM_BYTES_INT);
bytesUsed.addAndGet(blockSize * Integer.BYTES);
return new int[blockSize];
}
@Override
public void recycleIntBlocks(int[][] blocks, int start, int end) {
bytesUsed
.addAndGet(-((end - start) * blockSize * RamUsageEstimator.NUM_BYTES_INT));
.addAndGet(-((end - start) * blockSize * Integer.BYTES));
}
}

View File

@ -140,7 +140,7 @@ public class TestBKD extends LuceneTestCase {
if (values[dim] > maxValue[dim]) {
maxValue[dim] = values[dim];
}
NumericUtils.intToBytes(values[dim], scratch, dim);
NumericUtils.intToBytes(values[dim], scratch, dim * Integer.BYTES);
if (VERBOSE) {
System.out.println(" " + dim + " -> " + values[dim]);
}
@ -161,8 +161,8 @@ public class TestBKD extends LuceneTestCase {
byte[] minPackedValue = r.getMinPackedValue();
byte[] maxPackedValue = r.getMaxPackedValue();
for(int dim=0;dim<numDims;dim++) {
assertEquals(minValue[dim], NumericUtils.bytesToInt(minPackedValue, dim));
assertEquals(maxValue[dim], NumericUtils.bytesToInt(maxPackedValue, dim));
assertEquals(minValue[dim], NumericUtils.bytesToInt(minPackedValue, dim * Integer.BYTES));
assertEquals(maxValue[dim], NumericUtils.bytesToInt(maxPackedValue, dim * Integer.BYTES));
}
int iters = atLeast(100);
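The substantive change in this test mirrors the encoding cleanup itself: offsets into a packed value are now byte offsets, dim * Integer.BYTES, rather than dimension indices. A round-trip sketch using the same NumericUtils calls (assumes lucene-core as of this commit):

    import org.apache.lucene.util.NumericUtils;

    // Pack a 3-dimensional int point, one Integer.BYTES-wide slot per dimension,
    // then decode every dimension back from its byte offset.
    public class PackedPointSketch {
      public static void main(String[] args) {
        int[] values = { 17, -4, 99 };
        byte[] scratch = new byte[values.length * Integer.BYTES];
        for (int dim = 0; dim < values.length; dim++) {
          NumericUtils.intToBytes(values[dim], scratch, dim * Integer.BYTES);
        }
        for (int dim = 0; dim < values.length; dim++) {
          System.out.println(NumericUtils.bytesToInt(scratch, dim * Integer.BYTES)); // 17, -4, 99
        }
      }
    }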
@ -196,7 +196,7 @@ public class TestBKD extends LuceneTestCase {
public void visit(int docID, byte[] packedValue) {
//System.out.println("visit check docID=" + docID);
for(int dim=0;dim<numDims;dim++) {
int x = NumericUtils.bytesToInt(packedValue, dim);
int x = NumericUtils.bytesToInt(packedValue, dim * Integer.BYTES);
if (x < queryMin[dim] || x > queryMax[dim]) {
//System.out.println(" no");
return;
@ -211,8 +211,8 @@ public class TestBKD extends LuceneTestCase {
public Relation compare(byte[] minPacked, byte[] maxPacked) {
boolean crosses = false;
for(int dim=0;dim<numDims;dim++) {
int min = NumericUtils.bytesToInt(minPacked, dim);
int max = NumericUtils.bytesToInt(maxPacked, dim);
int min = NumericUtils.bytesToInt(minPacked, dim * Integer.BYTES);
int max = NumericUtils.bytesToInt(maxPacked, dim * Integer.BYTES);
assert max >= min;
if (max < queryMin[dim] || min > queryMax[dim]) {

View File

@ -225,8 +225,8 @@ public class MemoryIndex {
this.storePayloads = storePayloads;
this.bytesUsed = Counter.newCounter();
final int maxBufferedByteBlocks = (int)((maxReusedBytes/2) / ByteBlockPool.BYTE_BLOCK_SIZE );
final int maxBufferedIntBlocks = (int) ((maxReusedBytes - (maxBufferedByteBlocks*ByteBlockPool.BYTE_BLOCK_SIZE))/(IntBlockPool.INT_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT));
assert (maxBufferedByteBlocks * ByteBlockPool.BYTE_BLOCK_SIZE) + (maxBufferedIntBlocks * IntBlockPool.INT_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT) <= maxReusedBytes;
final int maxBufferedIntBlocks = (int) ((maxReusedBytes - (maxBufferedByteBlocks*ByteBlockPool.BYTE_BLOCK_SIZE))/(IntBlockPool.INT_BLOCK_SIZE * Integer.BYTES));
assert (maxBufferedByteBlocks * ByteBlockPool.BYTE_BLOCK_SIZE) + (maxBufferedIntBlocks * IntBlockPool.INT_BLOCK_SIZE * Integer.BYTES) <= maxReusedBytes;
byteBlockPool = new ByteBlockPool(new RecyclingByteBlockAllocator(ByteBlockPool.BYTE_BLOCK_SIZE, maxBufferedByteBlocks, bytesUsed));
intBlockPool = new IntBlockPool(new RecyclingIntBlockAllocator(IntBlockPool.INT_BLOCK_SIZE, maxBufferedIntBlocks, bytesUsed));
postingsWriter = new SliceWriter(intBlockPool);
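A worked instance of that budget split, assuming BYTE_BLOCK_SIZE = 32768, INT_BLOCK_SIZE = 8192 and a 1 MB reuse budget:

    // Half the budget (in bytes) goes to byte blocks; the remainder is divided
    // among int blocks, each of which costs INT_BLOCK_SIZE * Integer.BYTES bytes.
    public class BlockBudgetSketch {
      public static void main(String[] args) {
        long maxReusedBytes = 1L << 20; // 1 MB, assumed
        int byteBlockSize = 32768, intBlockSize = 8192; // assumed pool constants
        int maxBufferedByteBlocks = (int) ((maxReusedBytes / 2) / byteBlockSize);
        int maxBufferedIntBlocks = (int) ((maxReusedBytes - maxBufferedByteBlocks * (long) byteBlockSize)
            / (intBlockSize * Integer.BYTES));
        System.out.println(maxBufferedByteBlocks + " " + maxBufferedIntBlocks); // 16 16
        // 16 * 32768 + 16 * 32768 = 1048576 bytes, exactly the budget
      }
    }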
@ -1216,9 +1216,9 @@ public class MemoryIndex {
@Override
public int[] init() {
final int[] ord = super.init();
start = new int[ArrayUtil.oversize(ord.length, RamUsageEstimator.NUM_BYTES_INT)];
end = new int[ArrayUtil.oversize(ord.length, RamUsageEstimator.NUM_BYTES_INT)];
freq = new int[ArrayUtil.oversize(ord.length, RamUsageEstimator.NUM_BYTES_INT)];
start = new int[ArrayUtil.oversize(ord.length, Integer.BYTES)];
end = new int[ArrayUtil.oversize(ord.length, Integer.BYTES)];
freq = new int[ArrayUtil.oversize(ord.length, Integer.BYTES)];
assert start.length >= ord.length;
assert end.length >= ord.length;
assert freq.length >= ord.length;

View File

@ -481,7 +481,7 @@ class FieldCacheImpl implements FieldCache {
@Override
public long ramBytesUsed() {
return values.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_LONG;
return values.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_OBJECT_REF + Long.BYTES;
}
}
@ -599,7 +599,7 @@ class FieldCacheImpl implements FieldCache {
termOrdToBytesOffset.ramBytesUsed() +
docToTermOrd.ramBytesUsed() +
3*RamUsageEstimator.NUM_BYTES_OBJECT_REF +
RamUsageEstimator.NUM_BYTES_INT;
Integer.BYTES;
}
@Override

View File

@ -24,7 +24,7 @@ import org.apache.lucene.spatial.util.GeoUtils;
public class LatLonPoint extends Field {
public static final FieldType TYPE = new FieldType();
static {
TYPE.setDimensions(2, 4);
TYPE.setDimensions(2, Integer.BYTES);
TYPE.freeze();
}
@ -45,7 +45,7 @@ public class LatLonPoint extends Field {
}
byte[] bytes = new byte[8];
NumericUtils.intToBytes(encodeLat(lat), bytes, 0);
NumericUtils.intToBytes(encodeLon(lon), bytes, 1);
NumericUtils.intToBytes(encodeLon(lon), bytes, Integer.BYTES);
fieldsData = new BytesRef(bytes);
}
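Both encoded ints now land at explicit byte offsets, latitude at 0 and longitude at Integer.BYTES. A layout sketch with hard-coded values standing in for LatLonPoint.encodeLat and encodeLon:

    import org.apache.lucene.util.NumericUtils;

    // bytes[0..3] = encoded latitude, bytes[4..7] = encoded longitude.
    public class LatLonLayoutSketch {
      public static void main(String[] args) {
        int encodedLat = 123456;   // stand-in for encodeLat(lat)
        int encodedLon = -654321;  // stand-in for encodeLon(lon)
        byte[] bytes = new byte[2 * Integer.BYTES];
        NumericUtils.intToBytes(encodedLat, bytes, 0);
        NumericUtils.intToBytes(encodedLon, bytes, Integer.BYTES);
        System.out.println(NumericUtils.bytesToInt(bytes, Integer.BYTES)); // -654321
      }
    }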

View File

@ -124,7 +124,7 @@ public class PointInPolygonQuery extends Query {
public void visit(int docID, byte[] packedValue) {
assert packedValue.length == 8;
double lat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(packedValue, 0));
double lon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(packedValue, 1));
double lon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(packedValue, Integer.BYTES));
if (GeoRelationUtils.pointInPolygon(polyLons, polyLats, lat, lon)) {
hitCount[0]++;
result.add(docID);
@ -134,9 +134,9 @@ public class PointInPolygonQuery extends Query {
@Override
public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
double cellMinLat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(minPackedValue, 0));
double cellMinLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(minPackedValue, 1));
double cellMinLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(minPackedValue, Integer.BYTES));
double cellMaxLat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(maxPackedValue, 0));
double cellMaxLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(maxPackedValue, 1));
double cellMaxLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(maxPackedValue, Integer.BYTES));
if (cellMinLat <= minLat && cellMaxLat >= maxLat && cellMinLon <= minLon && cellMaxLon >= maxLon) {
// Cell fully encloses the query

View File

@ -98,7 +98,7 @@ public class PointInRectQuery extends Query {
public void visit(int docID, byte[] packedValue) {
assert packedValue.length == 8;
double lat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(packedValue, 0));
double lon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(packedValue, 1));
double lon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(packedValue, Integer.BYTES));
if (lat >= minLat && lat <= maxLat && lon >= minLon && lon <= maxLon) {
hitCount[0]++;
result.add(docID);
@ -108,9 +108,9 @@ public class PointInRectQuery extends Query {
@Override
public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
double cellMinLat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(minPackedValue, 0));
double cellMinLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(minPackedValue, 1));
double cellMinLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(minPackedValue, Integer.BYTES));
double cellMaxLat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(maxPackedValue, 0));
double cellMaxLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(maxPackedValue, 1));
double cellMaxLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(maxPackedValue, Integer.BYTES));
if (minLat <= cellMinLat && maxLat >= cellMaxLat && minLon <= cellMinLon && maxLon >= cellMaxLon) {
return Relation.CELL_INSIDE_QUERY;

View File

@ -83,7 +83,7 @@ public class TestDocValuesRangeQuery extends LuceneTestCase {
if (l == null) {
return null;
} else {
byte[] bytes = new byte[RamUsageEstimator.NUM_BYTES_LONG];
byte[] bytes = new byte[Long.BYTES];
NumericUtils.longToBytes(l, bytes, 0);
return new BytesRef(bytes);
}
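The buffer is now sized by the constant describing what it holds; a minimal sketch of the same packing:

    import org.apache.lucene.util.BytesRef;
    import org.apache.lucene.util.NumericUtils;

    // Exactly Long.BYTES (8) bytes for one long value.
    public class LongBytesSketch {
      public static void main(String[] args) {
        byte[] bytes = new byte[Long.BYTES];
        NumericUtils.longToBytes(42L, bytes, 0);
        BytesRef ref = new BytesRef(bytes);
        System.out.println(ref.length); // 8
      }
    }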

View File

@ -353,7 +353,7 @@ public class ContainsPrefixTreeQuery extends AbstractPrefixTreeQuery {
public long ramBytesUsed() {
return RamUsageEstimator.alignObjectSize(
RamUsageEstimator.NUM_BYTES_OBJECT_REF
+ RamUsageEstimator.NUM_BYTES_INT)
+ Integer.BYTES)
+ intSet.ramBytesUsed();
}

View File

@ -32,7 +32,7 @@ public final class Geo3DPoint extends Field {
/** Indexing {@link FieldType}. */
public static final FieldType TYPE = new FieldType();
static {
TYPE.setDimensions(3, RamUsageEstimator.NUM_BYTES_INT);
TYPE.setDimensions(3, Integer.BYTES);
TYPE.freeze();
}
@ -61,8 +61,8 @@ public final class Geo3DPoint extends Field {
private void fillFieldsData(double planetMax, double x, double y, double z) {
byte[] bytes = new byte[12];
NumericUtils.intToBytes(Geo3DUtil.encodeValue(planetMax, x), bytes, 0);
NumericUtils.intToBytes(Geo3DUtil.encodeValue(planetMax, y), bytes, 1);
NumericUtils.intToBytes(Geo3DUtil.encodeValue(planetMax, z), bytes, 2);
NumericUtils.intToBytes(Geo3DUtil.encodeValue(planetMax, y), bytes, Integer.BYTES);
NumericUtils.intToBytes(Geo3DUtil.encodeValue(planetMax, z), bytes, 2 * Integer.BYTES);
fieldsData = new BytesRef(bytes);
}
}
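The same byte-offset convention in three dimensions; the 12-byte buffer is exactly 3 * Integer.BYTES. A sketch with hard-coded encoded values standing in for Geo3DUtil.encodeValue:

    import org.apache.lucene.util.NumericUtils;

    // x at offset 0, y at Integer.BYTES, z at 2 * Integer.BYTES.
    public class Geo3DLayoutSketch {
      public static void main(String[] args) {
        int ex = 1, ey = 2, ez = 3; // stand-ins for encodeValue(planetMax, ...)
        byte[] bytes = new byte[3 * Integer.BYTES]; // 12 bytes
        NumericUtils.intToBytes(ex, bytes, 0);
        NumericUtils.intToBytes(ey, bytes, Integer.BYTES);
        NumericUtils.intToBytes(ez, bytes, 2 * Integer.BYTES);
        System.out.println(NumericUtils.bytesToInt(bytes, 2 * Integer.BYTES)); // 3
      }
    }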

View File

@ -106,8 +106,8 @@ public class PointInGeo3DShapeQuery extends Query {
public void visit(int docID, byte[] packedValue) {
assert packedValue.length == 12;
double x = Geo3DUtil.decodeValueCenter(planetMax, NumericUtils.bytesToInt(packedValue, 0));
double y = Geo3DUtil.decodeValueCenter(planetMax, NumericUtils.bytesToInt(packedValue, 1));
double z = Geo3DUtil.decodeValueCenter(planetMax, NumericUtils.bytesToInt(packedValue, 2));
double y = Geo3DUtil.decodeValueCenter(planetMax, NumericUtils.bytesToInt(packedValue, 1 * Integer.BYTES));
double z = Geo3DUtil.decodeValueCenter(planetMax, NumericUtils.bytesToInt(packedValue, 2 * Integer.BYTES));
if (shape.isWithin(x, y, z)) {
result.add(docID);
hitCount[0]++;
@ -122,10 +122,10 @@ public class PointInGeo3DShapeQuery extends Query {
// a Math.round from double to long, so e.g. 1.4 -> 1, and -1.4 -> -1:
double xMin = Geo3DUtil.decodeValueMin(planetMax, NumericUtils.bytesToInt(minPackedValue, 0));
double xMax = Geo3DUtil.decodeValueMax(planetMax, NumericUtils.bytesToInt(maxPackedValue, 0));
double yMin = Geo3DUtil.decodeValueMin(planetMax, NumericUtils.bytesToInt(minPackedValue, 1));
double yMax = Geo3DUtil.decodeValueMax(planetMax, NumericUtils.bytesToInt(maxPackedValue, 1));
double zMin = Geo3DUtil.decodeValueMin(planetMax, NumericUtils.bytesToInt(minPackedValue, 2));
double zMax = Geo3DUtil.decodeValueMax(planetMax, NumericUtils.bytesToInt(maxPackedValue, 2));
double yMin = Geo3DUtil.decodeValueMin(planetMax, NumericUtils.bytesToInt(minPackedValue, 1 * Integer.BYTES));
double yMax = Geo3DUtil.decodeValueMax(planetMax, NumericUtils.bytesToInt(maxPackedValue, 1 * Integer.BYTES));
double zMin = Geo3DUtil.decodeValueMin(planetMax, NumericUtils.bytesToInt(minPackedValue, 2 * Integer.BYTES));
double zMax = Geo3DUtil.decodeValueMax(planetMax, NumericUtils.bytesToInt(maxPackedValue, 2 * Integer.BYTES));
//System.out.println(" compare: x=" + cellXMin + "-" + cellXMax + " y=" + cellYMin + "-" + cellYMax + " z=" + cellZMin + "-" + cellZMax);
assert xMin <= xMax;

View File

@ -56,7 +56,7 @@ public class TernaryTreeNode {
mem += hiKid.sizeInBytes();
}
if (token != null) {
mem += RamUsageEstimator.shallowSizeOf(token) + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + RamUsageEstimator.NUM_BYTES_CHAR * token.length();
mem += RamUsageEstimator.shallowSizeOf(token) + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + Character.BYTES * token.length();
}
mem += RamUsageEstimator.shallowSizeOf(val);
return mem;
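The per-token term here is the shallow String object plus its backing char[], an array header followed by Character.BYTES per char. A sketch with an assumed 16-byte array header (the real value is the JVM-dependent NUM_BYTES_ARRAY_HEADER):

    // Assumed array header; RamUsageEstimator detects the real value at runtime.
    public class TokenMemorySketch {
      public static void main(String[] args) {
        String token = "lucene";
        long assumedArrayHeader = 16;
        long charData = assumedArrayHeader + (long) Character.BYTES * token.length();
        System.out.println(charData); // 16 + 2*6 = 28 bytes for the char[] payload
      }
    }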