From 9ca1a19b81448594bbaf046da5d5e33228e6974f Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Mon, 22 Feb 2016 19:32:28 -0500 Subject: [PATCH] LUCENE-7042: more cleanup for Point encodings --- .../lucene/analysis/en/PorterStemmer.java | 3 +- .../miscellaneous/ASCIIFoldingFilter.java | 4 +- .../miscellaneous/WordDelimiterFilter.java | 3 +- .../analysis/synonym/SynonymFilter.java | 7 +--- .../analysis/util/RollingCharBuffer.java | 4 +- .../tartarus/snowball/SnowballProgram.java | 4 +- .../lucene/analysis/ja/JapaneseTokenizer.java | 5 +-- .../lucene50/Lucene50DocValuesProducer.java | 6 +-- .../codecs/blocktreeords/FSTOrdsOutputs.java | 2 +- .../bloom/BloomFilteringPostingsFormat.java | 5 +-- .../codecs/memory/DirectPostingsFormat.java | 3 +- .../codecs/memory/MemoryPostingsFormat.java | 5 +-- .../CharTermAttributeImpl.java | 8 ++-- .../lucene54/Lucene54DocValuesProducer.java | 3 +- .../perfield/PerFieldDocValuesFormat.java | 5 +-- .../apache/lucene/document/DoublePoint.java | 30 ++++++-------- .../apache/lucene/document/FloatPoint.java | 28 ++++++------- .../org/apache/lucene/document/IntPoint.java | 28 ++++++------- .../org/apache/lucene/document/LongPoint.java | 28 ++++++------- .../apache/lucene/index/BufferedUpdates.java | 16 ++++---- .../apache/lucene/index/DocValuesUpdate.java | 14 +++---- .../index/DocumentsWriterPerThread.java | 7 +--- .../index/FreqProxTermsWriterPerField.java | 10 ++--- .../lucene/index/FrozenBufferedUpdates.java | 3 +- .../lucene/index/ParallelPostingsArray.java | 4 +- .../lucene/index/PointValuesWriter.java | 6 +-- .../apache/lucene/index/PrefixCodedTerms.java | 4 +- .../lucene/index/SortedDocValuesWriter.java | 4 +- .../index/SortedSetDocValuesWriter.java | 6 +-- .../index/TermVectorsConsumerPerField.java | 3 +- .../lucene/search/CachingCollector.java | 8 ++-- .../apache/lucene/search/PointRangeQuery.java | 8 ++-- .../apache/lucene/search/ScoringRewrite.java | 3 +- .../org/apache/lucene/util/ArrayUtil.java | 21 +++++----- 
.../org/apache/lucene/util/BytesRefArray.java | 6 +-- .../org/apache/lucene/util/BytesRefHash.java | 15 ++++--- .../apache/lucene/util/DocIdSetBuilder.java | 3 +- .../org/apache/lucene/util/NumericUtils.java | 31 +++++++------- .../apache/lucene/util/RamUsageEstimator.java | 41 ++++++++----------- .../util/RecyclingIntBlockAllocator.java | 6 +-- .../apache/lucene/util/SentinelIntSet.java | 2 +- .../lucene/util/automaton/Automaton.java | 4 +- .../lucene/util/automaton/SortedIntSet.java | 4 +- .../org/apache/lucene/util/bkd/BKDReader.java | 4 +- .../org/apache/lucene/util/bkd/BKDWriter.java | 5 +-- .../lucene/util/bkd/HeapPointWriter.java | 4 +- .../lucene/util/bkd/OfflinePointReader.java | 5 +-- .../lucene/util/bkd/OfflinePointWriter.java | 6 +-- .../util/packed/AbstractPagedMutable.java | 4 +- .../apache/lucene/util/packed/Direct16.java | 2 +- .../apache/lucene/util/packed/Direct32.java | 2 +- .../apache/lucene/util/packed/Direct64.java | 2 +- .../apache/lucene/util/packed/Direct8.java | 2 +- .../lucene/util/packed/GrowableWriter.java | 4 +- .../util/packed/Packed16ThreeBlocks.java | 2 +- .../apache/lucene/util/packed/Packed64.java | 4 +- .../util/packed/Packed64SingleBlock.java | 2 +- .../util/packed/Packed8ThreeBlocks.java | 2 +- .../apache/lucene/util/packed/PackedInts.java | 2 +- .../util/packed/PagedGrowableWriter.java | 4 +- .../apache/lucene/index/TestIntBlockPool.java | 11 ++--- .../org/apache/lucene/util/bkd/TestBKD.java | 12 +++--- .../lucene/index/memory/MemoryIndex.java | 10 ++--- .../lucene/uninverting/FieldCacheImpl.java | 4 +- .../apache/lucene/document/LatLonPoint.java | 4 +- .../lucene/search/PointInPolygonQuery.java | 6 +-- .../lucene/search/PointInRectQuery.java | 6 +-- .../search/TestDocValuesRangeQuery.java | 2 +- .../prefix/ContainsPrefixTreeQuery.java | 2 +- .../org/apache/lucene/geo3d/Geo3DPoint.java | 6 +-- .../lucene/geo3d/PointInGeo3DShapeQuery.java | 12 +++--- .../search/suggest/tst/TernaryTreeNode.java | 2 +- 72 files changed, 221 
insertions(+), 322 deletions(-) diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/en/PorterStemmer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/en/PorterStemmer.java index 041d7b83b6f..ef239b116d6 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/en/PorterStemmer.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/en/PorterStemmer.java @@ -48,7 +48,6 @@ import java.io.IOException; import java.io.InputStream; import java.io.FileInputStream; -import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_CHAR; import org.apache.lucene.util.ArrayUtil; /** @@ -453,7 +452,7 @@ class PorterStemmer public boolean stem(char[] wordBuffer, int offset, int wordLen) { reset(); if (b.length < wordLen) { - b = new char[ArrayUtil.oversize(wordLen, NUM_BYTES_CHAR)]; + b = new char[ArrayUtil.oversize(wordLen, Character.BYTES)]; } System.arraycopy(wordBuffer, offset, b, 0, wordLen); i = wordLen; diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/ASCIIFoldingFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/ASCIIFoldingFilter.java index bd8d571e41f..a327d175d6b 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/ASCIIFoldingFilter.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/ASCIIFoldingFilter.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.analysis.miscellaneous; - import java.io.IOException; import org.apache.lucene.analysis.TokenFilter; @@ -24,7 +23,6 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.util.ArrayUtil; -import org.apache.lucene.util.RamUsageEstimator; /** * This class converts alphabetic, numeric, and symbolic Unicode characters @@ -142,7 +140,7 @@ public final class 
ASCIIFoldingFilter extends TokenFilter { // Worst-case length required: final int maxSizeNeeded = 4 * length; if (output.length < maxSizeNeeded) { - output = new char[ArrayUtil.oversize(maxSizeNeeded, RamUsageEstimator.NUM_BYTES_CHAR)]; + output = new char[ArrayUtil.oversize(maxSizeNeeded, Character.BYTES)]; } outputPos = foldToASCII(input, 0, output, 0, length); diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterFilter.java index e2e7074be2a..20e013da5ac 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterFilter.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterFilter.java @@ -28,7 +28,6 @@ import org.apache.lucene.analysis.util.CharArraySet; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.InPlaceMergeSorter; -import org.apache.lucene.util.RamUsageEstimator; import java.io.IOException; import java.util.Arrays; @@ -429,7 +428,7 @@ public final class WordDelimiterFilter extends TokenFilter { savedType = typeAttribute.type(); if (savedBuffer.length < termAttribute.length()) { - savedBuffer = new char[ArrayUtil.oversize(termAttribute.length(), RamUsageEstimator.NUM_BYTES_CHAR)]; + savedBuffer = new char[ArrayUtil.oversize(termAttribute.length(), Character.BYTES)]; } System.arraycopy(termAttribute.buffer(), 0, savedBuffer, 0, termAttribute.length()); diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/synonym/SynonymFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/synonym/SynonymFilter.java index 8b298f79f99..6a72920d2f6 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/synonym/SynonymFilter.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/synonym/SynonymFilter.java @@ -16,7 
+16,6 @@ */ package org.apache.lucene.analysis.synonym; - import java.io.IOException; import java.util.Arrays; @@ -31,11 +30,9 @@ import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.RamUsageEstimator; -import org.apache.lucene.util.UnicodeUtil; import org.apache.lucene.util.fst.FST; /** @@ -207,12 +204,12 @@ public final class SynonymFilter extends TokenFilter { outputs = Arrays.copyOf(outputs, ArrayUtil.oversize(1+count, RamUsageEstimator.NUM_BYTES_OBJECT_REF)); } if (count == endOffsets.length) { - final int[] next = new int[ArrayUtil.oversize(1+count, RamUsageEstimator.NUM_BYTES_INT)]; + final int[] next = new int[ArrayUtil.oversize(1+count, Integer.BYTES)]; System.arraycopy(endOffsets, 0, next, 0, count); endOffsets = next; } if (count == posLengths.length) { - final int[] next = new int[ArrayUtil.oversize(1+count, RamUsageEstimator.NUM_BYTES_INT)]; + final int[] next = new int[ArrayUtil.oversize(1+count, Integer.BYTES)]; System.arraycopy(posLengths, 0, next, 0, count); posLengths = next; } diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/util/RollingCharBuffer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/util/RollingCharBuffer.java index 1ced960df57..2464ebd55ff 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/util/RollingCharBuffer.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/util/RollingCharBuffer.java @@ -16,12 +16,10 @@ */ package org.apache.lucene.analysis.util; - import java.io.IOException; import java.io.Reader; import org.apache.lucene.util.ArrayUtil; -import org.apache.lucene.util.RamUsageEstimator; /** Acts like a forever growing char[] as you read * characters into it from 
the provided reader, but @@ -71,7 +69,7 @@ public final class RollingCharBuffer { } if (count == buffer.length) { // Grow - final char[] newBuffer = new char[ArrayUtil.oversize(1+count, RamUsageEstimator.NUM_BYTES_CHAR)]; + final char[] newBuffer = new char[ArrayUtil.oversize(1+count, Character.BYTES)]; //System.out.println(Thread.currentThread().getName() + ": cb grow " + newBuffer.length); System.arraycopy(buffer, nextWrite, newBuffer, 0, buffer.length - nextWrite); System.arraycopy(buffer, 0, newBuffer, buffer.length - nextWrite, nextWrite); diff --git a/lucene/analysis/common/src/java/org/tartarus/snowball/SnowballProgram.java b/lucene/analysis/common/src/java/org/tartarus/snowball/SnowballProgram.java index 70327494792..bfc8ec0b1c6 100644 --- a/lucene/analysis/common/src/java/org/tartarus/snowball/SnowballProgram.java +++ b/lucene/analysis/common/src/java/org/tartarus/snowball/SnowballProgram.java @@ -29,11 +29,9 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ - package org.tartarus.snowball; import org.apache.lucene.util.ArrayUtil; -import org.apache.lucene.util.RamUsageEstimator; /** * This is the rev 502 of the Snowball SVN trunk, @@ -397,7 +395,7 @@ public abstract class SnowballProgram { final int newLength = limit + adjustment; //resize if necessary if (newLength > current.length) { - char newBuffer[] = new char[ArrayUtil.oversize(newLength, RamUsageEstimator.NUM_BYTES_CHAR)]; + char newBuffer[] = new char[ArrayUtil.oversize(newLength, Character.BYTES)]; System.arraycopy(current, 0, newBuffer, 0, limit); current = newBuffer; } diff --git a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseTokenizer.java b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseTokenizer.java index 5641e6c1f5e..dfab482d0f8 100644 --- a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseTokenizer.java +++ b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseTokenizer.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.analysis.ja; - import java.io.IOException; import java.io.StringReader; import java.util.ArrayList; @@ -1053,7 +1052,7 @@ public final class JapaneseTokenizer extends Tokenizer { assert baseOffset <= lastOffset; int size = lastOffset - baseOffset + 1; if (rootCapacity < size) { - int oversize = ArrayUtil.oversize(size, RamUsageEstimator.NUM_BYTES_INT); + int oversize = ArrayUtil.oversize(size, Integer.BYTES); lRoot = new int[oversize]; rRoot = new int[oversize]; rootCapacity = oversize; @@ -1067,7 +1066,7 @@ public final class JapaneseTokenizer extends Tokenizer { // Reserve at least N nodes. 
private void reserve(int n) { if (capacity < n) { - int oversize = ArrayUtil.oversize(n, RamUsageEstimator.NUM_BYTES_INT); + int oversize = ArrayUtil.oversize(n, Integer.BYTES); nodeDicType = new Type[oversize]; nodeWordID = new int[oversize]; nodeMark = new int[oversize]; diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene50/Lucene50DocValuesProducer.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene50/Lucene50DocValuesProducer.java index c323d6eb923..62c9477abec 100644 --- a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene50/Lucene50DocValuesProducer.java +++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene50/Lucene50DocValuesProducer.java @@ -537,7 +537,7 @@ class Lucene50DocValuesProducer extends DocValuesProducer implements Closeable { addresses = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, bytes.count+1, false); if (!merging) { addressInstances.put(field.name, addresses); - ramBytesUsed.addAndGet(addresses.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT); + ramBytesUsed.addAndGet(addresses.ramBytesUsed() + Integer.BYTES); } } return addresses; @@ -577,7 +577,7 @@ class Lucene50DocValuesProducer extends DocValuesProducer implements Closeable { addresses = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, size, false); if (!merging) { addressInstances.put(field.name, addresses); - ramBytesUsed.addAndGet(addresses.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT); + ramBytesUsed.addAndGet(addresses.ramBytesUsed() + Integer.BYTES); } } return addresses; @@ -662,7 +662,7 @@ class Lucene50DocValuesProducer extends DocValuesProducer implements Closeable { instance = MonotonicBlockPackedReader.of(data, entry.packedIntsVersion, entry.blockSize, entry.count+1, false); if (!merging) { ordIndexInstances.put(field.name, instance); - ramBytesUsed.addAndGet(instance.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT); + 
ramBytesUsed.addAndGet(instance.ramBytesUsed() + Integer.BYTES); } } return instance; diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/FSTOrdsOutputs.java b/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/FSTOrdsOutputs.java index 050ad2fc60e..8d61e2dcf1a 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/FSTOrdsOutputs.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/FSTOrdsOutputs.java @@ -228,6 +228,6 @@ final class FSTOrdsOutputs extends Outputs { @Override public long ramBytesUsed(Output output) { - return 2 * RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2 * RamUsageEstimator.NUM_BYTES_LONG + 2 * RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 2 * RamUsageEstimator.NUM_BYTES_INT + output.bytes.length; + return 2 * RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2 * Long.BYTES + 2 * RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 2 * Integer.BYTES + output.bytes.length; } } diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/bloom/BloomFilteringPostingsFormat.java b/lucene/codecs/src/java/org/apache/lucene/codecs/bloom/BloomFilteringPostingsFormat.java index 6b838bba323..ffe9fa1002a 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/bloom/BloomFilteringPostingsFormat.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/bloom/BloomFilteringPostingsFormat.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.codecs.bloom; - import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -45,10 +44,8 @@ import org.apache.lucene.store.DataOutput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; -import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; -import org.apache.lucene.util.RamUsageEstimator; import 
org.apache.lucene.util.automaton.CompiledAutomaton; /** @@ -380,7 +377,7 @@ public final class BloomFilteringPostingsFormat extends PostingsFormat { public long ramBytesUsed() { long sizeInBytes = ((delegateFieldsProducer!=null) ? delegateFieldsProducer.ramBytesUsed() : 0); for(Map.Entry entry: bloomsByFieldName.entrySet()) { - sizeInBytes += entry.getKey().length() * RamUsageEstimator.NUM_BYTES_CHAR; + sizeInBytes += entry.getKey().length() * Character.BYTES; sizeInBytes += entry.getValue().ramBytesUsed(); } return sizeInBytes; diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/DirectPostingsFormat.java b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/DirectPostingsFormat.java index a1fa24d8ac4..56b1a36b775 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/DirectPostingsFormat.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/DirectPostingsFormat.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.codecs.memory; - import java.io.IOException; import java.util.Collection; import java.util.Collections; @@ -154,7 +153,7 @@ public final class DirectPostingsFormat extends PostingsFormat { public long ramBytesUsed() { long sizeInBytes = 0; for(Map.Entry entry: fields.entrySet()) { - sizeInBytes += entry.getKey().length() * RamUsageEstimator.NUM_BYTES_CHAR; + sizeInBytes += entry.getKey().length() * Character.BYTES; sizeInBytes += entry.getValue().ramBytesUsed(); } return sizeInBytes; diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryPostingsFormat.java b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryPostingsFormat.java index 0fb320c5c21..1427decaed3 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryPostingsFormat.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryPostingsFormat.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.codecs.memory; - import java.io.IOException; import java.util.Collection; import java.util.Collections; 
@@ -50,12 +49,10 @@ import org.apache.lucene.store.RAMOutputStream; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; import org.apache.lucene.util.ArrayUtil; -import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IntsRefBuilder; -import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.fst.Builder; import org.apache.lucene.util.fst.ByteSequenceOutputs; import org.apache.lucene.util.fst.BytesRefFSTEnum; @@ -1016,7 +1013,7 @@ public final class MemoryPostingsFormat extends PostingsFormat { public long ramBytesUsed() { long sizeInBytes = 0; for(Map.Entry entry: fields.entrySet()) { - sizeInBytes += (entry.getKey().length() * RamUsageEstimator.NUM_BYTES_CHAR); + sizeInBytes += (entry.getKey().length() * Character.BYTES); sizeInBytes += entry.getValue().ramBytesUsed(); } return sizeInBytes; diff --git a/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/CharTermAttributeImpl.java b/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/CharTermAttributeImpl.java index a1ac69bbafd..cde8dd92686 100644 --- a/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/CharTermAttributeImpl.java +++ b/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/CharTermAttributeImpl.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.analysis.tokenattributes; - import java.nio.CharBuffer; import org.apache.lucene.util.ArrayUtil; @@ -24,13 +23,12 @@ import org.apache.lucene.util.AttributeImpl; import org.apache.lucene.util.AttributeReflector; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.RamUsageEstimator; /** Default implementation of {@link CharTermAttribute}. 
*/ public class CharTermAttributeImpl extends AttributeImpl implements CharTermAttribute, TermToBytesRefAttribute, Cloneable { private static int MIN_BUFFER_SIZE = 10; - private char[] termBuffer = new char[ArrayUtil.oversize(MIN_BUFFER_SIZE, RamUsageEstimator.NUM_BYTES_CHAR)]; + private char[] termBuffer = new char[ArrayUtil.oversize(MIN_BUFFER_SIZE, Character.BYTES)]; private int termLength = 0; /** May be used by subclasses to convert to different charsets / encodings for implementing {@link #getBytesRef()}. */ @@ -56,7 +54,7 @@ public class CharTermAttributeImpl extends AttributeImpl implements CharTermAttr if(termBuffer.length < newSize){ // Not big enough; create a new array with slight // over allocation and preserve content - final char[] newCharBuffer = new char[ArrayUtil.oversize(newSize, RamUsageEstimator.NUM_BYTES_CHAR)]; + final char[] newCharBuffer = new char[ArrayUtil.oversize(newSize, Character.BYTES)]; System.arraycopy(termBuffer, 0, newCharBuffer, 0, termBuffer.length); termBuffer = newCharBuffer; } @@ -67,7 +65,7 @@ public class CharTermAttributeImpl extends AttributeImpl implements CharTermAttr if(termBuffer.length < newSize){ // Not big enough; create a new array with slight // over allocation: - termBuffer = new char[ArrayUtil.oversize(newSize, RamUsageEstimator.NUM_BYTES_CHAR)]; + termBuffer = new char[ArrayUtil.oversize(newSize, Character.BYTES)]; } } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java index 4e24c7a6653..67027ea9d73 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene54/Lucene54DocValuesProducer.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.codecs.lucene54; - import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; @@ -753,7 +752,7 @@ final class 
Lucene54DocValuesProducer extends DocValuesProducer implements Close addresses = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, size, false); if (!merging) { addressInstances.put(field.name, addresses); - ramBytesUsed.addAndGet(addresses.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT); + ramBytesUsed.addAndGet(addresses.ramBytesUsed() + Integer.BYTES); } } return addresses; diff --git a/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldDocValuesFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldDocValuesFormat.java index 25566e072e9..baadf1e156e 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldDocValuesFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/perfield/PerFieldDocValuesFormat.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.codecs.perfield; - import java.io.Closeable; import java.io.IOException; import java.util.Collection; @@ -44,7 +43,6 @@ import org.apache.lucene.util.Accountables; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; -import org.apache.lucene.util.RamUsageEstimator; /** * Enables per field docvalues support. 
@@ -324,8 +322,7 @@ public abstract class PerFieldDocValuesFormat extends DocValuesFormat { public long ramBytesUsed() { long size = 0; for (Map.Entry entry : formats.entrySet()) { - size += (entry.getKey().length() * RamUsageEstimator.NUM_BYTES_CHAR) + - entry.getValue().ramBytesUsed(); + size += (entry.getKey().length() * Character.BYTES) + entry.getValue().ramBytesUsed(); } return size; } diff --git a/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java b/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java index 1619857b5e6..9099cce85c6 100644 --- a/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java +++ b/lucene/core/src/java/org/apache/lucene/document/DoublePoint.java @@ -16,10 +16,8 @@ */ package org.apache.lucene.document; - import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; -import org.apache.lucene.util.RamUsageEstimator; /** A double field that is indexed dimensionally such that finding * all documents within an N-dimensional shape or range at search time is @@ -30,7 +28,7 @@ public final class DoublePoint extends Field { private static FieldType getType(int numDims) { FieldType type = new FieldType(); - type.setDimensions(numDims, RamUsageEstimator.NUM_BYTES_LONG); + type.setDimensions(numDims, Double.BYTES); type.freeze(); return type; } @@ -59,8 +57,8 @@ public final class DoublePoint extends Field { throw new IllegalStateException("this field (name=" + name + ") uses " + type.pointDimensionCount() + " dimensions; cannot convert to a single numeric value"); } BytesRef bytes = (BytesRef) fieldsData; - assert bytes.length == RamUsageEstimator.NUM_BYTES_LONG; - return NumericUtils.sortableLongToDouble(NumericUtils.bytesToLongDirect(bytes.bytes, bytes.offset)); + assert bytes.length == Double.BYTES; + return decodeDimension(bytes.bytes, bytes.offset); } private static BytesRef pack(double... 
point) { @@ -70,10 +68,10 @@ public final class DoublePoint extends Field { if (point.length == 0) { throw new IllegalArgumentException("point cannot be 0 dimensions"); } - byte[] packed = new byte[point.length * RamUsageEstimator.NUM_BYTES_LONG]; + byte[] packed = new byte[point.length * Double.BYTES]; - for(int dim=0;dim. Say list allocates ~2X size (2*POINTER). Integer is OBJ_HEADER + int */ - final static int BYTES_PER_DEL_DOCID = 2*RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_INT; + final static int BYTES_PER_DEL_DOCID = 2*RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + Integer.BYTES; /* Rough logic: HashMap has an array[Entry] w/ varying load factor (say 2 * POINTER). Entry is object w/ Query key, Integer val, int hash, Entry next (OBJ_HEADER + 3*POINTER + INT). Query we often undercount (say 24 bytes). Integer is OBJ_HEADER + INT. */ - final static int BYTES_PER_DEL_QUERY = 5*RamUsageEstimator.NUM_BYTES_OBJECT_REF + 2*RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2*RamUsageEstimator.NUM_BYTES_INT + 24; + final static int BYTES_PER_DEL_QUERY = 5*RamUsageEstimator.NUM_BYTES_OBJECT_REF + 2*RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2*Integer.BYTES + 24; /* Rough logic: NumericUpdate calculates its actual size, * including the update Term and DV field (String). The @@ -82,7 +82,7 @@ class BufferedUpdates { */ final static int BYTES_PER_NUMERIC_FIELD_ENTRY = 7*RamUsageEstimator.NUM_BYTES_OBJECT_REF + 3*RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + - RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 5*RamUsageEstimator.NUM_BYTES_INT + RamUsageEstimator.NUM_BYTES_FLOAT; + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 5*Integer.BYTES + Float.BYTES; /* Rough logic: Incremented when we see another Term for an already updated * field. @@ -93,7 +93,7 @@ class BufferedUpdates { * Term (key) is counted only as POINTER. 
* NumericUpdate (val) counts its own size and isn't accounted for here. */ - final static int BYTES_PER_NUMERIC_UPDATE_ENTRY = 7*RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_INT; + final static int BYTES_PER_NUMERIC_UPDATE_ENTRY = 7*RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + Integer.BYTES; /* Rough logic: BinaryUpdate calculates its actual size, * including the update Term and DV field (String). The @@ -111,7 +111,7 @@ class BufferedUpdates { */ final static int BYTES_PER_BINARY_FIELD_ENTRY = 7*RamUsageEstimator.NUM_BYTES_OBJECT_REF + 3*RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + - RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 5*RamUsageEstimator.NUM_BYTES_INT + RamUsageEstimator.NUM_BYTES_FLOAT; + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 5*Integer.BYTES + Float.BYTES; /* Rough logic: Incremented when we see another Term for an already updated * field. @@ -122,7 +122,7 @@ class BufferedUpdates { * Term (key) is counted only as POINTER. * BinaryUpdate (val) counts its own size and isn't accounted for here. */ - final static int BYTES_PER_BINARY_UPDATE_ENTRY = 7*RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_INT; + final static int BYTES_PER_BINARY_UPDATE_ENTRY = 7*RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + Integer.BYTES; final AtomicInteger numTermDeletes = new AtomicInteger(); final AtomicInteger numNumericUpdates = new AtomicInteger(); @@ -226,7 +226,7 @@ class BufferedUpdates { // is done to respect IndexWriterConfig.setMaxBufferedDeleteTerms. 
numTermDeletes.incrementAndGet(); if (current == null) { - bytesUsed.addAndGet(BYTES_PER_DEL_TERM + term.bytes.length + (RamUsageEstimator.NUM_BYTES_CHAR * term.field().length())); + bytesUsed.addAndGet(BYTES_PER_DEL_TERM + term.bytes.length + (Character.BYTES * term.field().length())); } } diff --git a/lucene/core/src/java/org/apache/lucene/index/DocValuesUpdate.java b/lucene/core/src/java/org/apache/lucene/index/DocValuesUpdate.java index e7b17815085..c97296464f9 100644 --- a/lucene/core/src/java/org/apache/lucene/index/DocValuesUpdate.java +++ b/lucene/core/src/java/org/apache/lucene/index/DocValuesUpdate.java @@ -16,16 +16,12 @@ */ package org.apache.lucene.index; - import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_ARRAY_HEADER; -import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_CHAR; -import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_INT; import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_HEADER; import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_REF; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.RamUsageEstimator; /** An in-place update to a DocValues field. 
*/ abstract class DocValuesUpdate { @@ -37,7 +33,7 @@ abstract class DocValuesUpdate { * String: 2*OBJ_HEADER + 4*INT + PTR + string.length*CHAR * T: OBJ_HEADER */ - private static final int RAW_SIZE_IN_BYTES = 8*NUM_BYTES_OBJECT_HEADER + 8*NUM_BYTES_OBJECT_REF + 8*NUM_BYTES_INT; + private static final int RAW_SIZE_IN_BYTES = 8*NUM_BYTES_OBJECT_HEADER + 8*NUM_BYTES_OBJECT_REF + 8*Integer.BYTES; final DocValuesType type; final Term term; @@ -63,9 +59,9 @@ abstract class DocValuesUpdate { final int sizeInBytes() { int sizeInBytes = RAW_SIZE_IN_BYTES; - sizeInBytes += term.field.length() * NUM_BYTES_CHAR; + sizeInBytes += term.field.length() * Character.BYTES; sizeInBytes += term.bytes.bytes.length; - sizeInBytes += field.length() * NUM_BYTES_CHAR; + sizeInBytes += field.length() * Character.BYTES; sizeInBytes += valueSizeInBytes(); return sizeInBytes; } @@ -79,7 +75,7 @@ abstract class DocValuesUpdate { static final class BinaryDocValuesUpdate extends DocValuesUpdate { /* Size of BytesRef: 2*INT + ARRAY_HEADER + PTR */ - private static final long RAW_VALUE_SIZE_IN_BYTES = NUM_BYTES_ARRAY_HEADER + 2*NUM_BYTES_INT + NUM_BYTES_OBJECT_REF; + private static final long RAW_VALUE_SIZE_IN_BYTES = NUM_BYTES_ARRAY_HEADER + 2*Integer.BYTES + NUM_BYTES_OBJECT_REF; BinaryDocValuesUpdate(Term term, String field, BytesRef value) { super(DocValuesType.BINARY, term, field, value); @@ -101,7 +97,7 @@ abstract class DocValuesUpdate { @Override long valueSizeInBytes() { - return RamUsageEstimator.NUM_BYTES_LONG; + return Long.BYTES; } } diff --git a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java index e5998deb40a..65d6a144e74 100644 --- a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java +++ b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.index; - import java.io.IOException; import 
java.text.NumberFormat; import java.util.Collections; @@ -40,7 +39,6 @@ import org.apache.lucene.util.Counter; import org.apache.lucene.util.InfoStream; import org.apache.lucene.util.IntBlockPool; import org.apache.lucene.util.MutableBits; -import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.StringHelper; import org.apache.lucene.util.Version; @@ -576,14 +574,13 @@ class DocumentsWriterPerThread { @Override public int[] getIntBlock() { int[] b = new int[IntBlockPool.INT_BLOCK_SIZE]; - bytesUsed.addAndGet(IntBlockPool.INT_BLOCK_SIZE - * RamUsageEstimator.NUM_BYTES_INT); + bytesUsed.addAndGet(IntBlockPool.INT_BLOCK_SIZE * Integer.BYTES); return b; } @Override public void recycleIntBlocks(int[][] blocks, int offset, int length) { - bytesUsed.addAndGet(-(length * (IntBlockPool.INT_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT))); + bytesUsed.addAndGet(-(length * (IntBlockPool.INT_BLOCK_SIZE * Integer.BYTES))); } } diff --git a/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriterPerField.java b/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriterPerField.java index 0a7dcfd6d39..28fe8721867 100644 --- a/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriterPerField.java +++ b/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriterPerField.java @@ -16,13 +16,11 @@ */ package org.apache.lucene.index; - import java.io.IOException; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PayloadAttribute; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.RamUsageEstimator; // TODO: break into separate freq and prox writers as // codecs; make separate container (tii/tis/skip/*) that can @@ -257,15 +255,15 @@ final class FreqProxTermsWriterPerField extends TermsHashPerField { @Override int bytesPerPosting() { - int bytes = ParallelPostingsArray.BYTES_PER_POSTING + 2 * RamUsageEstimator.NUM_BYTES_INT; + int bytes = 
ParallelPostingsArray.BYTES_PER_POSTING + 2 * Integer.BYTES; if (lastPositions != null) { - bytes += RamUsageEstimator.NUM_BYTES_INT; + bytes += Integer.BYTES; } if (lastOffsets != null) { - bytes += RamUsageEstimator.NUM_BYTES_INT; + bytes += Integer.BYTES; } if (termFreqs != null) { - bytes += RamUsageEstimator.NUM_BYTES_INT; + bytes += Integer.BYTES; } return bytes; diff --git a/lucene/core/src/java/org/apache/lucene/index/FrozenBufferedUpdates.java b/lucene/core/src/java/org/apache/lucene/index/FrozenBufferedUpdates.java index a3a57a30c36..4f482ad4d10 100644 --- a/lucene/core/src/java/org/apache/lucene/index/FrozenBufferedUpdates.java +++ b/lucene/core/src/java/org/apache/lucene/index/FrozenBufferedUpdates.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.index; - import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedHashMap; @@ -40,7 +39,7 @@ import org.apache.lucene.util.RamUsageEstimator; class FrozenBufferedUpdates { /* Query we often undercount (say 24 bytes), plus int. 
*/ - final static int BYTES_PER_DEL_QUERY = RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_INT + 24; + final static int BYTES_PER_DEL_QUERY = RamUsageEstimator.NUM_BYTES_OBJECT_REF + Integer.BYTES + 24; // Terms, in sorted order: final PrefixCodedTerms terms; diff --git a/lucene/core/src/java/org/apache/lucene/index/ParallelPostingsArray.java b/lucene/core/src/java/org/apache/lucene/index/ParallelPostingsArray.java index 400f4416cf4..35e8e4f8921 100644 --- a/lucene/core/src/java/org/apache/lucene/index/ParallelPostingsArray.java +++ b/lucene/core/src/java/org/apache/lucene/index/ParallelPostingsArray.java @@ -16,12 +16,10 @@ */ package org.apache.lucene.index; - import org.apache.lucene.util.ArrayUtil; -import org.apache.lucene.util.RamUsageEstimator; class ParallelPostingsArray { - final static int BYTES_PER_POSTING = 3 * RamUsageEstimator.NUM_BYTES_INT; + final static int BYTES_PER_POSTING = 3 * Integer.BYTES; final int size; final int[] textStarts; diff --git a/lucene/core/src/java/org/apache/lucene/index/PointValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/PointValuesWriter.java index 1008f05d8b8..283f7bdbea0 100644 --- a/lucene/core/src/java/org/apache/lucene/index/PointValuesWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/PointValuesWriter.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.index; - import java.io.IOException; import org.apache.lucene.codecs.PointReader; @@ -25,7 +24,6 @@ import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ByteBlockPool; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Counter; -import org.apache.lucene.util.RamUsageEstimator; /** Buffers up pending byte[][] value(s) per doc, then flushes when segment flushes. 
*/ class PointValuesWriter { @@ -41,7 +39,7 @@ class PointValuesWriter { this.iwBytesUsed = docWriter.bytesUsed; this.bytes = new ByteBlockPool(docWriter.byteBlockAllocator); docIDs = new int[16]; - iwBytesUsed.addAndGet(16 * RamUsageEstimator.NUM_BYTES_INT); + iwBytesUsed.addAndGet(16 * Integer.BYTES); packedValue = new byte[fieldInfo.getPointDimensionCount() * fieldInfo.getPointNumBytes()]; } @@ -54,7 +52,7 @@ class PointValuesWriter { } if (docIDs.length == numDocs) { docIDs = ArrayUtil.grow(docIDs, numDocs+1); - iwBytesUsed.addAndGet((docIDs.length - numDocs) * RamUsageEstimator.NUM_BYTES_INT); + iwBytesUsed.addAndGet((docIDs.length - numDocs) * Integer.BYTES); } bytes.append(value); docIDs[numDocs] = docID; diff --git a/lucene/core/src/java/org/apache/lucene/index/PrefixCodedTerms.java b/lucene/core/src/java/org/apache/lucene/index/PrefixCodedTerms.java index d4b23febf04..87a9ae2c06b 100644 --- a/lucene/core/src/java/org/apache/lucene/index/PrefixCodedTerms.java +++ b/lucene/core/src/java/org/apache/lucene/index/PrefixCodedTerms.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.index; - import java.io.IOException; import java.util.Objects; @@ -27,7 +26,6 @@ import org.apache.lucene.store.RAMOutputStream; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.RamUsageEstimator; /** * Prefix codes term instances (prefixes are shared) @@ -45,7 +43,7 @@ public class PrefixCodedTerms implements Accountable { @Override public long ramBytesUsed() { - return buffer.ramBytesUsed() + 2 * RamUsageEstimator.NUM_BYTES_LONG; + return buffer.ramBytesUsed() + 2 * Long.BYTES; } /** Records del gen for this packet. 
*/ diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java index 3231c607dd9..2d8557ba9d6 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.index; - import static org.apache.lucene.util.ByteBlockPool.BYTE_BLOCK_SIZE; import java.io.IOException; @@ -29,7 +28,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefHash.DirectBytesStartArray; import org.apache.lucene.util.BytesRefHash; import org.apache.lucene.util.Counter; -import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PackedLongValues; @@ -93,7 +91,7 @@ class SortedDocValuesWriter extends DocValuesWriter { // 1. when indexing, when hash is 50% full, rehash() suddenly needs 2*size ints. // TODO: can this same OOM happen in THPF? // 2. when flushing, we need 1 int per value (slot in the ordMap). 
- iwBytesUsed.addAndGet(2 * RamUsageEstimator.NUM_BYTES_INT); + iwBytesUsed.addAndGet(2 * Integer.BYTES); } pending.add(termID); diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java index 52c6b5d858d..e98fc82328e 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.index; - import static org.apache.lucene.util.ByteBlockPool.BYTE_BLOCK_SIZE; import java.io.IOException; @@ -31,7 +30,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefHash.DirectBytesStartArray; import org.apache.lucene.util.BytesRefHash; import org.apache.lucene.util.Counter; -import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PackedLongValues; @@ -125,14 +123,14 @@ class SortedSetDocValuesWriter extends DocValuesWriter { // 1. when indexing, when hash is 50% full, rehash() suddenly needs 2*size ints. // TODO: can this same OOM happen in THPF? // 2. when flushing, we need 1 int per value (slot in the ordMap). 
- iwBytesUsed.addAndGet(2 * RamUsageEstimator.NUM_BYTES_INT); + iwBytesUsed.addAndGet(2 * Integer.BYTES); } if (currentUpto == currentValues.length) { currentValues = ArrayUtil.grow(currentValues, currentValues.length+1); // reserve additional space for max # values per-doc // when flushing, we need an int[] to sort the mapped-ords within the doc - iwBytesUsed.addAndGet((currentValues.length - currentUpto) * 2 * RamUsageEstimator.NUM_BYTES_INT); + iwBytesUsed.addAndGet((currentValues.length - currentUpto) * 2 * Integer.BYTES); } currentValues[currentUpto] = termID; diff --git a/lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumerPerField.java b/lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumerPerField.java index f252147823c..f69817ae526 100644 --- a/lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumerPerField.java +++ b/lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumerPerField.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.index; - import java.io.IOException; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; @@ -283,7 +282,7 @@ final class TermVectorsConsumerPerField extends TermsHashPerField { @Override int bytesPerPosting() { - return super.bytesPerPosting() + 3 * RamUsageEstimator.NUM_BYTES_INT; + return super.bytesPerPosting() + 3 * Integer.BYTES; } } } diff --git a/lucene/core/src/java/org/apache/lucene/search/CachingCollector.java b/lucene/core/src/java/org/apache/lucene/search/CachingCollector.java index 55fb30588a5..344ec3aaebc 100644 --- a/lucene/core/src/java/org/apache/lucene/search/CachingCollector.java +++ b/lucene/core/src/java/org/apache/lucene/search/CachingCollector.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.search; - import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -24,7 +23,6 @@ import java.util.List; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.util.ArrayUtil; -import 
org.apache.lucene.util.RamUsageEstimator; /** * Caches all docs, and optionally also scores, coming from @@ -233,7 +231,7 @@ public abstract class CachingCollector extends FilterCollector { if (docCount >= maxDocsToCache) { invalidate(); } else { - final int newLen = Math.min(ArrayUtil.oversize(docCount + 1, RamUsageEstimator.NUM_BYTES_INT), maxDocsToCache); + final int newLen = Math.min(ArrayUtil.oversize(docCount + 1, Integer.BYTES), maxDocsToCache); grow(newLen); } } @@ -329,9 +327,9 @@ public abstract class CachingCollector extends FilterCollector { * scores are cached. */ public static CachingCollector create(Collector other, boolean cacheScores, double maxRAMMB) { - int bytesPerDoc = RamUsageEstimator.NUM_BYTES_INT; + int bytesPerDoc = Integer.BYTES; if (cacheScores) { - bytesPerDoc += RamUsageEstimator.NUM_BYTES_FLOAT; + bytesPerDoc += Float.BYTES; } final int maxDocsToCache = (int) ((maxRAMMB * 1024 * 1024) / bytesPerDoc); return create(other, cacheScores, maxDocsToCache); diff --git a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java index 949cffe5540..9fe2a8d7dd8 100644 --- a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java @@ -194,7 +194,7 @@ public class PointRangeQuery extends Query { return new PointRangeQuery(field, IntPoint.encode(lowerValue), lowerInclusive, IntPoint.encode(upperValue), upperInclusive) { @Override protected String toString(byte[] value) { - return IntPoint.decodeDimension(value).toString(); + return IntPoint.decodeDimension(value, 0).toString(); } }; } @@ -264,7 +264,7 @@ public class PointRangeQuery extends Query { return new PointRangeQuery(field, LongPoint.encode(lowerValue), lowerInclusive, LongPoint.encode(upperValue), upperInclusive) { @Override protected String toString(byte[] value) { - return LongPoint.decodeDimension(value).toString(); + return 
LongPoint.decodeDimension(value, 0).toString(); } }; } @@ -334,7 +334,7 @@ public class PointRangeQuery extends Query { return new PointRangeQuery(field, FloatPoint.encode(lowerValue), lowerInclusive, FloatPoint.encode(upperValue), upperInclusive) { @Override protected String toString(byte[] value) { - return FloatPoint.decodeDimension(value).toString(); + return FloatPoint.decodeDimension(value, 0).toString(); } }; } @@ -404,7 +404,7 @@ public class PointRangeQuery extends Query { return new PointRangeQuery(field, DoublePoint.encode(lowerValue), lowerInclusive, DoublePoint.encode(upperValue), upperInclusive) { @Override protected String toString(byte[] value) { - return DoublePoint.decodeDimension(value).toString(); + return DoublePoint.decodeDimension(value, 0).toString(); } }; } diff --git a/lucene/core/src/java/org/apache/lucene/search/ScoringRewrite.java b/lucene/core/src/java/org/apache/lucene/search/ScoringRewrite.java index f472f5c7030..e0917eb49b7 100644 --- a/lucene/core/src/java/org/apache/lucene/search/ScoringRewrite.java +++ b/lucene/core/src/java/org/apache/lucene/search/ScoringRewrite.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.search; - import java.io.IOException; import org.apache.lucene.index.IndexReader; @@ -168,7 +167,7 @@ public abstract class ScoringRewrite<B> extends TermCollectingRewrite<B> { @Override public int[] init() { final int[] ord = super.init(); - boost = new float[ArrayUtil.oversize(ord.length, RamUsageEstimator.NUM_BYTES_FLOAT)]; + boost = new float[ArrayUtil.oversize(ord.length, Float.BYTES)]; termState = new TermContext[ArrayUtil.oversize(ord.length, RamUsageEstimator.NUM_BYTES_OBJECT_REF)]; assert termState.length >= ord.length && boost.length >= ord.length; return ord; diff --git a/lucene/core/src/java/org/apache/lucene/util/ArrayUtil.java b/lucene/core/src/java/org/apache/lucene/util/ArrayUtil.java index a826d353f36..3ab5da287ab 100644 --- a/lucene/core/src/java/org/apache/lucene/util/ArrayUtil.java +++ 
b/lucene/core/src/java/org/apache/lucene/util/ArrayUtil.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.util; - import java.util.Arrays; import java.util.Collection; import java.util.Comparator; @@ -248,7 +247,7 @@ public final class ArrayUtil { public static short[] grow(short[] array, int minSize) { assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?"; if (array.length < minSize) { - short[] newArray = new short[oversize(minSize, RamUsageEstimator.NUM_BYTES_SHORT)]; + short[] newArray = new short[oversize(minSize, Short.BYTES)]; System.arraycopy(array, 0, newArray, 0, array.length); return newArray; } else @@ -262,7 +261,7 @@ public final class ArrayUtil { public static float[] grow(float[] array, int minSize) { assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?"; if (array.length < minSize) { - float[] newArray = new float[oversize(minSize, RamUsageEstimator.NUM_BYTES_FLOAT)]; + float[] newArray = new float[oversize(minSize, Float.BYTES)]; System.arraycopy(array, 0, newArray, 0, array.length); return newArray; } else @@ -276,7 +275,7 @@ public final class ArrayUtil { public static double[] grow(double[] array, int minSize) { assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?"; if (array.length < minSize) { - double[] newArray = new double[oversize(minSize, RamUsageEstimator.NUM_BYTES_DOUBLE)]; + double[] newArray = new double[oversize(minSize, Double.BYTES)]; System.arraycopy(array, 0, newArray, 0, array.length); return newArray; } else @@ -289,7 +288,7 @@ public final class ArrayUtil { public static short[] shrink(short[] array, int targetSize) { assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?"; - final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_SHORT); + final int newSize = getShrinkSize(array.length, targetSize, Short.BYTES); if (newSize != 
array.length) { short[] newArray = new short[newSize]; System.arraycopy(array, 0, newArray, 0, newSize); @@ -301,7 +300,7 @@ public final class ArrayUtil { public static int[] grow(int[] array, int minSize) { assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?"; if (array.length < minSize) { - int[] newArray = new int[oversize(minSize, RamUsageEstimator.NUM_BYTES_INT)]; + int[] newArray = new int[oversize(minSize, Integer.BYTES)]; System.arraycopy(array, 0, newArray, 0, array.length); return newArray; } else @@ -314,7 +313,7 @@ public final class ArrayUtil { public static int[] shrink(int[] array, int targetSize) { assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?"; - final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_INT); + final int newSize = getShrinkSize(array.length, targetSize, Integer.BYTES); if (newSize != array.length) { int[] newArray = new int[newSize]; System.arraycopy(array, 0, newArray, 0, newSize); @@ -326,7 +325,7 @@ public final class ArrayUtil { public static long[] grow(long[] array, int minSize) { assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?"; if (array.length < minSize) { - long[] newArray = new long[oversize(minSize, RamUsageEstimator.NUM_BYTES_LONG)]; + long[] newArray = new long[oversize(minSize, Long.BYTES)]; System.arraycopy(array, 0, newArray, 0, array.length); return newArray; } else @@ -339,7 +338,7 @@ public final class ArrayUtil { public static long[] shrink(long[] array, int targetSize) { assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?"; - final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_LONG); + final int newSize = getShrinkSize(array.length, targetSize, Long.BYTES); if (newSize != array.length) { long[] newArray = new long[newSize]; System.arraycopy(array, 0, newArray, 0, 
newSize); @@ -401,7 +400,7 @@ public final class ArrayUtil { public static char[] grow(char[] array, int minSize) { assert minSize >= 0: "size must be positive (got " + minSize + "): likely integer overflow?"; if (array.length < minSize) { - char[] newArray = new char[oversize(minSize, RamUsageEstimator.NUM_BYTES_CHAR)]; + char[] newArray = new char[oversize(minSize, Character.BYTES)]; System.arraycopy(array, 0, newArray, 0, array.length); return newArray; } else @@ -414,7 +413,7 @@ public final class ArrayUtil { public static char[] shrink(char[] array, int targetSize) { assert targetSize >= 0: "size must be positive (got " + targetSize + "): likely integer overflow?"; - final int newSize = getShrinkSize(array.length, targetSize, RamUsageEstimator.NUM_BYTES_CHAR); + final int newSize = getShrinkSize(array.length, targetSize, Character.BYTES); if (newSize != array.length) { char[] newArray = new char[newSize]; System.arraycopy(array, 0, newArray, 0, newSize); diff --git a/lucene/core/src/java/org/apache/lucene/util/BytesRefArray.java b/lucene/core/src/java/org/apache/lucene/util/BytesRefArray.java index d7394c7ddd4..47ca52b7145 100644 --- a/lucene/core/src/java/org/apache/lucene/util/BytesRefArray.java +++ b/lucene/core/src/java/org/apache/lucene/util/BytesRefArray.java @@ -51,8 +51,7 @@ public final class BytesRefArray { this.pool = new ByteBlockPool(new ByteBlockPool.DirectTrackingAllocator( bytesUsed)); pool.nextBuffer(); - bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER - + RamUsageEstimator.NUM_BYTES_INT); + bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + Integer.BYTES); this.bytesUsed = bytesUsed; } @@ -75,8 +74,7 @@ public final class BytesRefArray { if (lastElement >= offsets.length) { int oldLen = offsets.length; offsets = ArrayUtil.grow(offsets, offsets.length + 1); - bytesUsed.addAndGet((offsets.length - oldLen) - * RamUsageEstimator.NUM_BYTES_INT); + bytesUsed.addAndGet((offsets.length - oldLen) * Integer.BYTES); }
pool.append(bytes); offsets[lastElement++] = currentOffset; diff --git a/lucene/core/src/java/org/apache/lucene/util/BytesRefHash.java b/lucene/core/src/java/org/apache/lucene/util/BytesRefHash.java index 149e8b75365..25b74a620e0 100644 --- a/lucene/core/src/java/org/apache/lucene/util/BytesRefHash.java +++ b/lucene/core/src/java/org/apache/lucene/util/BytesRefHash.java @@ -90,7 +90,7 @@ public final class BytesRefHash { this.bytesStartArray = bytesStartArray; bytesStart = bytesStartArray.init(); bytesUsed = bytesStartArray.bytesUsed() == null? Counter.newCounter() : bytesStartArray.bytesUsed(); - bytesUsed.addAndGet(hashSize * RamUsageEstimator.NUM_BYTES_INT); + bytesUsed.addAndGet(hashSize * Integer.BYTES); } /** @@ -213,7 +213,7 @@ public final class BytesRefHash { newSize /= 2; } if (newSize != hashSize) { - bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT * -(hashSize - newSize)); + bytesUsed.addAndGet(Integer.BYTES * -(hashSize - newSize)); hashSize = newSize; ids = new int[hashSize]; Arrays.fill(ids, -1); @@ -252,7 +252,7 @@ public final class BytesRefHash { public void close() { clear(true); ids = null; - bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT * -hashSize); + bytesUsed.addAndGet(Integer.BYTES * -hashSize); } /** @@ -408,7 +408,7 @@ public final class BytesRefHash { */ private void rehash(final int newSize, boolean hashOnData) { final int newMask = newSize - 1; - bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT * (newSize)); + bytesUsed.addAndGet(Integer.BYTES * (newSize)); final int[] newHash = new int[newSize]; Arrays.fill(newHash, -1); for (int i = 0; i < hashSize; i++) { @@ -449,7 +449,7 @@ public final class BytesRefHash { } hashMask = newMask; - bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT * (-ids.length)); + bytesUsed.addAndGet(Integer.BYTES * (-ids.length)); ids = newHash; hashSize = newSize; hashHalfSize = newSize / 2; @@ -472,7 +472,7 @@ public final class BytesRefHash { if (ids == null) { ids = new int[hashSize]; - 
bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT * hashSize); + bytesUsed.addAndGet(Integer.BYTES * hashSize); } } @@ -570,8 +570,7 @@ public final class BytesRefHash { @Override public int[] init() { - return bytesStart = new int[ArrayUtil.oversize(initSize, - RamUsageEstimator.NUM_BYTES_INT)]; + return bytesStart = new int[ArrayUtil.oversize(initSize, Integer.BYTES)]; } @Override diff --git a/lucene/core/src/java/org/apache/lucene/util/DocIdSetBuilder.java b/lucene/core/src/java/org/apache/lucene/util/DocIdSetBuilder.java index 31a9762a783..e8b8b9802c1 100644 --- a/lucene/core/src/java/org/apache/lucene/util/DocIdSetBuilder.java +++ b/lucene/core/src/java/org/apache/lucene/util/DocIdSetBuilder.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.util; - import java.io.IOException; import org.apache.lucene.search.DocIdSet; @@ -68,7 +67,7 @@ public final class DocIdSetBuilder { private void growBuffer(int minSize) { assert minSize < threshold; if (buffer.length < minSize) { - int nextSize = Math.min(threshold, ArrayUtil.oversize(minSize, RamUsageEstimator.NUM_BYTES_INT)); + int nextSize = Math.min(threshold, ArrayUtil.oversize(minSize, Integer.BYTES)); int[] newBuffer = new int[nextSize]; System.arraycopy(buffer, 0, newBuffer, 0, buffer.length); buffer = newBuffer; diff --git a/lucene/core/src/java/org/apache/lucene/util/NumericUtils.java b/lucene/core/src/java/org/apache/lucene/util/NumericUtils.java index b66f4682cba..58e25f37e41 100644 --- a/lucene/core/src/java/org/apache/lucene/util/NumericUtils.java +++ b/lucene/core/src/java/org/apache/lucene/util/NumericUtils.java @@ -155,16 +155,15 @@ public final class NumericUtils { return true; } - public static void intToBytes(int x, byte[] dest, int index) { + public static void intToBytes(int x, byte[] dest, int offset) { // Flip the sign bit, so negative ints sort before positive ints correctly: x ^= 0x80000000; - intToBytesDirect(x, dest, index); + intToBytesDirect(x, dest, offset); } - public static void 
intToBytesDirect(int x, byte[] dest, int index) { - // Flip the sign bit, so negative ints sort before positive ints correctly: - for(int i=0;i<4;i++) { - dest[4*index+i] = (byte) (x >> 24-i*8); + public static void intToBytesDirect(int x, byte[] dest, int offset) { + for (int i = 0; i < 4; i++) { + dest[offset+i] = (byte) (x >> 24-i*8); } } @@ -174,22 +173,21 @@ public final class NumericUtils { return x ^ 0x80000000; } - public static int bytesToIntDirect(byte[] src, int index) { + public static int bytesToIntDirect(byte[] src, int offset) { int x = 0; - for(int i=0;i<4;i++) { - x |= (src[4*index+i] & 0xff) << (24-i*8); + for (int i = 0; i < 4; i++) { + x |= (src[offset+i] & 0xff) << (24-i*8); } return x; } - public static void longToBytes(long v, byte[] bytes, int dim) { + public static void longToBytes(long v, byte[] bytes, int offset) { // Flip the sign bit so negative longs sort before positive longs: v ^= 0x8000000000000000L; - longToBytesDirect(v, bytes, dim); + longToBytesDirect(v, bytes, offset); } - public static void longToBytesDirect(long v, byte[] bytes, int dim) { - int offset = 8 * dim; + public static void longToBytesDirect(long v, byte[] bytes, int offset) { bytes[offset] = (byte) (v >> 56); bytes[offset+1] = (byte) (v >> 48); bytes[offset+2] = (byte) (v >> 40); @@ -200,15 +198,14 @@ public final class NumericUtils { bytes[offset+7] = (byte) v; } - public static long bytesToLong(byte[] bytes, int index) { - long v = bytesToLongDirect(bytes, index); + public static long bytesToLong(byte[] bytes, int offset) { + long v = bytesToLongDirect(bytes, offset); // Flip the sign bit back v ^= 0x8000000000000000L; return v; } - public static long bytesToLongDirect(byte[] bytes, int index) { - int offset = 8 * index; + public static long bytesToLongDirect(byte[] bytes, int offset) { long v = ((bytes[offset] & 0xffL) << 56) | ((bytes[offset+1] & 0xffL) << 48) | ((bytes[offset+2] & 0xffL) << 40) | diff --git 
a/lucene/core/src/java/org/apache/lucene/util/RamUsageEstimator.java b/lucene/core/src/java/org/apache/lucene/util/RamUsageEstimator.java index 763d9e5d34b..3ff53751747 100644 --- a/lucene/core/src/java/org/apache/lucene/util/RamUsageEstimator.java +++ b/lucene/core/src/java/org/apache/lucene/util/RamUsageEstimator.java @@ -55,15 +55,6 @@ public final class RamUsageEstimator { /** No instantiation. */ private RamUsageEstimator() {} - public final static int NUM_BYTES_BOOLEAN = 1; - public final static int NUM_BYTES_BYTE = 1; - public final static int NUM_BYTES_CHAR = 2; - public final static int NUM_BYTES_SHORT = 2; - public final static int NUM_BYTES_INT = 4; - public final static int NUM_BYTES_FLOAT = 4; - public final static int NUM_BYTES_LONG = 8; - public final static int NUM_BYTES_DOUBLE = 8; - /** * True, iff compressed references (oops) are enabled by this JVM */ @@ -95,14 +86,14 @@ public final class RamUsageEstimator { */ private static final Map<Class<?>,Integer> primitiveSizes = new IdentityHashMap<>(); static { - primitiveSizes.put(boolean.class, Integer.valueOf(NUM_BYTES_BOOLEAN)); - primitiveSizes.put(byte.class, Integer.valueOf(NUM_BYTES_BYTE)); - primitiveSizes.put(char.class, Integer.valueOf(NUM_BYTES_CHAR)); - primitiveSizes.put(short.class, Integer.valueOf(NUM_BYTES_SHORT)); - primitiveSizes.put(int.class, Integer.valueOf(NUM_BYTES_INT)); - primitiveSizes.put(float.class, Integer.valueOf(NUM_BYTES_FLOAT)); - primitiveSizes.put(double.class, Integer.valueOf(NUM_BYTES_DOUBLE)); - primitiveSizes.put(long.class, Integer.valueOf(NUM_BYTES_LONG)); + primitiveSizes.put(boolean.class, 1); + primitiveSizes.put(byte.class, 1); + primitiveSizes.put(char.class, Integer.valueOf(Character.BYTES)); + primitiveSizes.put(short.class, Integer.valueOf(Short.BYTES)); + primitiveSizes.put(int.class, Integer.valueOf(Integer.BYTES)); + primitiveSizes.put(float.class, Integer.valueOf(Float.BYTES)); + primitiveSizes.put(double.class, Integer.valueOf(Double.BYTES)); + 
primitiveSizes.put(long.class, Integer.valueOf(Long.BYTES)); } /** @@ -165,7 +156,7 @@ public final class RamUsageEstimator { // "best guess" based on reference size: NUM_BYTES_OBJECT_HEADER = 8 + NUM_BYTES_OBJECT_REF; // array header is NUM_BYTES_OBJECT_HEADER + NUM_BYTES_INT, but aligned (object alignment): - NUM_BYTES_ARRAY_HEADER = (int) alignObjectSize(NUM_BYTES_OBJECT_HEADER + NUM_BYTES_INT); + NUM_BYTES_ARRAY_HEADER = (int) alignObjectSize(NUM_BYTES_OBJECT_HEADER + Integer.BYTES); } else { JVM_IS_HOTSPOT_64BIT = false; COMPRESSED_REFS_ENABLED = false; @@ -173,7 +164,7 @@ public final class RamUsageEstimator { NUM_BYTES_OBJECT_REF = 4; NUM_BYTES_OBJECT_HEADER = 8; // For 32 bit JVMs, no extra alignment of array header: - NUM_BYTES_ARRAY_HEADER = NUM_BYTES_OBJECT_HEADER + NUM_BYTES_INT; + NUM_BYTES_ARRAY_HEADER = NUM_BYTES_OBJECT_HEADER + Integer.BYTES; } // get min/max value of cached Long class instances: @@ -223,32 +214,32 @@ public final class RamUsageEstimator { /** Returns the size in bytes of the char[] object. */ public static long sizeOf(char[] arr) { - return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_CHAR * arr.length); + return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) Character.BYTES * arr.length); } /** Returns the size in bytes of the short[] object. */ public static long sizeOf(short[] arr) { - return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_SHORT * arr.length); + return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) Short.BYTES * arr.length); } /** Returns the size in bytes of the int[] object. */ public static long sizeOf(int[] arr) { - return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_INT * arr.length); + return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) Integer.BYTES * arr.length); } /** Returns the size in bytes of the float[] object. 
*/ public static long sizeOf(float[] arr) { - return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_FLOAT * arr.length); + return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) Float.BYTES * arr.length); } /** Returns the size in bytes of the long[] object. */ public static long sizeOf(long[] arr) { - return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_LONG * arr.length); + return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) Long.BYTES * arr.length); } /** Returns the size in bytes of the double[] object. */ public static long sizeOf(double[] arr) { - return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) NUM_BYTES_DOUBLE * arr.length); + return alignObjectSize((long) NUM_BYTES_ARRAY_HEADER + (long) Double.BYTES * arr.length); } /** Returns the shallow size in bytes of the Object[] object. */ diff --git a/lucene/core/src/java/org/apache/lucene/util/RecyclingIntBlockAllocator.java b/lucene/core/src/java/org/apache/lucene/util/RecyclingIntBlockAllocator.java index 437426cc7f5..57ba1bd2c80 100644 --- a/lucene/core/src/java/org/apache/lucene/util/RecyclingIntBlockAllocator.java +++ b/lucene/core/src/java/org/apache/lucene/util/RecyclingIntBlockAllocator.java @@ -78,7 +78,7 @@ public final class RecyclingIntBlockAllocator extends Allocator { @Override public int[] getIntBlock() { if (freeBlocks == 0) { - bytesUsed.addAndGet(blockSize*RamUsageEstimator.NUM_BYTES_INT); + bytesUsed.addAndGet(blockSize*Integer.BYTES); return new int[blockSize]; } final int[] b = freeByteBlocks[--freeBlocks]; @@ -104,7 +104,7 @@ public final class RecyclingIntBlockAllocator extends Allocator { for (int i = stop; i < end; i++) { blocks[i] = null; } - bytesUsed.addAndGet(-(end - stop) * (blockSize * RamUsageEstimator.NUM_BYTES_INT)); + bytesUsed.addAndGet(-(end - stop) * (blockSize * Integer.BYTES)); assert bytesUsed.get() >= 0; } @@ -150,7 +150,7 @@ public final class RecyclingIntBlockAllocator extends Allocator { while 
(freeBlocks > stop) { freeByteBlocks[--freeBlocks] = null; } - bytesUsed.addAndGet(-count*blockSize* RamUsageEstimator.NUM_BYTES_INT); + bytesUsed.addAndGet(-count*blockSize*Integer.BYTES); assert bytesUsed.get() >= 0; return count; } diff --git a/lucene/core/src/java/org/apache/lucene/util/SentinelIntSet.java b/lucene/core/src/java/org/apache/lucene/util/SentinelIntSet.java index 46162d929d5..3fc1b57ab92 100644 --- a/lucene/core/src/java/org/apache/lucene/util/SentinelIntSet.java +++ b/lucene/core/src/java/org/apache/lucene/util/SentinelIntSet.java @@ -149,7 +149,7 @@ public class SentinelIntSet { /** Return the memory footprint of this class in bytes. */ public long ramBytesUsed() { return RamUsageEstimator.alignObjectSize( - RamUsageEstimator.NUM_BYTES_INT * 3 + Integer.BYTES * 3 + RamUsageEstimator.NUM_BYTES_OBJECT_REF) + RamUsageEstimator.sizeOf(keys); } diff --git a/lucene/core/src/java/org/apache/lucene/util/automaton/Automaton.java b/lucene/core/src/java/org/apache/lucene/util/automaton/Automaton.java index 6c26a87edef..42f28edacb0 100644 --- a/lucene/core/src/java/org/apache/lucene/util/automaton/Automaton.java +++ b/lucene/core/src/java/org/apache/lucene/util/automaton/Automaton.java @@ -893,7 +893,7 @@ public class Automaton implements Accountable { return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.sizeOf(states) + RamUsageEstimator.sizeOf(transitions) + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + (isAccept.size() / 8) + RamUsageEstimator.NUM_BYTES_OBJECT_REF + 2 * RamUsageEstimator.NUM_BYTES_OBJECT_REF + - 3 * RamUsageEstimator.NUM_BYTES_INT + - RamUsageEstimator.NUM_BYTES_BOOLEAN; + 3 * Integer.BYTES + + 1; } } diff --git a/lucene/core/src/java/org/apache/lucene/util/automaton/SortedIntSet.java b/lucene/core/src/java/org/apache/lucene/util/automaton/SortedIntSet.java index 38218a55765..3251aad9f09 100644 --- a/lucene/core/src/java/org/apache/lucene/util/automaton/SortedIntSet.java +++ 
b/lucene/core/src/java/org/apache/lucene/util/automaton/SortedIntSet.java @@ -16,11 +16,9 @@ */ package org.apache.lucene.util.automaton; - import java.util.TreeMap; import java.util.Map; import org.apache.lucene.util.ArrayUtil; -import org.apache.lucene.util.RamUsageEstimator; // Just holds a set of int[] states, plus a corresponding // int[] count per state. Used by @@ -135,7 +133,7 @@ final class SortedIntSet { public void computeHash() { if (useTreeMap) { if (map.size() > values.length) { - final int size = ArrayUtil.oversize(map.size(), RamUsageEstimator.NUM_BYTES_INT); + final int size = ArrayUtil.oversize(map.size(), Integer.BYTES); values = new int[size]; counts = new int[size]; } diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java index 8a3986eb9ba..1067f4baf19 100644 --- a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java +++ b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.util.bkd; - import java.io.IOException; import java.util.Arrays; @@ -27,7 +26,6 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; -import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.StringHelper; /** Handles intersection of an multi-dimensional shape in byte[] space with a block KD-tree previously written with {@link BKDWriter}. 
@@ -415,7 +413,7 @@ public class BKDReader implements Accountable { @Override public long ramBytesUsed() { return splitPackedValues.length + - leafBlockFPs.length * RamUsageEstimator.NUM_BYTES_LONG; + leafBlockFPs.length * Long.BYTES; } public byte[] getMinPackedValue() { diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java index 0d3f8fce570..62c5264ec81 100644 --- a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java +++ b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.util.bkd; - import java.io.Closeable; import java.io.EOFException; import java.io.IOException; @@ -40,9 +39,7 @@ import org.apache.lucene.util.IntroSorter; import org.apache.lucene.util.LongBitSet; import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.OfflineSorter; -import org.apache.lucene.util.OfflineSorter.ByteSequencesWriter; import org.apache.lucene.util.PriorityQueue; -import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.StringHelper; // TODO @@ -152,7 +149,7 @@ public class BKDWriter implements Closeable { maxPackedValue = new byte[packedBytesLength]; // dimensional values (numDims * bytesPerDim) + ord (long) + docID (int) - bytesPerDoc = packedBytesLength + RamUsageEstimator.NUM_BYTES_LONG + RamUsageEstimator.NUM_BYTES_INT; + bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES; // As we recurse, we compute temporary partitions of the data, halving the // number of points at each recursion. 
Once there are few enough points, diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/HeapPointWriter.java b/lucene/core/src/java/org/apache/lucene/util/bkd/HeapPointWriter.java index ae247c942b9..02361871c4c 100644 --- a/lucene/core/src/java/org/apache/lucene/util/bkd/HeapPointWriter.java +++ b/lucene/core/src/java/org/apache/lucene/util/bkd/HeapPointWriter.java @@ -16,12 +16,10 @@ */ package org.apache.lucene.util.bkd; - import java.util.ArrayList; import java.util.List; import org.apache.lucene.util.ArrayUtil; -import org.apache.lucene.util.RamUsageEstimator; final class HeapPointWriter implements PointWriter { int[] docIDs; @@ -94,7 +92,7 @@ final class HeapPointWriter implements PointWriter { assert closed == false; assert packedValue.length == packedBytesLength; if (ords.length == nextWrite) { - int nextSize = Math.min(maxSize, ArrayUtil.oversize(nextWrite+1, RamUsageEstimator.NUM_BYTES_INT)); + int nextSize = Math.min(maxSize, ArrayUtil.oversize(nextWrite+1, Integer.BYTES)); assert nextSize > nextWrite: "nextSize=" + nextSize + " vs nextWrite=" + nextWrite; ords = growExact(ords, nextSize); docIDs = growExact(docIDs, nextSize); diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/OfflinePointReader.java b/lucene/core/src/java/org/apache/lucene/util/bkd/OfflinePointReader.java index ad8523898c0..14d714cab1f 100644 --- a/lucene/core/src/java/org/apache/lucene/util/bkd/OfflinePointReader.java +++ b/lucene/core/src/java/org/apache/lucene/util/bkd/OfflinePointReader.java @@ -16,15 +16,12 @@ */ package org.apache.lucene.util.bkd; - import java.io.EOFException; import java.io.IOException; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.RamUsageEstimator; /** Reads points from disk in a fixed-with format, previously written with {@link OfflinePointWriter}. 
*/ final class OfflinePointReader implements PointReader { @@ -41,7 +38,7 @@ final class OfflinePointReader implements PointReader { private OfflinePointReader(IndexInput in, int packedBytesLength, long start, long length) throws IOException { this.in = in; - bytesPerDoc = packedBytesLength + RamUsageEstimator.NUM_BYTES_LONG + RamUsageEstimator.NUM_BYTES_INT; + bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES; long seekFP = start * bytesPerDoc; in.seek(seekFP); this.countLeft = length; diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/OfflinePointWriter.java b/lucene/core/src/java/org/apache/lucene/util/bkd/OfflinePointWriter.java index ba1b71c8684..f47f5675538 100644 --- a/lucene/core/src/java/org/apache/lucene/util/bkd/OfflinePointWriter.java +++ b/lucene/core/src/java/org/apache/lucene/util/bkd/OfflinePointWriter.java @@ -16,13 +16,11 @@ */ package org.apache.lucene.util.bkd; - import java.io.IOException; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexOutput; -import org.apache.lucene.util.RamUsageEstimator; /** Writes points to disk in a fixed-with format. */ final class OfflinePointWriter implements PointWriter { @@ -38,7 +36,7 @@ final class OfflinePointWriter implements PointWriter { this.out = tempDir.createTempOutput(tempFileNamePrefix, "bkd", IOContext.DEFAULT); this.tempDir = tempDir; this.packedBytesLength = packedBytesLength; - bytesPerDoc = packedBytesLength + RamUsageEstimator.NUM_BYTES_LONG + RamUsageEstimator.NUM_BYTES_INT; + bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES; } /** Initializes on an already written/closed file, just so consumers can use {@link #getReader} to read the file. 
*/ @@ -46,7 +44,7 @@ final class OfflinePointWriter implements PointWriter { this.out = out; this.tempDir = tempDir; this.packedBytesLength = packedBytesLength; - bytesPerDoc = packedBytesLength + RamUsageEstimator.NUM_BYTES_LONG + RamUsageEstimator.NUM_BYTES_INT; + bytesPerDoc = packedBytesLength + Long.BYTES + Integer.BYTES; this.count = count; closed = true; } diff --git a/lucene/core/src/java/org/apache/lucene/util/packed/AbstractPagedMutable.java b/lucene/core/src/java/org/apache/lucene/util/packed/AbstractPagedMutable.java index 13ab6f9db72..c5fac1e9912 100644 --- a/lucene/core/src/java/org/apache/lucene/util/packed/AbstractPagedMutable.java +++ b/lucene/core/src/java/org/apache/lucene/util/packed/AbstractPagedMutable.java @@ -101,8 +101,8 @@ abstract class AbstractPagedMutable> extends L protected long baseRamBytesUsed() { return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_OBJECT_REF - + RamUsageEstimator.NUM_BYTES_LONG - + 3 * RamUsageEstimator.NUM_BYTES_INT; + + Long.BYTES + + 3 * Integer.BYTES; } @Override diff --git a/lucene/core/src/java/org/apache/lucene/util/packed/Direct16.java b/lucene/core/src/java/org/apache/lucene/util/packed/Direct16.java index 82281a9e0e7..3fda7c3c217 100644 --- a/lucene/core/src/java/org/apache/lucene/util/packed/Direct16.java +++ b/lucene/core/src/java/org/apache/lucene/util/packed/Direct16.java @@ -62,7 +62,7 @@ final class Direct16 extends PackedInts.MutableImpl { public long ramBytesUsed() { return RamUsageEstimator.alignObjectSize( RamUsageEstimator.NUM_BYTES_OBJECT_HEADER - + 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue + + 2 * Integer.BYTES // valueCount,bitsPerValue + RamUsageEstimator.NUM_BYTES_OBJECT_REF) // values ref + RamUsageEstimator.sizeOf(values); } diff --git a/lucene/core/src/java/org/apache/lucene/util/packed/Direct32.java b/lucene/core/src/java/org/apache/lucene/util/packed/Direct32.java index 502aa3f3d6a..aec9eaf8866 100644 --- 
a/lucene/core/src/java/org/apache/lucene/util/packed/Direct32.java +++ b/lucene/core/src/java/org/apache/lucene/util/packed/Direct32.java @@ -62,7 +62,7 @@ final class Direct32 extends PackedInts.MutableImpl { public long ramBytesUsed() { return RamUsageEstimator.alignObjectSize( RamUsageEstimator.NUM_BYTES_OBJECT_HEADER - + 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue + + 2 * Integer.BYTES // valueCount,bitsPerValue + RamUsageEstimator.NUM_BYTES_OBJECT_REF) // values ref + RamUsageEstimator.sizeOf(values); } diff --git a/lucene/core/src/java/org/apache/lucene/util/packed/Direct64.java b/lucene/core/src/java/org/apache/lucene/util/packed/Direct64.java index 106f641f8cb..b8e06b68ef6 100644 --- a/lucene/core/src/java/org/apache/lucene/util/packed/Direct64.java +++ b/lucene/core/src/java/org/apache/lucene/util/packed/Direct64.java @@ -57,7 +57,7 @@ final class Direct64 extends PackedInts.MutableImpl { public long ramBytesUsed() { return RamUsageEstimator.alignObjectSize( RamUsageEstimator.NUM_BYTES_OBJECT_HEADER - + 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue + + 2 * Integer.BYTES // valueCount,bitsPerValue + RamUsageEstimator.NUM_BYTES_OBJECT_REF) // values ref + RamUsageEstimator.sizeOf(values); } diff --git a/lucene/core/src/java/org/apache/lucene/util/packed/Direct8.java b/lucene/core/src/java/org/apache/lucene/util/packed/Direct8.java index 27986c013f4..81fc5a9977f 100644 --- a/lucene/core/src/java/org/apache/lucene/util/packed/Direct8.java +++ b/lucene/core/src/java/org/apache/lucene/util/packed/Direct8.java @@ -60,7 +60,7 @@ final class Direct8 extends PackedInts.MutableImpl { public long ramBytesUsed() { return RamUsageEstimator.alignObjectSize( RamUsageEstimator.NUM_BYTES_OBJECT_HEADER - + 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue + + 2 * Integer.BYTES // valueCount,bitsPerValue + RamUsageEstimator.NUM_BYTES_OBJECT_REF) // values ref + RamUsageEstimator.sizeOf(values); } diff --git 
a/lucene/core/src/java/org/apache/lucene/util/packed/GrowableWriter.java b/lucene/core/src/java/org/apache/lucene/util/packed/GrowableWriter.java index 76d3ecca4a6..a4d97f494ce 100644 --- a/lucene/core/src/java/org/apache/lucene/util/packed/GrowableWriter.java +++ b/lucene/core/src/java/org/apache/lucene/util/packed/GrowableWriter.java @@ -131,8 +131,8 @@ public class GrowableWriter extends PackedInts.Mutable { return RamUsageEstimator.alignObjectSize( RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_OBJECT_REF - + RamUsageEstimator.NUM_BYTES_LONG - + RamUsageEstimator.NUM_BYTES_FLOAT) + + Long.BYTES + + Float.BYTES) + current.ramBytesUsed(); } diff --git a/lucene/core/src/java/org/apache/lucene/util/packed/Packed16ThreeBlocks.java b/lucene/core/src/java/org/apache/lucene/util/packed/Packed16ThreeBlocks.java index 8e8e94d17ea..02f4e41e535 100644 --- a/lucene/core/src/java/org/apache/lucene/util/packed/Packed16ThreeBlocks.java +++ b/lucene/core/src/java/org/apache/lucene/util/packed/Packed16ThreeBlocks.java @@ -112,7 +112,7 @@ final class Packed16ThreeBlocks extends PackedInts.MutableImpl { public long ramBytesUsed() { return RamUsageEstimator.alignObjectSize( RamUsageEstimator.NUM_BYTES_OBJECT_HEADER - + 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue + + 2 * Integer.BYTES // valueCount,bitsPerValue + RamUsageEstimator.NUM_BYTES_OBJECT_REF) // blocks ref + RamUsageEstimator.sizeOf(blocks); } diff --git a/lucene/core/src/java/org/apache/lucene/util/packed/Packed64.java b/lucene/core/src/java/org/apache/lucene/util/packed/Packed64.java index ae43032175f..c78feafdf9e 100644 --- a/lucene/core/src/java/org/apache/lucene/util/packed/Packed64.java +++ b/lucene/core/src/java/org/apache/lucene/util/packed/Packed64.java @@ -246,8 +246,8 @@ class Packed64 extends PackedInts.MutableImpl { public long ramBytesUsed() { return RamUsageEstimator.alignObjectSize( RamUsageEstimator.NUM_BYTES_OBJECT_HEADER - + 3 * RamUsageEstimator.NUM_BYTES_INT 
// bpvMinusBlockSize,valueCount,bitsPerValue - + RamUsageEstimator.NUM_BYTES_LONG // maskRight + + 3 * Integer.BYTES // bpvMinusBlockSize,valueCount,bitsPerValue + + Long.BYTES // maskRight + RamUsageEstimator.NUM_BYTES_OBJECT_REF) // blocks ref + RamUsageEstimator.sizeOf(blocks); } diff --git a/lucene/core/src/java/org/apache/lucene/util/packed/Packed64SingleBlock.java b/lucene/core/src/java/org/apache/lucene/util/packed/Packed64SingleBlock.java index a7262b3ed90..85e7ea8e078 100644 --- a/lucene/core/src/java/org/apache/lucene/util/packed/Packed64SingleBlock.java +++ b/lucene/core/src/java/org/apache/lucene/util/packed/Packed64SingleBlock.java @@ -61,7 +61,7 @@ abstract class Packed64SingleBlock extends PackedInts.MutableImpl { public long ramBytesUsed() { return RamUsageEstimator.alignObjectSize( RamUsageEstimator.NUM_BYTES_OBJECT_HEADER - + 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue + + 2 * Integer.BYTES // valueCount,bitsPerValue + RamUsageEstimator.NUM_BYTES_OBJECT_REF) // blocks ref + RamUsageEstimator.sizeOf(blocks); } diff --git a/lucene/core/src/java/org/apache/lucene/util/packed/Packed8ThreeBlocks.java b/lucene/core/src/java/org/apache/lucene/util/packed/Packed8ThreeBlocks.java index 5a85735b3bb..3ec6df08600 100644 --- a/lucene/core/src/java/org/apache/lucene/util/packed/Packed8ThreeBlocks.java +++ b/lucene/core/src/java/org/apache/lucene/util/packed/Packed8ThreeBlocks.java @@ -110,7 +110,7 @@ final class Packed8ThreeBlocks extends PackedInts.MutableImpl { public long ramBytesUsed() { return RamUsageEstimator.alignObjectSize( RamUsageEstimator.NUM_BYTES_OBJECT_HEADER - + 2 * RamUsageEstimator.NUM_BYTES_INT // valueCount,bitsPerValue + + 2 * Integer.BYTES // valueCount,bitsPerValue + RamUsageEstimator.NUM_BYTES_OBJECT_REF) // blocks ref + RamUsageEstimator.sizeOf(blocks); } diff --git a/lucene/core/src/java/org/apache/lucene/util/packed/PackedInts.java b/lucene/core/src/java/org/apache/lucene/util/packed/PackedInts.java index 
4df85801fe1..e028c2cf0b0 100644 --- a/lucene/core/src/java/org/apache/lucene/util/packed/PackedInts.java +++ b/lucene/core/src/java/org/apache/lucene/util/packed/PackedInts.java @@ -681,7 +681,7 @@ public class PackedInts { @Override public long ramBytesUsed() { - return RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_INT); + return RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + Integer.BYTES); } } diff --git a/lucene/core/src/java/org/apache/lucene/util/packed/PagedGrowableWriter.java b/lucene/core/src/java/org/apache/lucene/util/packed/PagedGrowableWriter.java index ab6c65aa22c..2c22c9eb667 100644 --- a/lucene/core/src/java/org/apache/lucene/util/packed/PagedGrowableWriter.java +++ b/lucene/core/src/java/org/apache/lucene/util/packed/PagedGrowableWriter.java @@ -16,8 +16,6 @@ */ package org.apache.lucene.util.packed; - -import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.packed.PackedInts.Mutable; /** @@ -65,7 +63,7 @@ public final class PagedGrowableWriter extends AbstractPagedMutable maxValue[dim]) { maxValue[dim] = values[dim]; } - NumericUtils.intToBytes(values[dim], scratch, dim); + NumericUtils.intToBytes(values[dim], scratch, dim * Integer.BYTES); if (VERBOSE) { System.out.println(" " + dim + " -> " + values[dim]); } @@ -161,8 +161,8 @@ public class TestBKD extends LuceneTestCase { byte[] minPackedValue = r.getMinPackedValue(); byte[] maxPackedValue = r.getMaxPackedValue(); for(int dim=0;dim= min; if (max < queryMin[dim] || min > queryMax[dim]) { diff --git a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java index 5003ec52818..29e60ba50b1 100644 --- a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java +++ b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java @@ -225,8 +225,8 @@ public class MemoryIndex { 
this.storePayloads = storePayloads; this.bytesUsed = Counter.newCounter(); final int maxBufferedByteBlocks = (int)((maxReusedBytes/2) / ByteBlockPool.BYTE_BLOCK_SIZE ); - final int maxBufferedIntBlocks = (int) ((maxReusedBytes - (maxBufferedByteBlocks*ByteBlockPool.BYTE_BLOCK_SIZE))/(IntBlockPool.INT_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT)); - assert (maxBufferedByteBlocks * ByteBlockPool.BYTE_BLOCK_SIZE) + (maxBufferedIntBlocks * IntBlockPool.INT_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT) <= maxReusedBytes; + final int maxBufferedIntBlocks = (int) ((maxReusedBytes - (maxBufferedByteBlocks*ByteBlockPool.BYTE_BLOCK_SIZE))/(IntBlockPool.INT_BLOCK_SIZE * Integer.BYTES)); + assert (maxBufferedByteBlocks * ByteBlockPool.BYTE_BLOCK_SIZE) + (maxBufferedIntBlocks * IntBlockPool.INT_BLOCK_SIZE * Integer.BYTES) <= maxReusedBytes; byteBlockPool = new ByteBlockPool(new RecyclingByteBlockAllocator(ByteBlockPool.BYTE_BLOCK_SIZE, maxBufferedByteBlocks, bytesUsed)); intBlockPool = new IntBlockPool(new RecyclingIntBlockAllocator(IntBlockPool.INT_BLOCK_SIZE, maxBufferedIntBlocks, bytesUsed)); postingsWriter = new SliceWriter(intBlockPool); @@ -1216,9 +1216,9 @@ public class MemoryIndex { @Override public int[] init() { final int[] ord = super.init(); - start = new int[ArrayUtil.oversize(ord.length, RamUsageEstimator.NUM_BYTES_INT)]; - end = new int[ArrayUtil.oversize(ord.length, RamUsageEstimator.NUM_BYTES_INT)]; - freq = new int[ArrayUtil.oversize(ord.length, RamUsageEstimator.NUM_BYTES_INT)]; + start = new int[ArrayUtil.oversize(ord.length, Integer.BYTES)]; + end = new int[ArrayUtil.oversize(ord.length, Integer.BYTES)]; + freq = new int[ArrayUtil.oversize(ord.length, Integer.BYTES)]; assert start.length >= ord.length; assert end.length >= ord.length; assert freq.length >= ord.length; diff --git a/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCacheImpl.java b/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCacheImpl.java index 94e53bdfcf2..c6bd943725d 
100644 --- a/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCacheImpl.java +++ b/lucene/misc/src/java/org/apache/lucene/uninverting/FieldCacheImpl.java @@ -481,7 +481,7 @@ class FieldCacheImpl implements FieldCache { @Override public long ramBytesUsed() { - return values.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_LONG; + return values.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_OBJECT_REF + Long.BYTES; } } @@ -599,7 +599,7 @@ class FieldCacheImpl implements FieldCache { termOrdToBytesOffset.ramBytesUsed() + docToTermOrd.ramBytesUsed() + 3*RamUsageEstimator.NUM_BYTES_OBJECT_REF + - RamUsageEstimator.NUM_BYTES_INT; + Integer.BYTES; } @Override diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPoint.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPoint.java index 77f7d32eed4..539987ca5a0 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPoint.java +++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonPoint.java @@ -24,7 +24,7 @@ import org.apache.lucene.spatial.util.GeoUtils; public class LatLonPoint extends Field { public static final FieldType TYPE = new FieldType(); static { - TYPE.setDimensions(2, 4); + TYPE.setDimensions(2, Integer.BYTES); TYPE.freeze(); } @@ -45,7 +45,7 @@ public class LatLonPoint extends Field { } byte[] bytes = new byte[8]; NumericUtils.intToBytes(encodeLat(lat), bytes, 0); - NumericUtils.intToBytes(encodeLon(lon), bytes, 1); + NumericUtils.intToBytes(encodeLon(lon), bytes, Integer.BYTES); fieldsData = new BytesRef(bytes); } diff --git a/lucene/sandbox/src/java/org/apache/lucene/search/PointInPolygonQuery.java b/lucene/sandbox/src/java/org/apache/lucene/search/PointInPolygonQuery.java index 8e099c4ca2e..6a5ae1e816c 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/search/PointInPolygonQuery.java +++ b/lucene/sandbox/src/java/org/apache/lucene/search/PointInPolygonQuery.java @@ -124,7 +124,7 @@ public class 
PointInPolygonQuery extends Query { public void visit(int docID, byte[] packedValue) { assert packedValue.length == 8; double lat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(packedValue, 0)); - double lon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(packedValue, 1)); + double lon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(packedValue, Integer.BYTES)); if (GeoRelationUtils.pointInPolygon(polyLons, polyLats, lat, lon)) { hitCount[0]++; result.add(docID); @@ -134,9 +134,9 @@ public class PointInPolygonQuery extends Query { @Override public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { double cellMinLat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(minPackedValue, 0)); - double cellMinLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(minPackedValue, 1)); + double cellMinLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(minPackedValue, Integer.BYTES)); double cellMaxLat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(maxPackedValue, 0)); - double cellMaxLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(maxPackedValue, 1)); + double cellMaxLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(maxPackedValue, Integer.BYTES)); if (cellMinLat <= minLat && cellMaxLat >= maxLat && cellMinLon <= minLon && cellMaxLon >= maxLon) { // Cell fully encloses the query diff --git a/lucene/sandbox/src/java/org/apache/lucene/search/PointInRectQuery.java b/lucene/sandbox/src/java/org/apache/lucene/search/PointInRectQuery.java index 1d95399c115..5292f22d807 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/search/PointInRectQuery.java +++ b/lucene/sandbox/src/java/org/apache/lucene/search/PointInRectQuery.java @@ -98,7 +98,7 @@ public class PointInRectQuery extends Query { public void visit(int docID, byte[] packedValue) { assert packedValue.length == 8; double lat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(packedValue, 0)); - double lon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(packedValue, 1)); + double lon = 
LatLonPoint.decodeLon(NumericUtils.bytesToInt(packedValue, Integer.BYTES)); if (lat >= minLat && lat <= maxLat && lon >= minLon && lon <= maxLon) { hitCount[0]++; result.add(docID); @@ -108,9 +108,9 @@ public class PointInRectQuery extends Query { @Override public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { double cellMinLat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(minPackedValue, 0)); - double cellMinLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(minPackedValue, 1)); + double cellMinLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(minPackedValue, Integer.BYTES)); double cellMaxLat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(maxPackedValue, 0)); - double cellMaxLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(maxPackedValue, 1)); + double cellMaxLon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(maxPackedValue, Integer.BYTES)); if (minLat <= cellMinLat && maxLat >= cellMaxLat && minLon <= cellMinLon && maxLon >= cellMaxLon) { return Relation.CELL_INSIDE_QUERY; diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java index 7e23aad3daa..f57c1e1c3c2 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java +++ b/lucene/sandbox/src/test/org/apache/lucene/search/TestDocValuesRangeQuery.java @@ -83,7 +83,7 @@ public class TestDocValuesRangeQuery extends LuceneTestCase { if (l == null) { return null; } else { - byte[] bytes = new byte[RamUsageEstimator.NUM_BYTES_LONG]; + byte[] bytes = new byte[Long.BYTES]; NumericUtils.longToBytes(l, bytes, 0); return new BytesRef(bytes); } diff --git a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/ContainsPrefixTreeQuery.java b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/ContainsPrefixTreeQuery.java index 7eadb646691..00463784ebc 100644 --- 
a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/ContainsPrefixTreeQuery.java +++ b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/ContainsPrefixTreeQuery.java @@ -353,7 +353,7 @@ public class ContainsPrefixTreeQuery extends AbstractPrefixTreeQuery { public long ramBytesUsed() { return RamUsageEstimator.alignObjectSize( RamUsageEstimator.NUM_BYTES_OBJECT_REF - + RamUsageEstimator.NUM_BYTES_INT) + + Integer.BYTES) + intSet.ramBytesUsed(); } diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Geo3DPoint.java b/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Geo3DPoint.java index ce36c20066f..e3837aa590c 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Geo3DPoint.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/geo3d/Geo3DPoint.java @@ -32,7 +32,7 @@ public final class Geo3DPoint extends Field { /** Indexing {@link FieldType}. */ public static final FieldType TYPE = new FieldType(); static { - TYPE.setDimensions(3, RamUsageEstimator.NUM_BYTES_INT); + TYPE.setDimensions(3, Integer.BYTES); TYPE.freeze(); } @@ -61,8 +61,8 @@ public final class Geo3DPoint extends Field { private void fillFieldsData(double planetMax, double x, double y, double z) { byte[] bytes = new byte[12]; NumericUtils.intToBytes(Geo3DUtil.encodeValue(planetMax, x), bytes, 0); - NumericUtils.intToBytes(Geo3DUtil.encodeValue(planetMax, y), bytes, 1); - NumericUtils.intToBytes(Geo3DUtil.encodeValue(planetMax, z), bytes, 2); + NumericUtils.intToBytes(Geo3DUtil.encodeValue(planetMax, y), bytes, Integer.BYTES); + NumericUtils.intToBytes(Geo3DUtil.encodeValue(planetMax, z), bytes, 2 * Integer.BYTES); fieldsData = new BytesRef(bytes); } } diff --git a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/PointInGeo3DShapeQuery.java b/lucene/spatial3d/src/java/org/apache/lucene/geo3d/PointInGeo3DShapeQuery.java index 84ff01b57e1..b7ef6220d39 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/geo3d/PointInGeo3DShapeQuery.java +++ 
b/lucene/spatial3d/src/java/org/apache/lucene/geo3d/PointInGeo3DShapeQuery.java @@ -106,8 +106,8 @@ public class PointInGeo3DShapeQuery extends Query { public void visit(int docID, byte[] packedValue) { assert packedValue.length == 12; double x = Geo3DUtil.decodeValueCenter(planetMax, NumericUtils.bytesToInt(packedValue, 0)); - double y = Geo3DUtil.decodeValueCenter(planetMax, NumericUtils.bytesToInt(packedValue, 1)); - double z = Geo3DUtil.decodeValueCenter(planetMax, NumericUtils.bytesToInt(packedValue, 2)); + double y = Geo3DUtil.decodeValueCenter(planetMax, NumericUtils.bytesToInt(packedValue, 1 * Integer.BYTES)); + double z = Geo3DUtil.decodeValueCenter(planetMax, NumericUtils.bytesToInt(packedValue, 2 * Integer.BYTES)); if (shape.isWithin(x, y, z)) { result.add(docID); hitCount[0]++; @@ -122,10 +122,10 @@ public class PointInGeo3DShapeQuery extends Query { // a Math.round from double to long, so e.g. 1.4 -> 1, and -1.4 -> -1: double xMin = Geo3DUtil.decodeValueMin(planetMax, NumericUtils.bytesToInt(minPackedValue, 0)); double xMax = Geo3DUtil.decodeValueMax(planetMax, NumericUtils.bytesToInt(maxPackedValue, 0)); - double yMin = Geo3DUtil.decodeValueMin(planetMax, NumericUtils.bytesToInt(minPackedValue, 1)); - double yMax = Geo3DUtil.decodeValueMax(planetMax, NumericUtils.bytesToInt(maxPackedValue, 1)); - double zMin = Geo3DUtil.decodeValueMin(planetMax, NumericUtils.bytesToInt(minPackedValue, 2)); - double zMax = Geo3DUtil.decodeValueMax(planetMax, NumericUtils.bytesToInt(maxPackedValue, 2)); + double yMin = Geo3DUtil.decodeValueMin(planetMax, NumericUtils.bytesToInt(minPackedValue, 1 * Integer.BYTES)); + double yMax = Geo3DUtil.decodeValueMax(planetMax, NumericUtils.bytesToInt(maxPackedValue, 1 * Integer.BYTES)); + double zMin = Geo3DUtil.decodeValueMin(planetMax, NumericUtils.bytesToInt(minPackedValue, 2 * Integer.BYTES)); + double zMax = Geo3DUtil.decodeValueMax(planetMax, NumericUtils.bytesToInt(maxPackedValue, 2 * Integer.BYTES)); //System.out.println(" 
compare: x=" + cellXMin + "-" + cellXMax + " y=" + cellYMin + "-" + cellYMax + " z=" + cellZMin + "-" + cellZMax); assert xMin <= xMax; diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/tst/TernaryTreeNode.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/tst/TernaryTreeNode.java index 26416b14919..34292fef351 100644 --- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/tst/TernaryTreeNode.java +++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/tst/TernaryTreeNode.java @@ -56,7 +56,7 @@ public class TernaryTreeNode { mem += hiKid.sizeInBytes(); } if (token != null) { - mem += RamUsageEstimator.shallowSizeOf(token) + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + RamUsageEstimator.NUM_BYTES_CHAR * token.length(); + mem += RamUsageEstimator.shallowSizeOf(token) + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + Character.BYTES * token.length(); } mem += RamUsageEstimator.shallowSizeOf(val); return mem;