LUCENE-9387: Remove CodecReader#ramBytesUsed. (#79)

This commit removes `ramBytesUsed()` from `CodecReader` and from all file formats
except vectors, the only remaining file format in the default codec that might use
significant amounts of memory. I also left `ramBytesUsed()` on the `completion`
format, another feature that can use large amounts of memory.

Other components that relied on being able to compute the memory usage of readers,
such as the facet module's TaxonomyReader and the analyzing suggester, now assume
that readers have a RAM usage of 0.
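
In practice, a `CodecReader` is no longer `Accountable`, so callers cannot ask a
reader for its memory usage directly anymore. A minimal sketch of the fallback
pattern, assuming readers count as 0 bytes unless a leaf still happens to
implement `Accountable` (the `RamEstimates` helper is hypothetical, for
illustration only):

```java
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.Accountable;

final class RamEstimates {
  // Hypothetical helper: sums ramBytesUsed() over the leaves that still
  // implement Accountable. With the default codec, leaves no longer do,
  // so they count as 0 bytes, which is the same assumption the taxonomy
  // reader and the analyzing suggester now make.
  static long estimateRamBytesUsed(IndexReader reader) {
    long bytes = 0;
    for (LeafReaderContext ctx : reader.leaves()) {
      if (ctx.reader() instanceof Accountable) {
        bytes += ((Accountable) ctx.reader()).ramBytesUsed();
      }
    }
    return bytes;
  }
}
```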
Adrien Grand 2021-04-14 14:37:54 +02:00 committed by GitHub
parent fbbdc62913
commit 79f14b1742
91 changed files with 65 additions and 1320 deletions

@@ -101,6 +101,8 @@ API Changes
* LUCENE-9853: JapaneseAnalyzer should use CJKWidthCharFilter for full-width and half-width character normalization.
(Tomoko Uchida)
* LUCENE-9387: Removed CodecReader#ramBytesUsed. (Adrien Grand)
Improvements
* LUCENE-9687: Hunspell support improvements: add API for spell-checking and suggestions, support compound words,

@@ -376,3 +376,8 @@ better to use the ordinal alone (integer-based datastructures) for per-document
call lookupOrd() a few times at the end (e.g. for the hits you want to display). Otherwise, if you
really don't want per-document ordinals, but instead a per-document `byte[]`, use a BinaryDocValues
field.
## Removed CodecReader.ramBytesUsed() (LUCENE-9387)
Lucene index readers now use so little memory with the default codec that the
ability to estimate their RAM usage has been removed.
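
For code that called the removed method, a minimal migration sketch (the
`MigrationExample` class is hypothetical, for illustration only):

```java
import org.apache.lucene.index.CodecReader;
import org.apache.lucene.util.Accountable;

final class MigrationExample {
  // Before this change: long ramBytes = reader.ramBytesUsed();
  // After it, CodecReader no longer implements Accountable, so either
  // assume 0 bytes or keep an instanceof guard for readers that still
  // expose accounting.
  static long ramBytesUsedOrZero(CodecReader reader) {
    return reader instanceof Accountable
        ? ((Accountable) reader).ramBytesUsed()
        : 0L;
  }
}
```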

@@ -17,18 +17,13 @@
package org.apache.lucene.backward_codecs.lucene40.blocktree;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.automaton.CompiledAutomaton;
import org.apache.lucene.util.fst.ByteSequenceOutputs;
import org.apache.lucene.util.fst.FST;
@@ -39,14 +34,10 @@ import org.apache.lucene.util.fst.OffHeapFSTStore;
*
* @lucene.internal
*/
public final class FieldReader extends Terms implements Accountable {
public final class FieldReader extends Terms {
// private final boolean DEBUG = BlockTreeTermsWriter.DEBUG;
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(FieldReader.class)
+ 3 * RamUsageEstimator.shallowSizeOfInstance(BytesRef.class);
final long numTerms;
final FieldInfo fieldInfo;
final long sumTotalTermFreq;
@@ -200,20 +191,6 @@ public final class FieldReader extends Terms implements Accountable {
this, compiled.automaton, compiled.runAutomaton, compiled.commonSuffixRef, startTerm);
}
@Override
public long ramBytesUsed() {
return BASE_RAM_BYTES_USED + ((index != null) ? index.ramBytesUsed() : 0);
}
@Override
public Collection<Accountable> getChildResources() {
if (index == null) {
return Collections.emptyList();
} else {
return Collections.singleton(Accountables.namedAccountable("term index", index));
}
}
@Override
public String toString() {
return "BlockTreeTerms(seg="

@@ -18,7 +18,6 @@ package org.apache.lucene.backward_codecs.lucene40.blocktree;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
@@ -35,8 +34,6 @@ import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.fst.ByteSequenceOutputs;
@@ -373,23 +370,6 @@ public final class Lucene40BlockTreeTermsReader extends FieldsProducer {
}
}
@Override
public long ramBytesUsed() {
long sizeInBytes = postingsReader.ramBytesUsed();
for (FieldReader reader : fieldMap.values()) {
sizeInBytes += reader.ramBytesUsed();
}
return sizeInBytes;
}
@Override
public Collection<Accountable> getChildResources() {
List<Accountable> resources =
new ArrayList<>(Accountables.namedAccountables("field", fieldMap));
resources.add(Accountables.namedAccountable("delegate", postingsReader));
return Collections.unmodifiableList(resources);
}
@Override
public void checkIntegrity() throws IOException {
// terms index

@@ -46,7 +46,6 @@ import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.RamUsageEstimator;
/**
* Concrete class that reads docId(maybe frq,pos,offset,payloads) list with postings format.
@@ -55,9 +54,6 @@ import org.apache.lucene.util.RamUsageEstimator;
*/
public final class Lucene50PostingsReader extends PostingsReaderBase {
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(Lucene50PostingsReader.class);
private final IndexInput docIn;
private final IndexInput posIn;
private final IndexInput payIn;
@@ -1844,11 +1840,6 @@ public final class Lucene50PostingsReader extends PostingsReaderBase {
}
}
@Override
public long ramBytesUsed() {
return BASE_RAM_BYTES_USED;
}
@Override
public void checkIntegrity() throws IOException {
if (docIn != null) {

@@ -18,9 +18,8 @@ package org.apache.lucene.backward_codecs.lucene50.compressing;
import java.io.Closeable;
import java.io.IOException;
import org.apache.lucene.util.Accountable;
abstract class FieldsIndex implements Accountable, Cloneable, Closeable {
abstract class FieldsIndex implements Cloneable, Closeable {
/** Get the start pointer for the block that contains the given docID. */
abstract long getStartPointer(int docID);

@@ -25,7 +25,6 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.RandomAccessInput;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.DirectMonotonicReader;
final class FieldsIndexReader extends FieldsIndex {
@@ -33,9 +32,6 @@ final class FieldsIndexReader extends FieldsIndex {
static final int VERSION_START = 0;
static final int VERSION_CURRENT = 0;
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(FieldsIndexReader.class);
private final int maxDoc;
private final int blockShift;
private final int numChunks;
@@ -111,15 +107,6 @@ final class FieldsIndexReader extends FieldsIndex {
startPointers = DirectMonotonicReader.getInstance(startPointersMeta, startPointersSlice);
}
@Override
public long ramBytesUsed() {
return BASE_RAM_BYTES_USED
+ docsMeta.ramBytesUsed()
+ startPointersMeta.ramBytesUsed()
+ docs.ramBytesUsed()
+ startPointers.ramBytesUsed();
}
@Override
public void close() throws IOException {
indexInput.close();

@@ -19,24 +19,14 @@ package org.apache.lucene.backward_codecs.lucene50.compressing;
import static org.apache.lucene.util.BitUtil.zigZagDecode;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.PackedInts;
final class LegacyFieldsIndexReader extends FieldsIndex {
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(LegacyFieldsIndexReader.class);
final int maxDoc;
final int[] docBases;
final long[] startPointers;
@@ -177,46 +167,6 @@ final class LegacyFieldsIndexReader extends FieldsIndex {
return this;
}
@Override
public long ramBytesUsed() {
long res = BASE_RAM_BYTES_USED;
res += RamUsageEstimator.shallowSizeOf(docBasesDeltas);
for (PackedInts.Reader r : docBasesDeltas) {
res += r.ramBytesUsed();
}
res += RamUsageEstimator.shallowSizeOf(startPointersDeltas);
for (PackedInts.Reader r : startPointersDeltas) {
res += r.ramBytesUsed();
}
res += RamUsageEstimator.sizeOf(docBases);
res += RamUsageEstimator.sizeOf(startPointers);
res += RamUsageEstimator.sizeOf(avgChunkDocs);
res += RamUsageEstimator.sizeOf(avgChunkSizes);
return res;
}
@Override
public Collection<Accountable> getChildResources() {
List<Accountable> resources = new ArrayList<>();
long docBaseDeltaBytes = RamUsageEstimator.shallowSizeOf(docBasesDeltas);
for (PackedInts.Reader r : docBasesDeltas) {
docBaseDeltaBytes += r.ramBytesUsed();
}
resources.add(Accountables.namedAccountable("doc base deltas", docBaseDeltaBytes));
long startPointerDeltaBytes = RamUsageEstimator.shallowSizeOf(startPointersDeltas);
for (PackedInts.Reader r : startPointersDeltas) {
startPointerDeltaBytes += r.ramBytesUsed();
}
resources.add(Accountables.namedAccountable("start pointer deltas", startPointerDeltaBytes));
return Collections.unmodifiableList(resources);
}
@Override
public String toString() {
return getClass().getSimpleName() + "(blocks=" + docBases.length + ")";

@@ -19,8 +19,6 @@ package org.apache.lucene.backward_codecs.lucene50.compressing;
import java.io.EOFException;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.StoredFieldsReader;
import org.apache.lucene.codecs.compressing.CompressionMode;
@@ -39,8 +37,6 @@ import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BitUtil;
import org.apache.lucene.util.BytesRef;
@@ -791,16 +787,6 @@ public final class Lucene50CompressingStoredFieldsReader extends StoredFieldsRea
return packedIntsVersion;
}
@Override
public long ramBytesUsed() {
return indexReader.ramBytesUsed();
}
@Override
public Collection<Accountable> getChildResources() {
return Collections.singleton(Accountables.namedAccountable("stored field index", indexReader));
}
@Override
public void checkIntegrity() throws IOException {
indexReader.checkIntegrity();

@@ -17,8 +17,6 @@
package org.apache.lucene.backward_codecs.lucene50.compressing;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.NoSuchElementException;
import org.apache.lucene.codecs.CodecUtil;
@@ -43,8 +41,6 @@ import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
@@ -1337,16 +1333,6 @@ public final class Lucene50CompressingTermVectorsReader extends TermVectorsReade
return sum;
}
@Override
public long ramBytesUsed() {
return indexReader.ramBytesUsed();
}
@Override
public Collection<Accountable> getChildResources() {
return Collections.singleton(Accountables.namedAccountable("term vector index", indexReader));
}
@Override
public void checkIntegrity() throws IOException {
indexReader.checkIntegrity();

@@ -130,11 +130,6 @@ public class Lucene60PointsReader extends PointsReader {
return readers.get(fieldInfo.number);
}
@Override
public long ramBytesUsed() {
return 0L;
}
@Override
public void checkIntegrity() throws IOException {
CodecUtil.checksumEntireFile(dataIn);

@@ -43,7 +43,6 @@ import org.apache.lucene.store.RandomAccessInput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LongValues;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.DirectMonotonicReader;
import org.apache.lucene.util.packed.DirectReader;
@@ -54,7 +53,6 @@ final class Lucene70DocValuesProducer extends DocValuesProducer {
private final Map<String, SortedEntry> sorted = new HashMap<>();
private final Map<String, SortedSetEntry> sortedSets = new HashMap<>();
private final Map<String, SortedNumericEntry> sortedNumerics = new HashMap<>();
private long ramBytesUsed;
private final IndexInput data;
private final int maxDoc;
@@ -69,7 +67,6 @@ final class Lucene70DocValuesProducer extends DocValuesProducer {
String metaName =
IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
this.maxDoc = state.segmentInfo.maxDoc();
ramBytesUsed = RamUsageEstimator.shallowSizeOfInstance(getClass());
int version = -1;
@@ -164,7 +161,6 @@ final class Lucene70DocValuesProducer extends DocValuesProducer {
}
if (tableSize >= 0) {
entry.table = new long[tableSize];
ramBytesUsed += RamUsageEstimator.sizeOf(entry.table);
for (int i = 0; i < tableSize; ++i) {
entry.table[i] = meta.readLong();
}
@@ -195,7 +191,6 @@ final class Lucene70DocValuesProducer extends DocValuesProducer {
final int blockShift = meta.readVInt();
entry.addressesMeta =
DirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1L, blockShift);
ramBytesUsed += entry.addressesMeta.ramBytesUsed();
entry.addressesLength = meta.readLong();
}
return entry;
@@ -235,7 +230,6 @@ final class Lucene70DocValuesProducer extends DocValuesProducer {
final int blockShift = meta.readVInt();
entry.addressesMeta =
DirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1, blockShift);
ramBytesUsed += entry.addressesMeta.ramBytesUsed();
entry.addressesLength = meta.readLong();
readTermDict(meta, entry);
return entry;
@@ -273,7 +267,6 @@ final class Lucene70DocValuesProducer extends DocValuesProducer {
final int blockShift = meta.readVInt();
entry.addressesMeta =
DirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1, blockShift);
ramBytesUsed += entry.addressesMeta.ramBytesUsed();
entry.addressesLength = meta.readLong();
}
return entry;
@@ -356,11 +349,6 @@ final class Lucene70DocValuesProducer extends DocValuesProducer {
long addressesLength;
}
@Override
public long ramBytesUsed() {
return ramBytesUsed;
}
@Override
public NumericDocValues getNumeric(FieldInfo field) throws IOException {
NumericEntry entry = numerics.get(field.name);

@@ -433,11 +433,6 @@ final class Lucene70NormsProducer extends NormsProducer implements Cloneable {
data.close();
}
@Override
public long ramBytesUsed() {
return 64L * norms.size(); // good enough
}
@Override
public void checkIntegrity() throws IOException {
CodecUtil.checksumEntireFile(data);

@@ -45,7 +45,6 @@ import org.apache.lucene.store.RandomAccessInput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LongValues;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.compress.LZ4;
import org.apache.lucene.util.packed.DirectMonotonicReader;
import org.apache.lucene.util.packed.DirectReader;
@@ -57,7 +56,6 @@ final class Lucene80DocValuesProducer extends DocValuesProducer {
private final Map<String, SortedEntry> sorted = new HashMap<>();
private final Map<String, SortedSetEntry> sortedSets = new HashMap<>();
private final Map<String, SortedNumericEntry> sortedNumerics = new HashMap<>();
private long ramBytesUsed;
private final IndexInput data;
private final int maxDoc;
private int version = -1;
@@ -73,7 +71,6 @@ final class Lucene80DocValuesProducer extends DocValuesProducer {
String metaName =
IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
this.maxDoc = state.segmentInfo.maxDoc();
ramBytesUsed = RamUsageEstimator.shallowSizeOfInstance(getClass());
// read in the entries from the metadata file.
try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName, state.context)) {
@@ -189,7 +186,6 @@ final class Lucene80DocValuesProducer extends DocValuesProducer {
}
if (tableSize >= 0) {
entry.table = new long[tableSize];
ramBytesUsed += RamUsageEstimator.sizeOf(entry.table);
for (int i = 0; i < tableSize; ++i) {
entry.table[i] = meta.readLong();
}
@@ -234,7 +230,6 @@ final class Lucene80DocValuesProducer extends DocValuesProducer {
final int blockShift = meta.readVInt();
entry.addressesMeta = DirectMonotonicReader.loadMeta(meta, numAddresses, blockShift);
ramBytesUsed += entry.addressesMeta.ramBytesUsed();
entry.addressesLength = meta.readLong();
}
return entry;
@@ -278,7 +273,6 @@ final class Lucene80DocValuesProducer extends DocValuesProducer {
final int blockShift = meta.readVInt();
entry.addressesMeta =
DirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1, blockShift);
ramBytesUsed += entry.addressesMeta.ramBytesUsed();
entry.addressesLength = meta.readLong();
readTermDict(meta, entry);
return entry;
@@ -327,7 +321,6 @@ final class Lucene80DocValuesProducer extends DocValuesProducer {
final int blockShift = meta.readVInt();
entry.addressesMeta =
DirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1, blockShift);
ramBytesUsed += entry.addressesMeta.ramBytesUsed();
entry.addressesLength = meta.readLong();
}
return entry;
@@ -426,11 +419,6 @@ final class Lucene80DocValuesProducer extends DocValuesProducer {
long addressesLength;
}
@Override
public long ramBytesUsed() {
return ramBytesUsed;
}
@Override
public NumericDocValues getNumeric(FieldInfo field) throws IOException {
NumericEntry entry = numerics.get(field.name);

@@ -475,11 +475,6 @@ final class Lucene80NormsProducer extends NormsProducer implements Cloneable {
data.close();
}
@Override
public long ramBytesUsed() {
return 64L * norms.size(); // good enough
}
@Override
public void checkIntegrity() throws IOException {
CodecUtil.checksumEntireFile(data);

@@ -46,7 +46,6 @@ import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BitUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.RamUsageEstimator;
/**
* Concrete class that reads docId(maybe frq,pos,offset,payloads) list with postings format.
@@ -55,9 +54,6 @@ import org.apache.lucene.util.RamUsageEstimator;
*/
public final class Lucene84PostingsReader extends PostingsReaderBase {
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(Lucene84PostingsReader.class);
private final IndexInput docIn;
private final IndexInput posIn;
private final IndexInput payIn;
@@ -2070,11 +2066,6 @@ public final class Lucene84PostingsReader extends PostingsReaderBase {
}
}
@Override
public long ramBytesUsed() {
return BASE_RAM_BYTES_USED;
}
@Override
public void checkIntegrity() throws IOException {
if (docIn != null) {

@@ -138,11 +138,6 @@ public class Lucene86PointsReader extends PointsReader {
return readers.get(fieldInfo.number);
}
@Override
public long ramBytesUsed() {
return 0L;
}
@Override
public void checkIntegrity() throws IOException {
CodecUtil.checksumEntireFile(indexIn);

@@ -17,11 +17,8 @@
package org.apache.lucene.codecs.blockterms;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.TreeMap;
import org.apache.lucene.codecs.BlockTermState;
import org.apache.lucene.codecs.CodecUtil;
@@ -41,7 +38,6 @@ import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
@@ -58,8 +54,6 @@ import org.apache.lucene.util.RamUsageEstimator;
* @lucene.experimental
*/
public class BlockTermsReader extends FieldsProducer {
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(BlockTermsReader.class);
// Open input to the main terms dict file (_X.tis)
private final IndexInput in;
@@ -871,30 +865,6 @@ public class BlockTermsReader extends FieldsProducer {
}
}
@Override
public long ramBytesUsed() {
long ramBytesUsed = BASE_RAM_BYTES_USED;
ramBytesUsed += (postingsReader != null) ? postingsReader.ramBytesUsed() : 0;
ramBytesUsed += (indexReader != null) ? indexReader.ramBytesUsed() : 0;
ramBytesUsed += fields.size() * 2L * RamUsageEstimator.NUM_BYTES_OBJECT_REF;
for (FieldReader reader : fields.values()) {
ramBytesUsed += reader.ramBytesUsed();
}
return ramBytesUsed;
}
@Override
public Collection<Accountable> getChildResources() {
List<Accountable> resources = new ArrayList<>();
if (indexReader != null) {
resources.add(Accountables.namedAccountable("term index", indexReader));
}
if (postingsReader != null) {
resources.add(Accountables.namedAccountable("delegate", postingsReader));
}
return Collections.unmodifiableList(resources);
}
@Override
public String toString() {
return getClass().getSimpleName()

@@ -17,11 +17,8 @@
package org.apache.lucene.codecs.blocktreeords;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.TreeMap;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.FieldsProducer;
@@ -34,8 +31,6 @@ import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
@@ -253,22 +248,6 @@ public final class OrdsBlockTreeTermsReader extends FieldsProducer {
}
}
@Override
public long ramBytesUsed() {
long sizeInBytes = postingsReader.ramBytesUsed();
for (OrdsFieldReader reader : fields.values()) {
sizeInBytes += reader.ramBytesUsed();
}
return sizeInBytes;
}
@Override
public Collection<Accountable> getChildResources() {
List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", fields));
resources.add(Accountables.namedAccountable("delegate", postingsReader));
return Collections.unmodifiableList(resources);
}
@Override
public void checkIntegrity() throws IOException {
// term dictionary

@@ -17,8 +17,6 @@
package org.apache.lucene.codecs.blocktreeords;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import org.apache.lucene.codecs.blocktreeords.FSTOrdsOutputs.Output;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexOptions;
@@ -26,14 +24,12 @@ import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.automaton.CompiledAutomaton;
import org.apache.lucene.util.fst.FST;
/** BlockTree's implementation of {@link Terms}. */
final class OrdsFieldReader extends Terms implements Accountable {
final class OrdsFieldReader extends Terms {
final long numTerms;
final FieldInfo fieldInfo;
final long sumTotalTermFreq;
@@ -180,20 +176,6 @@ final class OrdsFieldReader extends Terms implements Accountable {
return new OrdsIntersectTermsEnum(this, compiled, startTerm);
}
@Override
public long ramBytesUsed() {
return ((index != null) ? index.ramBytesUsed() : 0);
}
@Override
public Collection<Accountable> getChildResources() {
if (index == null) {
return Collections.emptyList();
} else {
return Collections.singleton(Accountables.namedAccountable("term index", index));
}
}
@Override
public String toString() {
return "OrdsBlockTreeTerms(terms="

@@ -18,8 +18,6 @@ package org.apache.lucene.codecs.bloom;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
@@ -44,8 +42,6 @@ import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.automaton.CompiledAutomaton;
@@ -372,27 +368,6 @@ public final class BloomFilteringPostingsFormat extends PostingsFormat {
}
}
@Override
public long ramBytesUsed() {
long sizeInBytes =
((delegateFieldsProducer != null) ? delegateFieldsProducer.ramBytesUsed() : 0);
for (Map.Entry<String, FuzzySet> entry : bloomsByFieldName.entrySet()) {
sizeInBytes += entry.getKey().length() * Character.BYTES;
sizeInBytes += entry.getValue().ramBytesUsed();
}
return sizeInBytes;
}
@Override
public Collection<Accountable> getChildResources() {
List<Accountable> resources =
new ArrayList<>(Accountables.namedAccountables("field", bloomsByFieldName));
if (delegateFieldsProducer != null) {
resources.add(Accountables.namedAccountable("delegate", delegateFieldsProducer));
}
return Collections.unmodifiableList(resources);
}
@Override
public void checkIntegrity() throws IOException {
delegateFieldsProducer.checkIntegrity();

@@ -17,7 +17,6 @@
package org.apache.lucene.codecs.memory;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
@@ -42,7 +41,6 @@ import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.ByteBuffersDataOutput;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator;
@@ -148,21 +146,6 @@ public final class DirectPostingsFormat extends PostingsFormat {
@Override
public void close() {}
@Override
public long ramBytesUsed() {
long sizeInBytes = 0;
for (Map.Entry<String, DirectField> entry : fields.entrySet()) {
sizeInBytes += entry.getKey().length() * Character.BYTES;
sizeInBytes += entry.getValue().ramBytesUsed();
}
return sizeInBytes;
}
@Override
public Collection<Accountable> getChildResources() {
return Accountables.namedAccountables("field", fields);
}
@Override
public void checkIntegrity() throws IOException {
// if we read entirely into ram, we already validated.

@@ -19,10 +19,8 @@ package org.apache.lucene.codecs.memory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.TreeMap;
import org.apache.lucene.codecs.BlockTermState;
import org.apache.lucene.codecs.CodecUtil;
@@ -42,8 +40,6 @@ import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
@@ -173,10 +169,7 @@ public class FSTTermsReader extends FieldsProducer {
}
}
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(TermsReader.class);
final class TermsReader extends Terms implements Accountable {
final class TermsReader extends Terms {
final FieldInfo fieldInfo;
final long numTerms;
@@ -201,24 +194,6 @@ public class FSTTermsReader extends FieldsProducer {
this.dict = new FST<>(in, in, new FSTTermOutputs(fieldInfo));
}
@Override
public long ramBytesUsed() {
long bytesUsed = BASE_RAM_BYTES_USED;
if (dict != null) {
bytesUsed += dict.ramBytesUsed();
}
return bytesUsed;
}
@Override
public Collection<Accountable> getChildResources() {
if (dict == null) {
return Collections.emptyList();
} else {
return Collections.singletonList(Accountables.namedAccountable("terms", dict));
}
}
@Override
public String toString() {
return "FSTTerms(terms="
@@ -796,22 +771,6 @@ public class FSTTermsReader extends FieldsProducer {
}
}
@Override
public long ramBytesUsed() {
long ramBytesUsed = postingsReader.ramBytesUsed();
for (TermsReader r : fields.values()) {
ramBytesUsed += r.ramBytesUsed();
}
return ramBytesUsed;
}
@Override
public Collection<Accountable> getChildResources() {
List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", fields));
resources.add(Accountables.namedAccountable("delegate", postingsReader));
return Collections.unmodifiableCollection(resources);
}
@Override
public String toString() {
return getClass().getSimpleName()

@@ -38,25 +38,27 @@ import java.util.Locale;
import java.util.Map;
import java.util.function.IntFunction;
import org.apache.lucene.codecs.DocValuesProducer;
import org.apache.lucene.index.*;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.BufferedChecksumIndexInput;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.StringHelper;
class SimpleTextDocValuesReader extends DocValuesProducer {
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(SimpleTextDocValuesReader.class)
+ RamUsageEstimator.shallowSizeOfInstance(BytesRef.class);
static class OneField {
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(OneField.class);
long dataStartFilePointer;
String pattern;
String ordPattern;
@@ -784,14 +786,6 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
scratch.bytes(), prefix.length, scratch.length() - prefix.length, StandardCharsets.UTF_8);
}
@Override
public long ramBytesUsed() {
return BASE_RAM_BYTES_USED
+ RamUsageEstimator.sizeOf(scratch.bytes())
+ fields.size()
* (RamUsageEstimator.NUM_BYTES_OBJECT_REF * 2L + OneField.BASE_RAM_BYTES_USED);
}
@Override
public String toString() {
return getClass().getSimpleName() + "(fields=" + fields.size() + ")";

@@ -69,10 +69,6 @@ import org.apache.lucene.util.fst.Util;
class SimpleTextFieldsReader extends FieldsProducer {
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(SimpleTextFieldsReader.class)
+ RamUsageEstimator.shallowSizeOfInstance(TreeMap.class);
private final TreeMap<String, Long> fields;
private final IndexInput in;
private final FieldInfos fieldInfos;
@@ -744,21 +740,6 @@ class SimpleTextFieldsReader extends FieldsProducer {
in.close();
}
@Override
public synchronized long ramBytesUsed() {
long sizeInBytes =
BASE_RAM_BYTES_USED + fields.size() * 2 * RamUsageEstimator.NUM_BYTES_OBJECT_REF;
for (SimpleTextTerms simpleTextTerms : termsCache.values()) {
sizeInBytes += (simpleTextTerms != null) ? simpleTextTerms.ramBytesUsed() : 0;
}
return sizeInBytes;
}
@Override
public synchronized Collection<Accountable> getChildResources() {
return Accountables.namedAccountables("field", termsCache);
}
@Override
public String toString() {
return getClass().getSimpleName() + "(fields=" + fields.size() + ")";

@@ -17,7 +17,6 @@
package org.apache.lucene.codecs.simpletext;
import java.io.IOException;
import java.util.Collection;
import org.apache.lucene.codecs.NormsConsumer;
import org.apache.lucene.codecs.NormsFormat;
import org.apache.lucene.codecs.NormsProducer;
@@ -26,7 +25,6 @@ import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.util.Accountable;
/**
* plain-text norms format.
@@ -74,16 +72,6 @@ public class SimpleTextNormsFormat extends NormsFormat {
impl.close();
}
@Override
public long ramBytesUsed() {
return impl.ramBytesUsed();
}
@Override
public Collection<Accountable> getChildResources() {
return impl.getChildResources();
}
@Override
public void checkIntegrity() throws IOException {
impl.checkIntegrity();

@@ -248,11 +248,6 @@ class SimpleTextPointsReader extends PointsReader {
}
}
@Override
public long ramBytesUsed() {
return 0L;
}
@Override
public void close() throws IOException {
dataIn.close();

@@ -16,7 +16,18 @@
*/
package org.apache.lucene.codecs.simpletext;
import static org.apache.lucene.codecs.simpletext.SimpleTextStoredFieldsWriter.*;
import static org.apache.lucene.codecs.simpletext.SimpleTextStoredFieldsWriter.DOC;
import static org.apache.lucene.codecs.simpletext.SimpleTextStoredFieldsWriter.END;
import static org.apache.lucene.codecs.simpletext.SimpleTextStoredFieldsWriter.FIELD;
import static org.apache.lucene.codecs.simpletext.SimpleTextStoredFieldsWriter.NAME;
import static org.apache.lucene.codecs.simpletext.SimpleTextStoredFieldsWriter.TYPE;
import static org.apache.lucene.codecs.simpletext.SimpleTextStoredFieldsWriter.TYPE_BINARY;
import static org.apache.lucene.codecs.simpletext.SimpleTextStoredFieldsWriter.TYPE_DOUBLE;
import static org.apache.lucene.codecs.simpletext.SimpleTextStoredFieldsWriter.TYPE_FLOAT;
import static org.apache.lucene.codecs.simpletext.SimpleTextStoredFieldsWriter.TYPE_INT;
import static org.apache.lucene.codecs.simpletext.SimpleTextStoredFieldsWriter.TYPE_LONG;
import static org.apache.lucene.codecs.simpletext.SimpleTextStoredFieldsWriter.TYPE_STRING;
import static org.apache.lucene.codecs.simpletext.SimpleTextStoredFieldsWriter.VALUE;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
@@ -36,10 +47,8 @@ import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.StringHelper;
/**
@@ -51,11 +60,6 @@ import org.apache.lucene.util.StringHelper;
*/
public class SimpleTextStoredFieldsReader extends StoredFieldsReader {
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(SimpleTextStoredFieldsReader.class)
+ RamUsageEstimator.shallowSizeOfInstance(BytesRef.class)
+ RamUsageEstimator.shallowSizeOfInstance(CharsRef.class);
private long offsets[]; /* docid -> offset in .fld file */
private IndexInput in;
private BytesRefBuilder scratch = new BytesRefBuilder();
@@ -223,14 +227,6 @@ public class SimpleTextStoredFieldsReader extends StoredFieldsReader {
b.offset + b.length);
}
@Override
public long ramBytesUsed() {
return BASE_RAM_BYTES_USED
+ RamUsageEstimator.sizeOf(offsets)
+ RamUsageEstimator.sizeOf(scratch.bytes())
+ RamUsageEstimator.sizeOf(scratchUTF16.chars());
}
@Override
public String toString() {
return getClass().getSimpleName();

@@ -43,10 +43,8 @@ import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.StringHelper;
/**
@@ -58,11 +56,6 @@ import org.apache.lucene.util.StringHelper;
*/
public class SimpleTextTermVectorsReader extends TermVectorsReader {
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(SimpleTextTermVectorsReader.class)
+ RamUsageEstimator.shallowSizeOfInstance(BytesRef.class)
+ RamUsageEstimator.shallowSizeOfInstance(CharsRef.class);
private long offsets[]; /* docid -> offset in .vec file */
private IndexInput in;
private BytesRefBuilder scratch = new BytesRefBuilder();
@@ -583,11 +576,6 @@ public class SimpleTextTermVectorsReader extends TermVectorsReader {
}
}
@Override
public long ramBytesUsed() {
return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(offsets);
}
@Override
public String toString() {
return getClass().getSimpleName();

@@ -25,7 +25,6 @@ import org.apache.lucene.store.DataOutput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IntsRefBuilder;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.fst.BytesRefFSTEnum;
import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.FSTCompiler;
@@ -51,20 +50,12 @@ import org.apache.lucene.util.fst.Util;
*/
public class FSTDictionary implements IndexDictionary {
private static final long BASE_RAM_USAGE =
RamUsageEstimator.shallowSizeOfInstance(FSTDictionary.class);
protected final FST<Long> fst;
protected FSTDictionary(FST<Long> fst) {
this.fst = fst;
}
@Override
public long ramBytesUsed() {
return BASE_RAM_USAGE + fst.ramBytesUsed();
}
@Override
public void write(DataOutput output, BlockEncoder blockEncoder) throws IOException {
if (blockEncoder == null) {
@@ -168,11 +159,6 @@ public class FSTDictionary implements IndexDictionary {
}
return dictionary.browser();
}
@Override
public long ramBytesUsed() {
return dictionary == null ? 0 : dictionary.ramBytesUsed();
}
}
/**

@@ -26,10 +26,8 @@ import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.RamUsageEstimator;
/**
* Metadata and stats for one field in the index.
@@ -38,10 +36,7 @@ import org.apache.lucene.util.RamUsageEstimator;
*
* @lucene.experimental
*/
public class FieldMetadata implements Accountable {
private static final long BASE_RAM_USAGE =
RamUsageEstimator.shallowSizeOfInstance(FieldMetadata.class);
public class FieldMetadata {
protected final FieldInfo fieldInfo;
protected final boolean isMutable;
@@ -186,11 +181,6 @@ public class FieldMetadata implements Accountable {
return lastTerm;
}
@Override
public long ramBytesUsed() {
return BASE_RAM_USAGE + (docsSeen == null ? 0 : docsSeen.ramBytesUsed());
}
/** Reads/writes field metadata. */
public static class Serializer {

@@ -19,7 +19,6 @@ package org.apache.lucene.codecs.uniformsplit;
import java.io.IOException;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOSupplier;
@@ -39,7 +38,7 @@ import org.apache.lucene.util.IOSupplier;
*
* @lucene.experimental
*/
public interface IndexDictionary extends Accountable {
public interface IndexDictionary {
/**
* Writes this dictionary to the provided output.
@@ -103,5 +102,5 @@ public interface IndexDictionary extends Accountable {
* org.apache.lucene.index.TermsEnum#seekExact} are called (it is not loaded for a direct
* all-terms enumeration).
*/
interface BrowserSupplier extends IOSupplier<Browser>, Accountable {}
interface BrowserSupplier extends IOSupplier<Browser> {}
}

@@ -23,9 +23,7 @@ import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.automaton.CompiledAutomaton;
/**
@@ -38,10 +36,7 @@ import org.apache.lucene.util.automaton.CompiledAutomaton;
* @see UniformSplitTermsWriter
* @lucene.experimental
*/
public class UniformSplitTerms extends Terms implements Accountable {
private static final long BASE_RAM_USAGE =
RamUsageEstimator.shallowSizeOfInstance(UniformSplitTerms.class);
public class UniformSplitTerms extends Terms {
protected final IndexInput blockInput;
protected final FieldMetadata fieldMetadata;
@@ -149,17 +144,4 @@ public class UniformSplitTerms extends Terms implements Accountable {
public boolean hasPayloads() {
return fieldMetadata.getFieldInfo().hasPayloads();
}
@Override
public long ramBytesUsed() {
return ramBytesUsedWithoutDictionary() + getDictionaryRamBytesUsed();
}
public long ramBytesUsedWithoutDictionary() {
return BASE_RAM_USAGE + fieldMetadata.ramBytesUsed();
}
public long getDictionaryRamBytesUsed() {
return dictionaryBrowserSupplier.ramBytesUsed();
}
}

@@ -17,7 +17,12 @@
package org.apache.lucene.codecs.uniformsplit;
import static org.apache.lucene.codecs.uniformsplit.UniformSplitPostingsFormat.*;
import static org.apache.lucene.codecs.uniformsplit.UniformSplitPostingsFormat.NAME;
import static org.apache.lucene.codecs.uniformsplit.UniformSplitPostingsFormat.TERMS_BLOCKS_EXTENSION;
import static org.apache.lucene.codecs.uniformsplit.UniformSplitPostingsFormat.TERMS_DICTIONARY_EXTENSION;
import static org.apache.lucene.codecs.uniformsplit.UniformSplitPostingsFormat.VERSION_CURRENT;
import static org.apache.lucene.codecs.uniformsplit.UniformSplitPostingsFormat.VERSION_ENCODABLE_FIELDS_METADATA;
import static org.apache.lucene.codecs.uniformsplit.UniformSplitPostingsFormat.VERSION_START;
import java.io.IOException;
import java.util.ArrayList;
@@ -40,7 +45,6 @@ import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.RamUsageEstimator;
/**
* A block-based terms index and dictionary based on the Uniform Split technique.
@@ -50,10 +54,6 @@ import org.apache.lucene.util.RamUsageEstimator;
*/
public class UniformSplitTermsReader extends FieldsProducer {
private static final long BASE_RAM_USAGE =
RamUsageEstimator.shallowSizeOfInstance(UniformSplitTermsReader.class)
+ RamUsageEstimator.shallowSizeOfInstance(IndexInput.class) * 2;
protected final PostingsReaderBase postingsReader;
protected final int version;
protected final IndexInput blockInput;
@@ -294,24 +294,6 @@ public class UniformSplitTermsReader extends FieldsProducer {
return fieldToTermsMap.size();
}
@Override
public long ramBytesUsed() {
long ramUsage = BASE_RAM_USAGE;
ramUsage += postingsReader.ramBytesUsed();
ramUsage += RamUsageUtil.ramBytesUsedByHashMapOfSize(fieldToTermsMap.size());
ramUsage += getTermsRamBytesUsed();
ramUsage += RamUsageUtil.ramBytesUsedByUnmodifiableArrayListOfSize(sortedFieldNames.size());
return ramUsage;
}
protected long getTermsRamBytesUsed() {
long ramUsage = 0L;
for (UniformSplitTerms terms : fieldToTermsMap.values()) {
ramUsage += terms.ramBytesUsed();
}
return ramUsage;
}
/** Positions the given {@link IndexInput} at the beginning of the fields metadata. */
protected void seekFieldsMetadata(IndexInput indexInput) throws IOException {
indexInput.seek(indexInput.length() - CodecUtil.footerLength() - 8);

@@ -17,7 +17,11 @@
package org.apache.lucene.codecs.uniformsplit.sharedterms;
import static org.apache.lucene.codecs.uniformsplit.sharedterms.STUniformSplitPostingsFormat.*;
import static org.apache.lucene.codecs.uniformsplit.UniformSplitPostingsFormat.VERSION_START;
import static org.apache.lucene.codecs.uniformsplit.sharedterms.STUniformSplitPostingsFormat.NAME;
import static org.apache.lucene.codecs.uniformsplit.sharedterms.STUniformSplitPostingsFormat.TERMS_BLOCKS_EXTENSION;
import static org.apache.lucene.codecs.uniformsplit.sharedterms.STUniformSplitPostingsFormat.TERMS_DICTIONARY_EXTENSION;
import static org.apache.lucene.codecs.uniformsplit.sharedterms.STUniformSplitPostingsFormat.VERSION_CURRENT;
import java.io.IOException;
import java.util.Collection;
@@ -25,7 +29,6 @@ import org.apache.lucene.codecs.PostingsReaderBase;
import org.apache.lucene.codecs.uniformsplit.BlockDecoder;
import org.apache.lucene.codecs.uniformsplit.FieldMetadata;
import org.apache.lucene.codecs.uniformsplit.IndexDictionary;
import org.apache.lucene.codecs.uniformsplit.UniformSplitTerms;
import org.apache.lucene.codecs.uniformsplit.UniformSplitTermsReader;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.SegmentReadState;
@@ -124,18 +127,6 @@ public class STUniformSplitTermsReader extends UniformSplitTermsReader {
}
}
@Override
protected long getTermsRamBytesUsed() {
long termsRamUsage = 0L;
long dictionaryRamUsage = 0L;
for (UniformSplitTerms terms : fieldToTermsMap.values()) {
termsRamUsage += terms.ramBytesUsedWithoutDictionary();
dictionaryRamUsage = terms.getDictionaryRamBytesUsed();
}
termsRamUsage += dictionaryRamUsage;
return termsRamUsage;
}
/**
* Creates a virtual {@link FieldMetadata} that is the union of the given {@link FieldMetadata}s.
* Its {@link FieldMetadata#getFirstBlockStartFP}, {@link FieldMetadata#getLastBlockStartFP} and

@@ -185,11 +185,6 @@ public class TestTermBytesComparator extends LuceneTestCase {
@Override
public void close() {}
@Override
public long ramBytesUsed() {
return 0;
}
};
}
}

@@ -89,11 +89,6 @@ public class TestSTBlockReader extends LuceneTestCase {
public IndexDictionary.Browser get() throws IOException {
return indexDictionary.browser();
}
@Override
public long ramBytesUsed() {
return indexDictionary.ramBytesUsed();
}
};
}
@@ -304,11 +299,6 @@ public class TestSTBlockReader extends LuceneTestCase {
@Override
public void close() {}
@Override
public long ramBytesUsed() {
return 0;
}
};
}

@@ -24,14 +24,13 @@ import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.Accountable;
/**
* Abstract API that produces numeric, binary, sorted, sortedset, and sortednumeric docvalues.
*
* @lucene.experimental
*/
public abstract class DocValuesProducer implements Closeable, Accountable {
public abstract class DocValuesProducer implements Closeable {
/** Sole constructor. (For invocation by subclass constructors, typically implicit.) */
protected DocValuesProducer() {}

@@ -19,14 +19,13 @@ package org.apache.lucene.codecs;
import java.io.Closeable;
import java.io.IOException;
import org.apache.lucene.index.Fields;
import org.apache.lucene.util.Accountable;
/**
* Abstract API that produces terms, doc, freq, prox, offset and payloads postings.
*
* @lucene.experimental
*/
public abstract class FieldsProducer extends Fields implements Closeable, Accountable {
public abstract class FieldsProducer extends Fields implements Closeable {
/** Sole constructor. (For invocation by subclass constructors, typically implicit.) */
protected FieldsProducer() {}

@@ -179,11 +179,6 @@ public abstract class NormsConsumer implements Closeable {
@Override
public void close() {}
@Override
public long ramBytesUsed() {
return 0;
}
});
}
}

@@ -20,14 +20,13 @@ import java.io.Closeable;
import java.io.IOException;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.util.Accountable;
/**
* Abstract API that produces field normalization values
*
* @lucene.experimental
*/
public abstract class NormsProducer implements Closeable, Accountable {
public abstract class NormsProducer implements Closeable {
/** Sole constructor. (For invocation by subclass constructors, typically implicit.) */
protected NormsProducer() {}

@@ -57,11 +57,6 @@ public abstract class PointsFormat {
@Override
public void close() {}
@Override
public long ramBytesUsed() {
return 0L;
}
@Override
public void checkIntegrity() {}

@@ -19,14 +19,13 @@ package org.apache.lucene.codecs;
import java.io.Closeable;
import java.io.IOException;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.util.Accountable;
/**
* Abstract API to visit point values.
*
* @lucene.experimental
*/
public abstract class PointsReader implements Closeable, Accountable {
public abstract class PointsReader implements Closeable {
/** Sole constructor. (For invocation by subclass constructors, typically implicit.) */
protected PointsReader() {}

@@ -61,11 +61,6 @@ public abstract class PointsWriter implements Closeable {
fieldInfo,
new PointsReader() {
@Override
public long ramBytesUsed() {
return 0;
}
@Override
public void close() throws IOException {}

@@ -24,7 +24,6 @@ import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
/**
* The core terms dictionaries (BlockTermsReader, BlockTreeTermsReader) interact with a single
@@ -41,7 +40,7 @@ import org.apache.lucene.util.Accountable;
// TODO: find a better name; this defines the API that the
// terms dict impls use to talk to a postings impl.
// TermsDict + PostingsReader/WriterBase == PostingsConsumer/Producer
public abstract class PostingsReaderBase implements Closeable, Accountable {
public abstract class PostingsReaderBase implements Closeable {
/** Sole constructor. (For invocation by subclass constructors, typically implicit.) */
protected PostingsReaderBase() {}

@@ -19,7 +19,6 @@ package org.apache.lucene.codecs;
import java.io.Closeable;
import java.io.IOException;
import org.apache.lucene.index.StoredFieldVisitor;
import org.apache.lucene.util.Accountable;
/**
* Codec API for reading stored fields.
@@ -30,7 +29,7 @@ import org.apache.lucene.util.Accountable;
*
* @lucene.experimental
*/
public abstract class StoredFieldsReader implements Cloneable, Closeable, Accountable {
public abstract class StoredFieldsReader implements Cloneable, Closeable {
/** Sole constructor. (For invocation by subclass constructors, typically implicit.) */
protected StoredFieldsReader() {}

@@ -20,14 +20,13 @@ import java.io.Closeable;
import java.io.IOException;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; // javadocs
import org.apache.lucene.index.Fields;
import org.apache.lucene.util.Accountable;
/**
* Codec API for reading term vectors:
*
* @lucene.experimental
*/
public abstract class TermVectorsReader implements Cloneable, Closeable, Accountable {
public abstract class TermVectorsReader implements Cloneable, Closeable {
/** Sole constructor. (For invocation by subclass constructors, typically implicit.) */
protected TermVectorsReader() {}

@@ -45,7 +45,6 @@ import org.apache.lucene.store.RandomAccessInput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LongValues;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.compress.LZ4;
import org.apache.lucene.util.packed.DirectMonotonicReader;
import org.apache.lucene.util.packed.DirectReader;
@@ -57,7 +56,6 @@ final class Lucene90DocValuesProducer extends DocValuesProducer {
private final Map<String, SortedEntry> sorted = new HashMap<>();
private final Map<String, SortedSetEntry> sortedSets = new HashMap<>();
private final Map<String, SortedNumericEntry> sortedNumerics = new HashMap<>();
private long ramBytesUsed;
private final IndexInput data;
private final int maxDoc;
private int version = -1;
@@ -73,7 +71,6 @@ final class Lucene90DocValuesProducer extends DocValuesProducer {
String metaName =
IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
this.maxDoc = state.segmentInfo.maxDoc();
ramBytesUsed = RamUsageEstimator.shallowSizeOfInstance(getClass());
// read in the entries from the metadata file.
try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName, state.context)) {
@@ -184,7 +181,6 @@ final class Lucene90DocValuesProducer extends DocValuesProducer {
}
if (tableSize >= 0) {
entry.table = new long[tableSize];
ramBytesUsed += RamUsageEstimator.sizeOf(entry.table);
for (int i = 0; i < tableSize; ++i) {
entry.table[i] = meta.readLong();
}
@@ -229,7 +225,6 @@ final class Lucene90DocValuesProducer extends DocValuesProducer {
final int blockShift = meta.readVInt();
entry.addressesMeta = DirectMonotonicReader.loadMeta(meta, numAddresses, blockShift);
ramBytesUsed += entry.addressesMeta.ramBytesUsed();
entry.addressesLength = meta.readLong();
}
return entry;
@@ -273,7 +268,6 @@ final class Lucene90DocValuesProducer extends DocValuesProducer {
final int blockShift = meta.readVInt();
entry.addressesMeta =
DirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1, blockShift);
ramBytesUsed += entry.addressesMeta.ramBytesUsed();
entry.addressesLength = meta.readLong();
readTermDict(meta, entry);
return entry;
@@ -322,7 +316,6 @@ final class Lucene90DocValuesProducer extends DocValuesProducer {
final int blockShift = meta.readVInt();
entry.addressesMeta =
DirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1, blockShift);
ramBytesUsed += entry.addressesMeta.ramBytesUsed();
entry.addressesLength = meta.readLong();
}
return entry;
@@ -421,11 +414,6 @@ final class Lucene90DocValuesProducer extends DocValuesProducer {
long addressesLength;
}
@Override
public long ramBytesUsed() {
return ramBytesUsed;
}
@Override
public NumericDocValues getNumeric(FieldInfo field) throws IOException {
NumericEntry entry = numerics.get(field.name);

@@ -475,11 +475,6 @@ final class Lucene90NormsProducer extends NormsProducer implements Cloneable {
data.close();
}
@Override
public long ramBytesUsed() {
return 64L * norms.size(); // good enough
}
@Override
public void checkIntegrity() throws IOException {
CodecUtil.checksumEntireFile(data);

@@ -138,11 +138,6 @@ public class Lucene90PointsReader extends PointsReader {
return readers.get(fieldInfo.number);
}
@Override
public long ramBytesUsed() {
return 0L;
}
@Override
public void checkIntegrity() throws IOException {
CodecUtil.checksumEntireFile(indexIn);

@@ -45,7 +45,6 @@ import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BitUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.RamUsageEstimator;
/**
* Concrete class that reads docId(maybe frq,pos,offset,payloads) list with postings format.
@@ -54,9 +53,6 @@ import org.apache.lucene.util.RamUsageEstimator;
*/
public final class Lucene90PostingsReader extends PostingsReaderBase {
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(Lucene90PostingsReader.class);
private final IndexInput docIn;
private final IndexInput posIn;
private final IndexInput payIn;
@@ -2047,11 +2043,6 @@ public final class Lucene90PostingsReader extends PostingsReaderBase {
}
}
@Override
public long ramBytesUsed() {
return BASE_RAM_BYTES_USED;
}
@Override
public void checkIntegrity() throws IOException {
if (docIn != null) {

@@ -17,18 +17,13 @@
package org.apache.lucene.codecs.lucene90.blocktree;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.automaton.CompiledAutomaton;
import org.apache.lucene.util.fst.ByteSequenceOutputs;
import org.apache.lucene.util.fst.FST;
@ -39,14 +34,10 @@ import org.apache.lucene.util.fst.OffHeapFSTStore;
*
* @lucene.internal
*/
public final class FieldReader extends Terms implements Accountable {
public final class FieldReader extends Terms {
// private final boolean DEBUG = BlockTreeTermsWriter.DEBUG;
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(FieldReader.class)
+ 3 * RamUsageEstimator.shallowSizeOfInstance(BytesRef.class);
final long numTerms;
final FieldInfo fieldInfo;
final long sumTotalTermFreq;
@ -196,20 +187,6 @@ public final class FieldReader extends Terms implements Accountable {
this, compiled.automaton, compiled.runAutomaton, compiled.commonSuffixRef, startTerm);
}
@Override
public long ramBytesUsed() {
return BASE_RAM_BYTES_USED + ((index != null) ? index.ramBytesUsed() : 0);
}
@Override
public Collection<Accountable> getChildResources() {
if (index == null) {
return Collections.emptyList();
} else {
return Collections.singleton(Accountables.namedAccountable("term index", index));
}
}
@Override
public String toString() {
return "BlockTreeTerms(seg="

View File

@ -18,7 +18,6 @@ package org.apache.lucene.codecs.lucene90.blocktree;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
@ -35,8 +34,6 @@ import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.fst.ByteSequenceOutputs;
@ -321,23 +318,6 @@ public final class Lucene90BlockTreeTermsReader extends FieldsProducer {
}
}
@Override
public long ramBytesUsed() {
long sizeInBytes = postingsReader.ramBytesUsed();
for (FieldReader reader : fieldMap.values()) {
sizeInBytes += reader.ramBytesUsed();
}
return sizeInBytes;
}
@Override
public Collection<Accountable> getChildResources() {
List<Accountable> resources =
new ArrayList<>(Accountables.namedAccountables("field", fieldMap));
resources.add(Accountables.namedAccountable("delegate", postingsReader));
return Collections.unmodifiableList(resources);
}
@Override
public void checkIntegrity() throws IOException {
// terms index

View File

@ -18,9 +18,8 @@ package org.apache.lucene.codecs.lucene90.compressing;
import java.io.Closeable;
import java.io.IOException;
import org.apache.lucene.util.Accountable;
abstract class FieldsIndex implements Accountable, Cloneable, Closeable {
abstract class FieldsIndex implements Cloneable, Closeable {
/** Get the start pointer for the block that contains the given docID. */
abstract long getStartPointer(int docID);

View File

@ -28,14 +28,10 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.RandomAccessInput;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.DirectMonotonicReader;
final class FieldsIndexReader extends FieldsIndex {
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(FieldsIndexReader.class);
private final int maxDoc;
private final int blockShift;
private final int numChunks;
@ -111,15 +107,6 @@ final class FieldsIndexReader extends FieldsIndex {
startPointers = DirectMonotonicReader.getInstance(startPointersMeta, startPointersSlice);
}
@Override
public long ramBytesUsed() {
return BASE_RAM_BYTES_USED
+ docsMeta.ramBytesUsed()
+ startPointersMeta.ramBytesUsed()
+ docs.ramBytesUsed()
+ startPointers.ramBytesUsed();
}
@Override
public void close() throws IOException {
indexInput.close();

View File

@ -40,8 +40,6 @@ import static org.apache.lucene.codecs.lucene90.compressing.Lucene90CompressingS
import java.io.EOFException;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.StoredFieldsReader;
import org.apache.lucene.codecs.compressing.CompressionMode;
@ -60,8 +58,6 @@ import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BitUtil;
import org.apache.lucene.util.BytesRef;
@ -711,16 +707,6 @@ public final class Lucene90CompressingStoredFieldsReader extends StoredFieldsReader {
return numDocs;
}
@Override
public long ramBytesUsed() {
return indexReader.ramBytesUsed();
}
@Override
public Collection<Accountable> getChildResources() {
return Collections.singleton(Accountables.namedAccountable("stored field index", indexReader));
}
@Override
public void checkIntegrity() throws IOException {
indexReader.checkIntegrity();

View File

@ -30,8 +30,6 @@ import static org.apache.lucene.codecs.lucene90.compressing.Lucene90CompressingT
import static org.apache.lucene.codecs.lucene90.compressing.Lucene90CompressingTermVectorsWriter.VERSION_START;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.NoSuchElementException;
import org.apache.lucene.codecs.CodecUtil;
@ -56,8 +54,6 @@ import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
@ -1306,16 +1302,6 @@ public final class Lucene90CompressingTermVectorsReader extends TermVectorsReader {
return sum;
}
@Override
public long ramBytesUsed() {
return indexReader.ramBytesUsed();
}
@Override
public Collection<Accountable> getChildResources() {
return Collections.singleton(Accountables.namedAccountable("term vector index", indexReader));
}
@Override
public void checkIntegrity() throws IOException {
indexReader.checkIntegrity();

View File

@ -39,8 +39,6 @@ import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.IOUtils;
/**
@ -360,20 +358,6 @@ public abstract class PerFieldDocValuesFormat extends DocValuesFormat {
IOUtils.close(formats.values());
}
@Override
public long ramBytesUsed() {
long size = 0;
for (Map.Entry<String, DocValuesProducer> entry : formats.entrySet()) {
size += (entry.getKey().length() * Character.BYTES) + entry.getValue().ramBytesUsed();
}
return size;
}
@Override
public Collection<Accountable> getChildResources() {
return Accountables.namedAccountables("format", formats);
}
@Override
public void checkIntegrity() throws IOException {
for (DocValuesProducer format : formats.values()) {

View File

@ -251,11 +251,6 @@ final class PerFieldMergeState {
this.filtered = new ArrayList<>(filterFields);
}
@Override
public long ramBytesUsed() {
return in.ramBytesUsed();
}
@Override
public Iterator<String> iterator() {
return filtered.iterator();

View File

@ -20,7 +20,6 @@ import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
@ -43,11 +42,8 @@ import org.apache.lucene.index.MergeState;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.MergedIterator;
import org.apache.lucene.util.RamUsageEstimator;
/**
* Enables per field postings support.
@ -282,9 +278,6 @@ public abstract class PerFieldPostingsFormat extends PostingsFormat {
private static class FieldsReader extends FieldsProducer {
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(FieldsReader.class);
private final Map<String, FieldsProducer> fields = new TreeMap<>();
private final Map<String, FieldsProducer> formats = new HashMap<>();
private final String segment;
@ -368,22 +361,6 @@ public abstract class PerFieldPostingsFormat extends PostingsFormat {
IOUtils.close(formats.values());
}
@Override
public long ramBytesUsed() {
long ramBytesUsed = BASE_RAM_BYTES_USED;
ramBytesUsed += fields.size() * 2L * RamUsageEstimator.NUM_BYTES_OBJECT_REF;
ramBytesUsed += formats.size() * 2L * RamUsageEstimator.NUM_BYTES_OBJECT_REF;
for (Map.Entry<String, FieldsProducer> entry : formats.entrySet()) {
ramBytesUsed += entry.getValue().ramBytesUsed();
}
return ramBytesUsed;
}
@Override
public Collection<Accountable> getChildResources() {
return Accountables.namedAccountables("format", formats);
}
@Override
public void checkIntegrity() throws IOException {
for (FieldsProducer producer : formats.values()) {
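Only the per-format aggregation above is removed; RamUsageEstimator itself is untouched by this commit. A minimal sketch of computing the same shallow sizes directly (the demo class name is hypothetical):

import org.apache.lucene.util.RamUsageEstimator;

public class ShallowSizeDemo {
  public static void main(String[] args) {
    // Shallow instance size, as the removed BASE_RAM_BYTES_USED constant used it.
    long base = RamUsageEstimator.shallowSizeOfInstance(java.util.TreeMap.class);
    // Two object references per map entry, as the removed accounting assumed.
    long perEntry = 2L * RamUsageEstimator.NUM_BYTES_OBJECT_REF;
    System.out.println(base + " bytes base + " + perEntry + " bytes per entry");
  }
}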

View File

@ -53,7 +53,6 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.Lock;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
@ -790,11 +789,6 @@ public final class CheckIndex implements Closeable {
}
msg(infoStream, "");
if (verbose) {
msg(infoStream, "detailed segment RAM usage: ");
msg(infoStream, Accountables.toString(reader));
}
} catch (Throwable t) {
if (failFast) {
throw IOUtils.rethrowAlways(t);

View File

@ -17,10 +17,6 @@
package org.apache.lucene.index;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import org.apache.lucene.codecs.DocValuesProducer;
import org.apache.lucene.codecs.FieldsProducer;
@ -29,11 +25,9 @@ import org.apache.lucene.codecs.PointsReader;
import org.apache.lucene.codecs.StoredFieldsReader;
import org.apache.lucene.codecs.TermVectorsReader;
import org.apache.lucene.codecs.VectorReader;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
/** LeafReader implemented by codec APIs. */
public abstract class CodecReader extends LeafReader implements Accountable {
public abstract class CodecReader extends LeafReader {
/** Sole constructor. (For invocation by subclass constructors, typically implicit.) */
protected CodecReader() {}
@ -227,87 +221,6 @@ public abstract class CodecReader extends LeafReader implements Accountable {
@Override
protected void doClose() throws IOException {}
@Override
public long ramBytesUsed() {
ensureOpen();
// terms/postings
long ramBytesUsed = getPostingsReader().ramBytesUsed();
// norms
if (getNormsReader() != null) {
ramBytesUsed += getNormsReader().ramBytesUsed();
}
// docvalues
if (getDocValuesReader() != null) {
ramBytesUsed += getDocValuesReader().ramBytesUsed();
}
// stored fields
if (getFieldsReader() != null) {
ramBytesUsed += getFieldsReader().ramBytesUsed();
}
// term vectors
if (getTermVectorsReader() != null) {
ramBytesUsed += getTermVectorsReader().ramBytesUsed();
}
// points
if (getPointsReader() != null) {
ramBytesUsed += getPointsReader().ramBytesUsed();
}
// vectors
if (getVectorReader() != null) {
ramBytesUsed += getVectorReader().ramBytesUsed();
}
return ramBytesUsed;
}
@Override
public Collection<Accountable> getChildResources() {
ensureOpen();
final List<Accountable> resources = new ArrayList<>(6);
// terms/postings
resources.add(Accountables.namedAccountable("postings", getPostingsReader()));
// norms
if (getNormsReader() != null) {
resources.add(Accountables.namedAccountable("norms", getNormsReader()));
}
// docvalues
if (getDocValuesReader() != null) {
resources.add(Accountables.namedAccountable("docvalues", getDocValuesReader()));
}
// stored fields
if (getFieldsReader() != null) {
resources.add(Accountables.namedAccountable("stored fields", getFieldsReader()));
}
// term vectors
if (getTermVectorsReader() != null) {
resources.add(Accountables.namedAccountable("term vectors", getTermVectorsReader()));
}
// points
if (getPointsReader() != null) {
resources.add(Accountables.namedAccountable("points", getPointsReader()));
}
// vectors
if (getVectorReader() != null) {
resources.add(Accountables.namedAccountable("vectors", getVectorReader()));
}
return Collections.unmodifiableList(resources);
}
@Override
public void checkIntegrity() throws IOException {
ensureOpen();
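With CodecReader no longer implementing Accountable, callers that summed per-leaf RAM should guard on the interface instead of casting. A minimal migration sketch, assuming a hypothetical helper class named ReaderRamEstimator:

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.Accountable;

final class ReaderRamEstimator {
  // Sum RAM only for leaves that still expose Accountable; SegmentReader
  // no longer does, so this typically returns 0 for default-codec indexes.
  static long estimate(IndexReader reader) {
    long bytes = 0;
    for (LeafReaderContext ctx : reader.leaves()) {
      if (ctx.reader() instanceof Accountable) {
        bytes += ((Accountable) ctx.reader()).ramBytesUsed();
      }
    }
    return bytes;
  }
}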

View File

@ -61,9 +61,4 @@ public abstract class EmptyDocValuesProducer extends DocValuesProducer {
public void close() {
throw new UnsupportedOperationException();
}
@Override
public long ramBytesUsed() {
throw new UnsupportedOperationException();
}
}

View File

@ -17,7 +17,6 @@
package org.apache.lucene.index;
import java.io.IOException;
import java.util.Collection;
import java.util.Objects;
import org.apache.lucene.codecs.DocValuesProducer;
import org.apache.lucene.codecs.FieldsProducer;
@ -26,7 +25,6 @@ import org.apache.lucene.codecs.PointsReader;
import org.apache.lucene.codecs.StoredFieldsReader;
import org.apache.lucene.codecs.TermVectorsReader;
import org.apache.lucene.codecs.VectorReader;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Bits;
/**
@ -126,16 +124,6 @@ public abstract class FilterCodecReader extends CodecReader {
in.doClose();
}
@Override
public long ramBytesUsed() {
return in.ramBytesUsed();
}
@Override
public Collection<Accountable> getChildResources() {
return in.getChildResources();
}
@Override
public void checkIntegrity() throws IOException {
in.checkIntegrity();

View File

@ -100,11 +100,6 @@ class NormValuesWriter {
@Override
public void close() {}
@Override
public long ramBytesUsed() {
return 0;
}
});
}

View File

@ -186,11 +186,6 @@ class PointValuesWriter {
throw new UnsupportedOperationException();
}
@Override
public long ramBytesUsed() {
return 0L;
}
@Override
public void close() {}
};

View File

@ -18,7 +18,6 @@ package org.apache.lucene.index;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.IdentityHashMap;
@ -27,9 +26,6 @@ import java.util.Map;
import java.util.Set;
import org.apache.lucene.codecs.DocValuesProducer;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.RamUsageEstimator;
/** Encapsulates multiple producers when there are docvalues updates as one producer */
// TODO: try to clean up close? no-op?
@ -37,11 +33,6 @@ import org.apache.lucene.util.RamUsageEstimator;
// producer?
class SegmentDocValuesProducer extends DocValuesProducer {
private static final long LONG_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(Long.class);
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(SegmentDocValuesProducer.class);
final Map<String, DocValuesProducer> dvProducersByField = new HashMap<>();
final Set<DocValuesProducer> dvProducers =
Collections.newSetFromMap(new IdentityHashMap<DocValuesProducer, Boolean>());
@ -146,27 +137,6 @@ class SegmentDocValuesProducer extends DocValuesProducer {
throw new UnsupportedOperationException(); // there is separate ref tracking
}
@Override
public long ramBytesUsed() {
long ramBytesUsed = BASE_RAM_BYTES_USED;
ramBytesUsed += dvGens.size() * LONG_RAM_BYTES_USED;
ramBytesUsed += dvProducers.size() * RamUsageEstimator.NUM_BYTES_OBJECT_REF;
ramBytesUsed += dvProducersByField.size() * 2 * RamUsageEstimator.NUM_BYTES_OBJECT_REF;
for (DocValuesProducer producer : dvProducers) {
ramBytesUsed += producer.ramBytesUsed();
}
return ramBytesUsed;
}
@Override
public Collection<Accountable> getChildResources() {
final List<Accountable> resources = new ArrayList<>(dvProducers.size());
for (Accountable producer : dvProducers) {
resources.add(Accountables.namedAccountable("delegate", producer));
}
return Collections.unmodifiableList(resources);
}
@Override
public String toString() {
return getClass().getSimpleName() + "(producers=" + dvProducers.size() + ")";

View File

@ -155,11 +155,6 @@ public final class SlowCodecReaderWrapper {
@Override
public void close() {}
@Override
public long ramBytesUsed() {
return 0;
}
};
}
@ -200,11 +195,6 @@ public final class SlowCodecReaderWrapper {
@Override
public void close() {}
@Override
public long ramBytesUsed() {
return 0;
}
};
}
@ -243,11 +233,6 @@ public final class SlowCodecReaderWrapper {
@Override
public void close() {}
@Override
public long ramBytesUsed() {
return 0;
}
};
}
@ -270,11 +255,6 @@ public final class SlowCodecReaderWrapper {
@Override
public void close() {}
@Override
public long ramBytesUsed() {
return 0;
}
};
}
@ -297,11 +277,6 @@ public final class SlowCodecReaderWrapper {
@Override
public void close() {}
@Override
public long ramBytesUsed() {
return 0;
}
};
}
@ -336,11 +311,6 @@ public final class SlowCodecReaderWrapper {
@Override
public void close() {}
@Override
public long ramBytesUsed() {
return 0;
}
};
}
}

View File

@ -235,11 +235,6 @@ public final class SortingCodecReader extends FilterCodecReader {
public int size() {
return postingsReader.size();
}
@Override
public long ramBytesUsed() {
return postingsReader.ramBytesUsed();
}
};
}
@ -270,11 +265,6 @@ public final class SortingCodecReader extends FilterCodecReader {
public void close() throws IOException {
delegate.close();
}
@Override
public long ramBytesUsed() {
return delegate.ramBytesUsed();
}
};
}
@ -306,11 +296,6 @@ public final class SortingCodecReader extends FilterCodecReader {
public void close() throws IOException {
delegate.close();
}
@Override
public long ramBytesUsed() {
return delegate.ramBytesUsed();
}
};
}
@ -359,11 +344,6 @@ public final class SortingCodecReader extends FilterCodecReader {
public void close() throws IOException {
delegate.close();
}
@Override
public long ramBytesUsed() {
return delegate.ramBytesUsed();
}
};
}
@ -439,11 +419,6 @@ public final class SortingCodecReader extends FilterCodecReader {
public void close() throws IOException {
delegate.close();
}
@Override
public long ramBytesUsed() {
return delegate.ramBytesUsed();
}
};
}
@ -486,11 +461,6 @@ public final class SortingCodecReader extends FilterCodecReader {
public void close() throws IOException {
delegate.close();
}
@Override
public long ramBytesUsed() {
return delegate.ramBytesUsed();
}
};
}

View File

@ -834,11 +834,6 @@ public class TestCodecs extends LuceneTestCase {
NormsProducer fakeNorms =
new NormsProducer() {
@Override
public long ramBytesUsed() {
return 0;
}
@Override
public void close() throws IOException {}

View File

@ -17,7 +17,6 @@
package org.apache.lucene.index;
import java.io.IOException;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.apache.lucene.analysis.MockAnalyzer;
@ -27,7 +26,6 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
@ -232,16 +230,6 @@ public class TestMultiTermsEnum extends LuceneTestCase {
delegate.checkIntegrity();
}
@Override
public long ramBytesUsed() {
return delegate.ramBytesUsed();
}
@Override
public Collection<Accountable> getChildResources() {
return delegate.getChildResources();
}
@Override
public void close() throws IOException {
delegate.close();

View File

@ -35,11 +35,9 @@ import org.apache.lucene.index.CorruptIndexException; // javadocs
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiTerms;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.SegmentReader;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Accountable;
@ -369,9 +367,6 @@ public class DirectoryTaxonomyReader extends TaxonomyReader implements Accountable {
public synchronized long ramBytesUsed() {
ensureOpen();
long ramBytesUsed = 0;
for (LeafReaderContext ctx : indexReader.leaves()) {
ramBytesUsed += ((SegmentReader) ctx.reader()).ramBytesUsed();
}
if (taxoArrays != null) {
ramBytesUsed += taxoArrays.ramBytesUsed();
}
@ -389,11 +384,6 @@ public class DirectoryTaxonomyReader extends TaxonomyReader implements Accountable {
@Override
public synchronized Collection<Accountable> getChildResources() {
final List<Accountable> resources = new ArrayList<>();
long ramBytesUsed = 0;
for (LeafReaderContext ctx : indexReader.leaves()) {
ramBytesUsed += ((SegmentReader) ctx.reader()).ramBytesUsed();
}
resources.add(Accountables.namedAccountable("indexReader", ramBytesUsed));
if (taxoArrays != null) {
resources.add(Accountables.namedAccountable("taxoArrays", taxoArrays));
}
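How the method reads after this hunk (a sketch; the accounting that follows taxoArrays in the full source is elided by the diff context):

@Override
public synchronized long ramBytesUsed() {
  ensureOpen();
  long ramBytesUsed = 0; // index segments are assumed to use no heap now
  if (taxoArrays != null) {
    ramBytesUsed += taxoArrays.ramBytesUsed();
  }
  // ... any remaining taxonomy-side structures continue to be counted
  return ramBytesUsed;
}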

View File

@ -561,6 +561,7 @@ public class TestDirectoryTaxonomyReader extends FacetTestCase {
taxoWriter.close();
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(dir);
taxoReader.getParallelTaxonomyArrays(); // increases memory usage as a side-effect
assertTrue(taxoReader.ramBytesUsed() > 0);
assertTrue(taxoReader.getChildResources().size() > 0);
taxoReader.close();

View File

@ -98,11 +98,6 @@ final class IDVersionPostingsReader extends PostingsReaderBase {
"Should never be called, IDVersionSegmentTermsEnum implements impacts directly");
}
@Override
public long ramBytesUsed() {
return 0;
}
@Override
public void checkIntegrity() throws IOException {}

View File

@ -17,11 +17,8 @@
package org.apache.lucene.sandbox.codecs.idversion;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.TreeMap;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.FieldsProducer;
@ -32,8 +29,6 @@ import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.fst.PairOutputs.Pair;
@ -260,22 +255,6 @@ public final class VersionBlockTreeTermsReader extends FieldsProducer {
}
}
@Override
public long ramBytesUsed() {
long sizeInBytes = postingsReader.ramBytesUsed();
for (VersionFieldReader reader : fields.values()) {
sizeInBytes += reader.ramBytesUsed();
}
return sizeInBytes;
}
@Override
public Collection<Accountable> getChildResources() {
List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", fields));
resources.add(Accountables.namedAccountable("delegate", postingsReader));
return Collections.unmodifiableList(resources);
}
@Override
public void checkIntegrity() throws IOException {
// term dictionary

View File

@ -17,23 +17,19 @@
package org.apache.lucene.sandbox.codecs.idversion;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.PairOutputs.Pair;
/** BlockTree's implementation of {@link Terms}. */
// public for CheckIndex:
final class VersionFieldReader extends Terms implements Accountable {
final class VersionFieldReader extends Terms {
final long numTerms;
final FieldInfo fieldInfo;
final long sumTotalTermFreq;
@ -173,20 +169,6 @@ final class VersionFieldReader extends Terms implements Accountable {
return docCount;
}
@Override
public long ramBytesUsed() {
return ((index != null) ? index.ramBytesUsed() : 0);
}
@Override
public Collection<Accountable> getChildResources() {
if (index == null) {
return Collections.emptyList();
} else {
return Collections.singletonList(Accountables.namedAccountable("term index", index));
}
}
@Override
public String toString() {
return "IDVersionTerms(terms="

View File

@ -21,8 +21,6 @@ import java.io.IOException;
import java.io.StringReader;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@ -44,15 +42,12 @@ import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.SegmentReader;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
@ -74,10 +69,7 @@ import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator;
// TODO:
// - a PostingsFormat that stores super-high-freq terms as
@ -946,58 +938,7 @@ public class AnalyzingInfixSuggester extends Lookup implements Closeable {
@Override
public long ramBytesUsed() {
long mem = RamUsageEstimator.shallowSizeOf(this);
try {
if (searcherMgr != null) {
SearcherManager mgr;
IndexSearcher searcher;
synchronized (searcherMgrLock) {
mgr = searcherMgr; // acquire & release on same SearcherManager, via local reference
searcher = mgr.acquire();
}
try {
for (LeafReaderContext context : searcher.getIndexReader().leaves()) {
LeafReader reader = FilterLeafReader.unwrap(context.reader());
if (reader instanceof SegmentReader) {
mem += ((SegmentReader) context.reader()).ramBytesUsed();
}
}
} finally {
mgr.release(searcher);
}
}
return mem;
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
}
@Override
public Collection<Accountable> getChildResources() {
List<Accountable> resources = new ArrayList<>();
try {
if (searcherMgr != null) {
SearcherManager mgr;
IndexSearcher searcher;
synchronized (searcherMgrLock) {
mgr = searcherMgr; // acquire & release on same SearcherManager, via local reference
searcher = mgr.acquire();
}
try {
for (LeafReaderContext context : searcher.getIndexReader().leaves()) {
LeafReader reader = FilterLeafReader.unwrap(context.reader());
if (reader instanceof SegmentReader) {
resources.add(Accountables.namedAccountable("segment", (SegmentReader) reader));
}
}
} finally {
mgr.release(searcher);
}
}
return Collections.unmodifiableList(resources);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
return 0L;
}
@Override
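Reading the hunk above: the whole SearcherManager walk over SegmentReader instances is removed, and the new body simply reports zero. A reconstruction from the surviving "return 0L;" line in the hunk:

@Override
public long ramBytesUsed() {
  return 0L;
}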

View File

@ -53,7 +53,7 @@ import org.apache.lucene.util.IOUtils;
* <p>NOTE: Only the footer is validated for Completion dictionary (.lkp) and not the checksum due
* to random access pattern and checksum validation being too costly at instantiation
*/
final class CompletionFieldsProducer extends FieldsProducer {
final class CompletionFieldsProducer extends FieldsProducer implements Accountable {
private FieldsProducer delegateFieldsProducer;
private Map<String, CompletionsTermsReader> readers;
@ -153,7 +153,7 @@ final class CompletionFieldsProducer extends FieldsProducer {
@Override
public long ramBytesUsed() {
long ramBytesUsed = delegateFieldsProducer.ramBytesUsed();
long ramBytesUsed = 0;
for (CompletionsTermsReader reader : readers.values()) {
ramBytesUsed += reader.ramBytesUsed();
}
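The method as it reads after this one-line change: the delegate postings producer no longer contributes, so only the completion terms readers (the suggest FSTs) are counted.

@Override
public long ramBytesUsed() {
  long ramBytesUsed = 0;
  for (CompletionsTermsReader reader : readers.values()) {
    ramBytesUsed += reader.ramBytesUsed();
  }
  return ramBytesUsed;
}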

View File

@ -19,7 +19,6 @@ package org.apache.lucene.codecs.asserting;
import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
import java.io.IOException;
import java.util.Collection;
import org.apache.lucene.codecs.DocValuesConsumer;
import org.apache.lucene.codecs.DocValuesFormat;
import org.apache.lucene.codecs.DocValuesProducer;
@ -33,7 +32,6 @@ import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LongBitSet;
@ -228,8 +226,6 @@ public class AssertingDocValuesFormat extends DocValuesFormat {
this.creationThread = Thread.currentThread();
// do a few simple checks on init
assert toString() != null;
assert ramBytesUsed() >= 0;
assert getChildResources() != null;
}
@Override
@ -293,20 +289,6 @@ public class AssertingDocValuesFormat extends DocValuesFormat {
in.close(); // close again
}
@Override
public long ramBytesUsed() {
long v = in.ramBytesUsed();
assert v >= 0;
return v;
}
@Override
public Collection<Accountable> getChildResources() {
Collection<Accountable> res = in.getChildResources();
TestUtil.checkReadOnly(res);
return res;
}
@Override
public void checkIntegrity() throws IOException {
in.checkIntegrity();

View File

@ -19,7 +19,6 @@ package org.apache.lucene.codecs.asserting;
import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
import java.io.IOException;
import java.util.Collection;
import org.apache.lucene.codecs.NormsConsumer;
import org.apache.lucene.codecs.NormsFormat;
import org.apache.lucene.codecs.NormsProducer;
@ -28,7 +27,6 @@ import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.TestUtil;
/** Just like the default but with additional asserts. */
@ -95,8 +93,6 @@ public class AssertingNormsFormat extends NormsFormat {
this.creationThread = Thread.currentThread();
// do a few simple checks on init
assert toString() != null;
assert ramBytesUsed() >= 0;
assert getChildResources() != null;
}
@Override
@ -116,20 +112,6 @@ public class AssertingNormsFormat extends NormsFormat {
in.close(); // close again
}
@Override
public long ramBytesUsed() {
long v = in.ramBytesUsed();
assert v >= 0;
return v;
}
@Override
public Collection<Accountable> getChildResources() {
Collection<Accountable> res = in.getChildResources();
TestUtil.checkReadOnly(res);
return res;
}
@Override
public void checkIntegrity() throws IOException {
in.checkIntegrity();

View File

@ -17,7 +17,6 @@
package org.apache.lucene.codecs.asserting;
import java.io.IOException;
import java.util.Collection;
import org.apache.lucene.codecs.PointsFormat;
import org.apache.lucene.codecs.PointsReader;
import org.apache.lucene.codecs.PointsWriter;
@ -27,7 +26,6 @@ import org.apache.lucene.index.MergeState;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.TestUtil;
/** Just like the default point format but with additional asserts. */
@ -72,8 +70,6 @@ public final class AssertingPointsFormat extends PointsFormat {
this.creationThread = Thread.currentThread();
// do a few simple checks on init
assert toString() != null;
assert ramBytesUsed() >= 0;
assert getChildResources() != null;
}
@Override
@ -94,20 +90,6 @@ public final class AssertingPointsFormat extends PointsFormat {
return new AssertingLeafReader.AssertingPointValues(values, maxDoc);
}
@Override
public long ramBytesUsed() {
long v = in.ramBytesUsed();
assert v >= 0;
return v;
}
@Override
public Collection<Accountable> getChildResources() {
Collection<Accountable> res = in.getChildResources();
TestUtil.checkReadOnly(res);
return res;
}
@Override
public void checkIntegrity() throws IOException {
in.checkIntegrity();

View File

@ -17,7 +17,6 @@
package org.apache.lucene.codecs.asserting;
import java.io.IOException;
import java.util.Collection;
import java.util.Iterator;
import org.apache.lucene.codecs.FieldsConsumer;
import org.apache.lucene.codecs.FieldsProducer;
@ -33,7 +32,6 @@ import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.TestUtil;
@ -63,8 +61,6 @@ public final class AssertingPostingsFormat extends PostingsFormat {
this.in = in;
// do a few simple checks on init
assert toString() != null;
assert ramBytesUsed() >= 0;
assert getChildResources() != null;
}
@Override
@ -91,20 +87,6 @@ public final class AssertingPostingsFormat extends PostingsFormat {
return in.size();
}
@Override
public long ramBytesUsed() {
long v = in.ramBytesUsed();
assert v >= 0;
return v;
}
@Override
public Collection<Accountable> getChildResources() {
Collection<Accountable> res = in.getChildResources();
TestUtil.checkReadOnly(res);
return res;
}
@Override
public void checkIntegrity() throws IOException {
in.checkIntegrity();

View File

@ -61,8 +61,6 @@ public class AssertingStoredFieldsFormat extends StoredFieldsFormat {
this.creationThread = Thread.currentThread();
// do a few simple checks on init
assert toString() != null;
assert ramBytesUsed() >= 0;
assert getChildResources() != null;
}
@Override
@ -84,20 +82,6 @@ public class AssertingStoredFieldsFormat extends StoredFieldsFormat {
return new AssertingStoredFieldsReader(in.clone(), maxDoc, false);
}
@Override
public long ramBytesUsed() {
long v = in.ramBytesUsed();
assert v >= 0;
return v;
}
@Override
public Collection<Accountable> getChildResources() {
Collection<Accountable> res = in.getChildResources();
TestUtil.checkReadOnly(res);
return res;
}
@Override
public void checkIntegrity() throws IOException {
in.checkIntegrity();

View File

@ -58,8 +58,6 @@ public class AssertingTermVectorsFormat extends TermVectorsFormat {
this.in = in;
// do a few simple checks on init
assert toString() != null;
assert ramBytesUsed() >= 0;
assert getChildResources() != null;
}
@Override
@ -79,20 +77,6 @@ public class AssertingTermVectorsFormat extends TermVectorsFormat {
return new AssertingTermVectorsReader(in.clone());
}
@Override
public long ramBytesUsed() {
long v = in.ramBytesUsed();
assert v >= 0;
return v;
}
@Override
public Collection<Accountable> getChildResources() {
Collection<Accountable> res = in.getChildResources();
TestUtil.checkReadOnly(res);
return res;
}
@Override
public void checkIntegrity() throws IOException {
in.checkIntegrity();

View File

@ -197,10 +197,5 @@ class CrankyPointsFormat extends PointsFormat {
throw new IOException("Fake IOException");
}
}
@Override
public long ramBytesUsed() {
return delegate.ramBytesUsed();
}
}
}

View File

@ -18,7 +18,6 @@ package org.apache.lucene.codecs.ramonly;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
@ -48,7 +47,6 @@ import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.IOUtils;
@ -88,25 +86,11 @@ public final class RAMOnlyPostingsFormat extends PostingsFormat {
@Override
public void close() {}
@Override
public long ramBytesUsed() {
long sizeInBytes = 0;
for (RAMField field : fieldToTerms.values()) {
sizeInBytes += field.ramBytesUsed();
}
return sizeInBytes;
}
@Override
public Collection<Accountable> getChildResources() {
return Accountables.namedAccountables("field", fieldToTerms);
}
@Override
public void checkIntegrity() throws IOException {}
}
static class RAMField extends Terms implements Accountable {
static class RAMField extends Terms {
final String field;
final SortedMap<String, RAMTerm> termToDocs = new TreeMap<>();
long sumTotalTermFreq;
@ -119,15 +103,6 @@ public final class RAMOnlyPostingsFormat extends PostingsFormat {
this.info = info;
}
@Override
public long ramBytesUsed() {
long sizeInBytes = 0;
for (RAMTerm term : termToDocs.values()) {
sizeInBytes += term.ramBytesUsed();
}
return sizeInBytes;
}
@Override
public long size() {
return termToDocs.size();
@ -175,7 +150,7 @@ public final class RAMOnlyPostingsFormat extends PostingsFormat {
}
}
static class RAMTerm implements Accountable {
static class RAMTerm {
final String term;
long totalTermFreq;
final List<RAMDoc> docs = new ArrayList<>();
@ -183,15 +158,6 @@ public final class RAMOnlyPostingsFormat extends PostingsFormat {
public RAMTerm(String term) {
this.term = term;
}
@Override
public long ramBytesUsed() {
long sizeInBytes = 0;
for (RAMDoc rDoc : docs) {
sizeInBytes += rDoc.ramBytesUsed();
}
return sizeInBytes;
}
}
static class RAMDoc implements Accountable {

View File

@ -391,9 +391,4 @@ public abstract class BaseFieldInfoFormatTestCase extends BaseIndexFileFormatTes
protected void addRandomFields(Document doc) {
doc.add(new StoredField("foobar", TestUtil.randomSimpleString(random())));
}
@Override
public void testRamBytesUsed() throws IOException {
assumeTrue("not applicable for this format", true);
}
}

View File

@ -28,7 +28,6 @@ import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
@ -47,7 +46,6 @@ import org.apache.lucene.codecs.StoredFieldsReader;
import org.apache.lucene.codecs.StoredFieldsWriter;
import org.apache.lucene.codecs.TermVectorsReader;
import org.apache.lucene.codecs.TermVectorsWriter;
import org.apache.lucene.codecs.mockrandom.MockRandomPostingsFormat;
import org.apache.lucene.codecs.simpletext.SimpleTextCodec;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@ -67,7 +65,6 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CloseableThreadLocal;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.RamUsageTester;
@ -287,72 +284,6 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
return true;
}
/** Test the accuracy of the ramBytesUsed estimations. */
@Nightly
public void testRamBytesUsed() throws IOException {
if (Codec.getDefault() instanceof RandomCodec) {
// this test relies on the fact that two segments will be written with
// the same codec so we need to disable MockRandomPF
final Set<String> avoidCodecs = new HashSet<>(((RandomCodec) Codec.getDefault()).avoidCodecs);
avoidCodecs.add(new MockRandomPostingsFormat().getName());
Codec.setDefault(new RandomCodec(random(), avoidCodecs));
}
Directory dir = applyCreatedVersionMajor(newDirectory());
IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter w = new IndexWriter(dir, cfg);
// we need to index enough documents so that constant overhead doesn't dominate
final int numDocs = atLeast(10000);
LeafReader reader1 = null;
for (int i = 0; i < numDocs; ++i) {
Document d = new Document();
addRandomFields(d);
w.addDocument(d);
if (i == 100) {
w.forceMerge(1);
w.commit();
reader1 = getOnlyLeafReader(DirectoryReader.open(dir));
}
}
w.forceMerge(1);
w.commit();
w.close();
LeafReader reader2 = getOnlyLeafReader(DirectoryReader.open(dir));
for (LeafReader reader : Arrays.asList(reader1, reader2)) {
new SimpleMergedSegmentWarmer(InfoStream.NO_OUTPUT).warm(reader);
}
long act1 = RamUsageTester.sizeOf(reader2, new Accumulator(reader2));
long act2 = RamUsageTester.sizeOf(reader1, new Accumulator(reader1));
final long measuredBytes = act1 - act2;
long reported1 = ((SegmentReader) reader2).ramBytesUsed();
long reported2 = ((SegmentReader) reader1).ramBytesUsed();
final long reportedBytes = reported1 - reported2;
final long absoluteError = Math.abs(measuredBytes - reportedBytes);
final double relativeError = (double) absoluteError / measuredBytes;
final String message =
String.format(
Locale.ROOT,
"RamUsageTester reports %d bytes but ramBytesUsed() returned %d (%.1f error). "
+ " [Measured: %d, %d. Reported: %d, %d]",
measuredBytes,
reportedBytes,
(100 * relativeError),
act1,
act2,
reported1,
reported2);
assertTrue(message, relativeError < 0.20d || absoluteError < 1000);
reader1.close();
reader2.close();
dir.close();
}
/** Calls close multiple times on closeable codec apis */
public void testMultiClose() throws IOException {
// first make a one doc index
@ -422,11 +353,6 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
@Override
public void close() throws IOException {}
@Override
public long ramBytesUsed() {
return 0;
}
@Override
public NumericDocValues getNorms(FieldInfo field) throws IOException {
if (field.hasNorms() == false) {
@ -585,11 +511,6 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
@Override
public void close() {}
@Override
public long ramBytesUsed() {
return 0;
}
});
IOUtils.close(consumer);
IOUtils.close(consumer);
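The removed nightly test compared ramBytesUsed() against reflective measurement. RamUsageTester in the test framework survives this commit, so deep, reflection-based sizing remains possible; a minimal standalone sketch (the demo class name is hypothetical):

import org.apache.lucene.util.RamUsageTester;

public class MeasureDemo {
  public static void main(String[] args) {
    long[] payload = new long[1024];
    // Deep, reflection-based size: ~8 KiB of longs plus the array header.
    long measured = RamUsageTester.sizeOf(payload);
    System.out.println(measured + " bytes");
  }
}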

View File

@ -656,9 +656,4 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
protected void addRandomFields(Document doc) {
doc.add(new StoredField("foobar", TestUtil.randomSimpleString(random())));
}
@Override
public void testRamBytesUsed() throws IOException {
assumeTrue("not applicable for this format", true);
}
}

View File

@ -738,11 +738,6 @@ public class RandomPostingsTester {
@Override
public void close() throws IOException {}
@Override
public long ramBytesUsed() {
return 0;
}
@Override
public NumericDocValues getNorms(FieldInfo field) throws IOException {
if (newFieldInfos.fieldInfo(field.number).hasNorms()) {

View File

@ -70,7 +70,6 @@ import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
@ -82,7 +81,6 @@ import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.MergeScheduler;
import org.apache.lucene.index.MultiTerms;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.SegmentReader;
import org.apache.lucene.index.SlowCodecReaderWrapper;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
@ -375,16 +373,6 @@ public final class TestUtil {
System.out.println(bos.toString(IOUtils.UTF_8));
}
LeafReader unwrapped = FilterLeafReader.unwrap(reader);
if (unwrapped instanceof SegmentReader) {
SegmentReader sr = (SegmentReader) unwrapped;
long bytesUsed = sr.ramBytesUsed();
if (sr.ramBytesUsed() < 0) {
throw new IllegalStateException("invalid ramBytesUsed for reader: " + bytesUsed);
}
assert Accountables.toString(sr) != null;
}
// FieldInfos should be cached at the reader and always return the same instance
if (reader.getFieldInfos() != reader.getFieldInfos()) {
throw new RuntimeException(