Added id cache size to nodes stats api

Also added bloom cache size to the node *rest* api
Closes #2264
This commit is contained in:
Martijn van Groningen 2012-09-13 17:33:16 +02:00
parent 86e88a86b8
commit afc99ac42c
9 changed files with 104 additions and 6 deletions

View File

@ -48,4 +48,8 @@ public class ExtTObjectIntHasMap<T> extends TObjectIntHashMap<T> {
int index = index(key);
return index < 0 ? null : (T) _set[index];
}
/**
 * Exposes the length of the internal {@code _values} slot array (total open-addressing
 * capacity, not the number of live entries). Used for RAM-usage estimation of the map.
 */
public int _valuesSize() {
return _values.length;
}
}

View File

@ -37,20 +37,22 @@ public class CacheStats implements Streamable, ToXContent {
long fieldEvictions;
long filterEvictions;
long filterCount;
long fieldSize = 0;
long filterSize = 0;
long bloomSize = 0;
long fieldSize;
long filterSize;
long bloomSize;
long idCacheSize;
public CacheStats() {
}
public CacheStats(long fieldEvictions, long filterEvictions, long fieldSize, long filterSize, long filterCount, long bloomSize) {
/**
 * Creates a stats snapshot with absolute values.
 *
 * @param fieldEvictions  number of field-data cache evictions
 * @param filterEvictions number of filter cache evictions
 * @param fieldSize       field-data cache size in bytes
 * @param filterSize      filter cache size in bytes
 * @param filterCount     number of entries in the filter cache
 * @param bloomSize       bloom cache size in bytes
 * @param idCacheSize     id cache size in bytes (new in this change)
 */
public CacheStats(long fieldEvictions, long filterEvictions, long fieldSize, long filterSize, long filterCount, long bloomSize, long idCacheSize) {
this.fieldEvictions = fieldEvictions;
this.filterEvictions = filterEvictions;
this.fieldSize = fieldSize;
this.filterSize = filterSize;
this.filterCount = filterCount;
this.bloomSize = bloomSize;
this.idCacheSize = idCacheSize;
}
public void add(CacheStats stats) {
@ -60,6 +62,7 @@ public class CacheStats implements Streamable, ToXContent {
this.filterSize += stats.filterSize;
this.filterCount += stats.filterCount;
this.bloomSize += stats.bloomSize;
this.idCacheSize += stats.idCacheSize;
}
public long fieldEvictions() {
@ -142,6 +145,22 @@ public class CacheStats implements Streamable, ToXContent {
return bloomSize();
}
/**
 * @return the id cache size in raw bytes
 */
public long idCacheSizeInBytes() {
return idCacheSize;
}
/**
 * JavaBean-style alias of {@link #idCacheSizeInBytes()}.
 */
public long getIdCacheSizeInBytes() {
return idCacheSizeInBytes();
}
/**
 * @return the id cache size wrapped as a {@link ByteSizeValue} for human-readable output
 */
public ByteSizeValue idCacheSize() {
return new ByteSizeValue(idCacheSize);
}
/**
 * JavaBean-style alias of {@link #idCacheSize()}.
 * Fixed: previously delegated to {@code bloomSize()} (copy-paste bug), which compiled —
 * both return {@link ByteSizeValue} — but reported the bloom cache size instead of the
 * id cache size to any caller using the getter.
 */
public ByteSizeValue getIdCacheSize() {
return idCacheSize();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Fields.CACHE);
@ -152,6 +171,10 @@ public class CacheStats implements Streamable, ToXContent {
builder.field(Fields.FILTER_EVICTIONS, filterEvictions);
builder.field(Fields.FILTER_SIZE, filterSize().toString());
builder.field(Fields.FILTER_SIZE_IN_BYTES, filterSize);
builder.field(Fields.BLOOM_SIZE, bloomSize().toString());
builder.field(Fields.BLOOM_SIZE_IN_BYTES, bloomSize);
builder.field(Fields.ID_CACHE_SIZE, idCacheSize().toString());
builder.field(Fields.ID_CACHE_SIZE_IN_BYTES, idCacheSize);
builder.endObject();
return builder;
}
@ -165,6 +188,10 @@ public class CacheStats implements Streamable, ToXContent {
static final XContentBuilderString FILTER_COUNT = new XContentBuilderString("filter_count");
static final XContentBuilderString FILTER_SIZE = new XContentBuilderString("filter_size");
static final XContentBuilderString FILTER_SIZE_IN_BYTES = new XContentBuilderString("filter_size_in_bytes");
static final XContentBuilderString BLOOM_SIZE = new XContentBuilderString("bloom_size");
static final XContentBuilderString BLOOM_SIZE_IN_BYTES = new XContentBuilderString("bloom_size_in_bytes");
static final XContentBuilderString ID_CACHE_SIZE = new XContentBuilderString("id_cache_size");
static final XContentBuilderString ID_CACHE_SIZE_IN_BYTES = new XContentBuilderString("id_cache_size_in_bytes");
}
public static CacheStats readCacheStats(StreamInput in) throws IOException {
@ -181,6 +208,7 @@ public class CacheStats implements Streamable, ToXContent {
filterSize = in.readVLong();
filterCount = in.readVLong();
bloomSize = in.readVLong();
idCacheSize = in.readVLong();
}
@Override
@ -191,5 +219,6 @@ public class CacheStats implements Streamable, ToXContent {
out.writeVLong(filterSize);
out.writeVLong(filterCount);
out.writeVLong(bloomSize);
out.writeVLong(idCacheSize);
}
}

View File

@ -85,7 +85,7 @@ public class IndexCache extends AbstractIndexComponent implements CloseableCompo
/**
 * Forces a refresh of the cached {@link CacheStats} snapshot and its timestamp.
 * Synchronized so concurrent invalidations produce a single consistent snapshot.
 */
public synchronized void invalidateCache() {
FilterCache.EntriesStats filterEntriesStats = filterCache.entriesStats();
// Build the snapshot once, including the id cache size; the previous duplicated
// assignment (the old 6-arg CacheStats call) was a dead store immediately overwritten.
latestCacheStats = new CacheStats(fieldDataCache.evictions(), filterCache.evictions(), fieldDataCache.sizeInBytes(), filterEntriesStats.sizeInBytes, filterEntriesStats.count, bloomCache.sizeInBytes(), idCache.sizeInBytes());
latestCacheStatsTimestamp = System.currentTimeMillis();
}
@ -93,7 +93,7 @@ public class IndexCache extends AbstractIndexComponent implements CloseableCompo
long timestamp = System.currentTimeMillis();
if ((timestamp - latestCacheStatsTimestamp) > refreshInterval.millis()) {
FilterCache.EntriesStats filterEntriesStats = filterCache.entriesStats();
latestCacheStats = new CacheStats(fieldDataCache.evictions(), filterCache.evictions(), fieldDataCache.sizeInBytes(), filterEntriesStats.sizeInBytes, filterEntriesStats.count, bloomCache.sizeInBytes());
latestCacheStats = new CacheStats(fieldDataCache.evictions(), filterCache.evictions(), fieldDataCache.sizeInBytes(), filterEntriesStats.sizeInBytes, filterEntriesStats.count, bloomCache.sizeInBytes(), idCache.sizeInBytes());
latestCacheStatsTimestamp = timestamp;
}
return latestCacheStats;

View File

@ -35,4 +35,7 @@ public interface IdCache extends IndexComponent, CloseableComponent, Iterable<Id
void refresh(IndexReader[] readers) throws Exception;
IdReaderCache reader(IndexReader reader);
long sizeInBytes();
}

View File

@ -33,4 +33,6 @@ public interface IdReaderCache {
HashedBytesArray parentIdByDoc(String type, int docId);
int docById(String type, HashedBytesArray id);
long sizeInBytes();
}

View File

@ -44,4 +44,8 @@ public interface IdReaderTypeCache {
*/
HashedBytesArray idByDoc(int docId);
/**
* @return The size in bytes for this particular instance
*/
long sizeInBytes();
}

View File

@ -218,6 +218,14 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
}
}
/**
 * Total memory footprint of this id cache: the sum of the footprints of
 * every per-reader cache currently held in {@code idReaders}.
 */
public long sizeInBytes() {
long total = 0;
for (SimpleIdReaderCache readerCache : idReaders.values()) {
total += readerCache.sizeInBytes();
}
return total;
}
private HashedBytesArray checkIfCanReuse(Map<Object, Map<String, TypeBuilder>> builders, HashedBytesArray idAsBytes) {
HashedBytesArray finalIdAsBytes;
// go over and see if we can reuse this id

View File

@ -66,6 +66,14 @@ public class SimpleIdReaderCache implements IdReaderCache {
return -1;
}
/**
 * Memory footprint of this reader cache: the sum of the footprints of all
 * per-type caches in {@code types}.
 */
public long sizeInBytes() {
long total = 0;
for (SimpleIdReaderTypeCache typeCache : types.values()) {
total += typeCache.sizeInBytes();
}
return total;
}
/**
* Returns an already stored instance if exists, if not, returns null;
*/

View File

@ -19,6 +19,8 @@
package org.elasticsearch.index.cache.id.simple;
import gnu.trove.impl.hash.TObjectHash;
import org.elasticsearch.common.RamUsage;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.trove.ExtTObjectIntHasMap;
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
@ -38,6 +40,8 @@ public class SimpleIdReaderTypeCache implements IdReaderTypeCache {
private final int[] parentIdsOrdinals;
private long sizeInBytes = -1;
public SimpleIdReaderTypeCache(String type, ExtTObjectIntHasMap<HashedBytesArray> idToDoc, HashedBytesArray[] docIdToId,
HashedBytesArray[] parentIdsValues, int[] parentIdsOrdinals) {
this.type = type;
@ -64,10 +68,46 @@ public class SimpleIdReaderTypeCache implements IdReaderTypeCache {
return docIdToId[docId];
}
/**
 * Memory footprint of this type cache in bytes.
 * Computed lazily on first call and memoized in the {@code sizeInBytes} field
 * ({@code -1} marks "not yet computed"). Not synchronized — concurrent first calls
 * may each run the computation; presumably harmless since the backing arrays are
 * not mutated after construction (NOTE(review): confirm no post-construction mutation).
 */
public long sizeInBytes() {
if (sizeInBytes == -1) {
sizeInBytes = computeSizeInBytes();
}
return sizeInBytes;
}
/**
 * Returns the already-interned id instance equal to the given id, or {@code null}
 * if this type cache does not hold it. Lets callers share a single
 * {@link HashedBytesArray} instance across segments instead of keeping duplicates.
 */
public HashedBytesArray canReuse(HashedBytesArray id) {
return idToDoc.key(id);
}
/**
 * Estimates the heap footprint of this type cache in bytes by walking the internal
 * structures: the id-to-doc Trove map, the doc-to-id array, and the parent-id arrays.
 * This is an estimate built from {@code RamUsage} constants, not an exact measurement.
 */
long computeSizeInBytes() {
long sizeInBytes = 0;
// Ignore type field
// sizeInBytes += ((type.length() * RamUsage.NUM_BYTES_CHAR) + (3 * RamUsage.NUM_BYTES_INT)) + RamUsage.NUM_BYTES_OBJECT_HEADER;
// The map's int value array: one int per slot (capacity, not live entries).
sizeInBytes += RamUsage.NUM_BYTES_ARRAY_HEADER + (idToDoc._valuesSize() * RamUsage.NUM_BYTES_INT);
// The map's key slot array: FREE/REMOVED sentinels cost only the reference;
// live keys are HashedBytesArray instances (header + byte content + cached hash int).
for (Object o : idToDoc._set) {
if (o == TObjectHash.FREE || o == TObjectHash.REMOVED) {
sizeInBytes += RamUsage.NUM_BYTES_OBJECT_REF;
} else {
HashedBytesArray bytesArray = (HashedBytesArray) o;
sizeInBytes += RamUsage.NUM_BYTES_OBJECT_HEADER + (bytesArray.length() + RamUsage.NUM_BYTES_INT);
}
}
// The docIdToId array contains references to idToDoc for this segment or other segments, so we can use OBJECT_REF
sizeInBytes += RamUsage.NUM_BYTES_ARRAY_HEADER + (RamUsage.NUM_BYTES_OBJECT_REF * docIdToId.length);
// Parent id values: null slots cost a reference; non-null slots are counted as
// owned instances (header + content + hash int).
for (HashedBytesArray bytesArray : parentIdsValues) {
if (bytesArray == null) {
sizeInBytes += RamUsage.NUM_BYTES_OBJECT_REF;
} else {
sizeInBytes += RamUsage.NUM_BYTES_OBJECT_HEADER + (bytesArray.length() + RamUsage.NUM_BYTES_INT);
}
}
// Ordinal array: one int per doc.
sizeInBytes += RamUsage.NUM_BYTES_ARRAY_HEADER + (RamUsage.NUM_BYTES_INT * parentIdsOrdinals.length);
return sizeInBytes;
}
}