Stats: Add more fine-grained memory stats from the Lucene segment reader.

This is a first step toward exposing the memory stats improvements from Lucene 5.0.
It adds the following categories of Lucene index structures to the index stats:
* Terms
* Stored fields
* Term Vectors
* Norms
* Doc values
Ryan Ernst 2014-12-08 14:58:32 -08:00
parent 150c2203ac
commit fde32cc599
3 changed files with 213 additions and 13 deletions
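
Each of the categories listed above becomes its own counter on SegmentsStats, reported both as a raw byte count and as a human-readable size under the segments section of the index stats. As a rough illustration (not part of the commit), reading the new values through the Java client could look like the sketch below; the class and index name are made up, while the stats calls mirror the test added at the end of this commit.

import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.engine.SegmentsStats;

public class SegmentMemoryReport {

    // Prints the per-category segment memory estimates for one index.
    // Assumes "client" is an already connected Client and the index exists.
    public static void print(Client client, String index) {
        IndicesStatsResponse rsp = client.admin().indices().prepareStats(index).get();
        SegmentsStats segments = rsp.getTotal().getSegments();
        System.out.println("terms:         " + segments.getTermsMemory());
        System.out.println("stored fields: " + segments.getStoredFieldsMemory());
        System.out.println("term vectors:  " + segments.getTermVectorsMemory());
        System.out.println("norms:         " + segments.getNormsMemory());
        System.out.println("doc values:    " + segments.getDocValuesMemory());
    }
}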

File: org/elasticsearch/index/engine/SegmentsStats.java

@@ -19,7 +19,6 @@
package org.elasticsearch.index.engine;
-import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
@@ -30,27 +29,47 @@ import org.elasticsearch.common.xcontent.XContentBuilderString;
import java.io.IOException;
/**
 *
 */
public class SegmentsStats implements Streamable, ToXContent {
    private long count;
    private long memoryInBytes;
+   private long termsMemoryInBytes;
+   private long storedFieldsMemoryInBytes;
+   private long termVectorsMemoryInBytes;
+   private long normsMemoryInBytes;
+   private long docValuesMemoryInBytes;
    private long indexWriterMemoryInBytes;
    private long indexWriterMaxMemoryInBytes;
    private long versionMapMemoryInBytes;
    private long bitsetMemoryInBytes;
-   public SegmentsStats() {
-   }
+   public SegmentsStats() {}
    public void add(long count, long memoryInBytes) {
        this.count += count;
        this.memoryInBytes += memoryInBytes;
    }
+   public void addTermsMemoryInBytes(long termsMemoryInBytes) {
+       this.termsMemoryInBytes += termsMemoryInBytes;
+   }
+   public void addStoredFieldsMemoryInBytes(long storedFieldsMemoryInBytes) {
+       this.storedFieldsMemoryInBytes += storedFieldsMemoryInBytes;
+   }
+   public void addTermVectorsMemoryInBytes(long termVectorsMemoryInBytes) {
+       this.termVectorsMemoryInBytes += termVectorsMemoryInBytes;
+   }
+   public void addNormsMemoryInBytes(long normsMemoryInBytes) {
+       this.normsMemoryInBytes += normsMemoryInBytes;
+   }
+   public void addDocValuesMemoryInBytes(long docValuesMemoryInBytes) {
+       this.docValuesMemoryInBytes += docValuesMemoryInBytes;
+   }
    public void addIndexWriterMemoryInBytes(long indexWriterMemoryInBytes) {
        this.indexWriterMemoryInBytes += indexWriterMemoryInBytes;
    }
@@ -72,6 +91,11 @@ public class SegmentsStats implements Streamable, ToXContent {
            return;
        }
        add(mergeStats.count, mergeStats.memoryInBytes);
+       addTermsMemoryInBytes(mergeStats.termsMemoryInBytes);
+       addStoredFieldsMemoryInBytes(mergeStats.storedFieldsMemoryInBytes);
+       addTermVectorsMemoryInBytes(mergeStats.termVectorsMemoryInBytes);
+       addNormsMemoryInBytes(mergeStats.normsMemoryInBytes);
+       addDocValuesMemoryInBytes(mergeStats.docValuesMemoryInBytes);
        addIndexWriterMemoryInBytes(mergeStats.indexWriterMemoryInBytes);
        addIndexWriterMaxMemoryInBytes(mergeStats.indexWriterMaxMemoryInBytes);
        addVersionMapMemoryInBytes(mergeStats.versionMapMemoryInBytes);
@@ -79,7 +103,7 @@ public class SegmentsStats implements Streamable, ToXContent {
    }
    /**
-    * The the segments count.
+    * The number of segments.
     */
    public long getCount() {
        return this.count;
@@ -96,6 +120,61 @@ public class SegmentsStats implements Streamable, ToXContent {
        return new ByteSizeValue(memoryInBytes);
    }
+   /**
+    * Estimation of the terms dictionary memory usage by a segment.
+    */
+   public long getTermsMemoryInBytes() {
+       return this.termsMemoryInBytes;
+   }
+   public ByteSizeValue getTermsMemory() {
+       return new ByteSizeValue(termsMemoryInBytes);
+   }
+   /**
+    * Estimation of the stored fields memory usage by a segment.
+    */
+   public long getStoredFieldsMemoryInBytes() {
+       return this.storedFieldsMemoryInBytes;
+   }
+   public ByteSizeValue getStoredFieldsMemory() {
+       return new ByteSizeValue(storedFieldsMemoryInBytes);
+   }
+   /**
+    * Estimation of the term vectors memory usage by a segment.
+    */
+   public long getTermVectorsMemoryInBytes() {
+       return this.termVectorsMemoryInBytes;
+   }
+   public ByteSizeValue getTermVectorsMemory() {
+       return new ByteSizeValue(termVectorsMemoryInBytes);
+   }
+   /**
+    * Estimation of the norms memory usage by a segment.
+    */
+   public long getNormsMemoryInBytes() {
+       return this.normsMemoryInBytes;
+   }
+   public ByteSizeValue getNormsMemory() {
+       return new ByteSizeValue(normsMemoryInBytes);
+   }
+   /**
+    * Estimation of the doc values memory usage by a segment.
+    */
+   public long getDocValuesMemoryInBytes() {
+       return this.docValuesMemoryInBytes;
+   }
+   public ByteSizeValue getDocValuesMemory() {
+       return new ByteSizeValue(docValuesMemoryInBytes);
+   }
    /**
     * Estimation of the memory usage by index writer
     */
@@ -151,6 +230,11 @@ public class SegmentsStats implements Streamable, ToXContent {
        builder.startObject(Fields.SEGMENTS);
        builder.field(Fields.COUNT, count);
        builder.byteSizeField(Fields.MEMORY_IN_BYTES, Fields.MEMORY, memoryInBytes);
+       builder.byteSizeField(Fields.TERMS_MEMORY_IN_BYTES, Fields.TERMS_MEMORY, termsMemoryInBytes);
+       builder.byteSizeField(Fields.STORED_FIELDS_MEMORY_IN_BYTES, Fields.STORED_FIELDS_MEMORY, storedFieldsMemoryInBytes);
+       builder.byteSizeField(Fields.TERM_VECTORS_MEMORY_IN_BYTES, Fields.TERM_VECTORS_MEMORY, termVectorsMemoryInBytes);
+       builder.byteSizeField(Fields.NORMS_MEMORY_IN_BYTES, Fields.NORMS_MEMORY, normsMemoryInBytes);
+       builder.byteSizeField(Fields.DOC_VALUES_MEMORY_IN_BYTES, Fields.DOC_VALUES_MEMORY, docValuesMemoryInBytes);
        builder.byteSizeField(Fields.INDEX_WRITER_MEMORY_IN_BYTES, Fields.INDEX_WRITER_MEMORY, indexWriterMemoryInBytes);
        builder.byteSizeField(Fields.INDEX_WRITER_MAX_MEMORY_IN_BYTES, Fields.INDEX_WRITER_MAX_MEMORY, indexWriterMaxMemoryInBytes);
        builder.byteSizeField(Fields.VERSION_MAP_MEMORY_IN_BYTES, Fields.VERSION_MAP_MEMORY, versionMapMemoryInBytes);
@@ -164,6 +248,16 @@ public class SegmentsStats implements Streamable, ToXContent {
        static final XContentBuilderString COUNT = new XContentBuilderString("count");
        static final XContentBuilderString MEMORY = new XContentBuilderString("memory");
        static final XContentBuilderString MEMORY_IN_BYTES = new XContentBuilderString("memory_in_bytes");
+       static final XContentBuilderString TERMS_MEMORY = new XContentBuilderString("terms_memory");
+       static final XContentBuilderString TERMS_MEMORY_IN_BYTES = new XContentBuilderString("terms_memory_in_bytes");
+       static final XContentBuilderString STORED_FIELDS_MEMORY = new XContentBuilderString("stored_fields_memory");
+       static final XContentBuilderString STORED_FIELDS_MEMORY_IN_BYTES = new XContentBuilderString("stored_fields_memory_in_bytes");
+       static final XContentBuilderString TERM_VECTORS_MEMORY = new XContentBuilderString("term_vectors_memory");
+       static final XContentBuilderString TERM_VECTORS_MEMORY_IN_BYTES = new XContentBuilderString("term_vectors_memory_in_bytes");
+       static final XContentBuilderString NORMS_MEMORY = new XContentBuilderString("norms_memory");
+       static final XContentBuilderString NORMS_MEMORY_IN_BYTES = new XContentBuilderString("norms_memory_in_bytes");
+       static final XContentBuilderString DOC_VALUES_MEMORY = new XContentBuilderString("doc_values_memory");
+       static final XContentBuilderString DOC_VALUES_MEMORY_IN_BYTES = new XContentBuilderString("doc_values_memory_in_bytes");
        static final XContentBuilderString INDEX_WRITER_MEMORY = new XContentBuilderString("index_writer_memory");
        static final XContentBuilderString INDEX_WRITER_MEMORY_IN_BYTES = new XContentBuilderString("index_writer_memory_in_bytes");
        static final XContentBuilderString INDEX_WRITER_MAX_MEMORY = new XContentBuilderString("index_writer_max_memory");
@@ -178,6 +272,11 @@ public class SegmentsStats implements Streamable, ToXContent {
    public void readFrom(StreamInput in) throws IOException {
        count = in.readVLong();
        memoryInBytes = in.readLong();
+       termsMemoryInBytes = in.readLong();
+       storedFieldsMemoryInBytes = in.readLong();
+       termVectorsMemoryInBytes = in.readLong();
+       normsMemoryInBytes = in.readLong();
+       docValuesMemoryInBytes = in.readLong();
        indexWriterMemoryInBytes = in.readLong();
        versionMapMemoryInBytes = in.readLong();
        indexWriterMaxMemoryInBytes = in.readLong();
@@ -188,6 +287,11 @@ public class SegmentsStats implements Streamable, ToXContent {
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVLong(count);
        out.writeLong(memoryInBytes);
+       out.writeLong(termsMemoryInBytes);
+       out.writeLong(storedFieldsMemoryInBytes);
+       out.writeLong(termVectorsMemoryInBytes);
+       out.writeLong(normsMemoryInBytes);
+       out.writeLong(docValuesMemoryInBytes);
        out.writeLong(indexWriterMemoryInBytes);
        out.writeLong(versionMapMemoryInBytes);
        out.writeLong(indexWriterMaxMemoryInBytes);
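
The new counters are plain accumulators: every category has an add*InBytes method, the aggregation path above folds one SegmentsStats into another, and readFrom/writeTo append the five fields to the wire format in the same order. A small self-contained sketch (illustration only, with invented byte counts) of how the counters accumulate across segments:

import org.elasticsearch.index.engine.SegmentsStats;

public class SegmentsStatsAccumulationExample {
    public static void main(String[] args) {
        SegmentsStats stats = new SegmentsStats();

        // first segment: ~4 KB total, of which 1 KB terms dictionary and 512 bytes stored fields
        stats.add(1, 4096);
        stats.addTermsMemoryInBytes(1024);
        stats.addStoredFieldsMemoryInBytes(512);

        // second segment: ~2 KB total, 768 bytes terms dictionary
        stats.add(1, 2048);
        stats.addTermsMemoryInBytes(768);

        assert stats.getCount() == 2;
        assert stats.getTermsMemoryInBytes() == 1024 + 768;
        assert stats.getStoredFieldsMemoryInBytes() == 512;
    }
}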

File: org/elasticsearch/index/engine/InternalEngine.java

@@ -26,6 +26,7 @@ import org.apache.lucene.search.*;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockObtainFailedException;
+import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
@@ -1214,8 +1215,11 @@ public class InternalEngine implements Engine {
        return t;
    }
-   private static long getReaderRamBytesUsed(LeafReaderContext reader) {
-       return segmentReader(reader.reader()).ramBytesUsed();
+   private long guardedRamBytesUsed(Accountable a) {
+       if (a == null) {
+           return 0;
+       }
+       return a.ramBytesUsed();
    }
    @Override
@@ -1225,7 +1229,13 @@ public class InternalEngine implements Engine {
        try (final Searcher searcher = acquireSearcher("segments_stats")) {
            SegmentsStats stats = new SegmentsStats();
            for (LeafReaderContext reader : searcher.reader().leaves()) {
-               stats.add(1, getReaderRamBytesUsed(reader));
+               final SegmentReader segmentReader = segmentReader(reader.reader());
+               stats.add(1, segmentReader.ramBytesUsed());
+               stats.addTermsMemoryInBytes(guardedRamBytesUsed(segmentReader.fields()));
+               stats.addStoredFieldsMemoryInBytes(guardedRamBytesUsed(segmentReader.getFieldsReader()));
+               stats.addTermVectorsMemoryInBytes(guardedRamBytesUsed(segmentReader.getTermVectorsReader()));
+               stats.addNormsMemoryInBytes(guardedRamBytesUsed(segmentReader.getNormsReader()));
+               stats.addDocValuesMemoryInBytes(guardedRamBytesUsed(segmentReader.getDocValuesReader()));
            }
            stats.addVersionMapMemoryInBytes(versionMap.ramBytesUsed());
            stats.addIndexWriterMemoryInBytes(indexWriter.ramBytesUsed());
@@ -1258,7 +1268,9 @@ public class InternalEngine implements Engine {
                } catch (IOException e) {
                    logger.trace("failed to get size for [{}]", e, info.info.name);
                }
-               segment.memoryInBytes = getReaderRamBytesUsed(reader);
+               final SegmentReader segmentReader = segmentReader(reader.reader());
+               segment.memoryInBytes = segmentReader.ramBytesUsed();
+               // TODO: add more fine grained mem stats values to per segment info here
                segments.put(info.info.name, segment);
            }
        } finally {
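
The guardedRamBytesUsed helper exists because a segment only has a term vectors, norms, or doc values reader when it actually stores that data; the corresponding sub-readers can be null, in which case the category simply contributes zero. The idiom in isolation, as a sketch rather than code from this commit (the class name is made up):

import org.apache.lucene.util.Accountable;

public final class RamUsageSketch {

    private RamUsageSketch() {}

    // Returns the estimated heap usage of an optional index structure,
    // treating a missing (null) structure as using no memory.
    public static long ramBytesUsedOrZero(Accountable accountable) {
        return accountable == null ? 0L : accountable.ramBytesUsed();
    }
}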

File: org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java (new)

@@ -0,0 +1,84 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.indices.stats;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.engine.SegmentsStats;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.*;
public class IndicesStatsTests extends ElasticsearchSingleNodeTest {
public void testSegmentStatsEmptyIndex() {
createIndex("test");
IndicesStatsResponse rsp = client().admin().indices().prepareStats("test").get();
SegmentsStats stats = rsp.getTotal().getSegments();
assertEquals(0, stats.getTermsMemoryInBytes());
assertEquals(0, stats.getStoredFieldsMemoryInBytes());
assertEquals(0, stats.getTermVectorsMemoryInBytes());
assertEquals(0, stats.getNormsMemoryInBytes());
assertEquals(0, stats.getDocValuesMemoryInBytes());
}
public void testSegmentStats() throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder()
.startObject()
.startObject("doc")
.startObject("properties")
.startObject("foo")
.field("type", "string")
.field("index", "not_analyzed")
.field("doc_values", true)
.field("store", true)
.field("term_vector", "with_positions_offsets_payloads")
.endObject()
.endObject()
.endObject()
.endObject();
assertAcked(client().admin().indices().prepareCreate("test").addMapping("doc", mapping));
ensureGreen("test");
client().prepareIndex("test", "doc", "1").setSource("foo", "bar").get();
client().admin().indices().prepareRefresh("test").get();
IndicesStatsResponse rsp = client().admin().indices().prepareStats("test").get();
SegmentsStats stats = rsp.getIndex("test").getTotal().getSegments();
assertThat(stats.getTermsMemoryInBytes(), greaterThan(0l));
assertThat(stats.getStoredFieldsMemoryInBytes(), greaterThan(0l));
assertThat(stats.getTermVectorsMemoryInBytes(), greaterThan(0l));
assertThat(stats.getNormsMemoryInBytes(), greaterThan(0l));
assertThat(stats.getDocValuesMemoryInBytes(), greaterThan(0l));
// now check multiple segments stats are merged together
client().prepareIndex("test", "doc", "2").setSource("foo", "bar").get();
client().admin().indices().prepareRefresh("test").get();
rsp = client().admin().indices().prepareStats("test").get();
SegmentsStats stats2 = rsp.getIndex("test").getTotal().getSegments();
assertThat(stats2.getTermsMemoryInBytes(), greaterThan(stats.getTermsMemoryInBytes()));
assertThat(stats2.getStoredFieldsMemoryInBytes(), greaterThan(stats.getStoredFieldsMemoryInBytes()));
assertThat(stats2.getTermVectorsMemoryInBytes(), greaterThan(stats.getTermVectorsMemoryInBytes()));
assertThat(stats2.getNormsMemoryInBytes(), greaterThan(stats.getNormsMemoryInBytes()));
assertThat(stats2.getDocValuesMemoryInBytes(), greaterThan(stats.getDocValuesMemoryInBytes()));
}
}