more usage of field data
Note: field data has been removed from the cache stats; it will get its own stats later on (reporting it under "cache" is really misleading).
parent de013babf8
commit af757fd821
@@ -127,10 +127,10 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastOperatio
                 if (request.fieldDataCache()) {
                     clearedAtLeastOne = true;
                     if (request.fields() == null || request.fields().length == 0) {
-                        service.cache().fieldData().clear("api");
+                        service.fieldData().clear();
                     } else {
                         for (String field : request.fields()) {
-                            service.cache().fieldData().clear("api", field);
+                            service.fieldData().clearField(field);
                         }
                     }
                 }
@@ -142,7 +142,7 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastOperatio
                 if (request.fields() != null && request.fields().length > 0) {
                     // only clear caches relating to the specified fields
                     for (String field : request.fields()) {
-                        service.cache().fieldData().clear("api", field);
+                        service.fieldData().clearField(field);
                     }
                 } else {
                     service.cache().clear("api");
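Not part of the diff, just a reading aid: the handler change above boils down to routing field data clearing away from the index cache. A minimal sketch of the two call paths, reusing the service variable from the hunks (everything else here is illustrative, not the actual handler code):

    // before: field data was cleared through the index cache
    // service.cache().fieldData().clear("api");
    // service.cache().fieldData().clear("api", field);

    // after: field data has its own service on the index
    service.fieldData().clear();            // drop all field data for the index
    service.fieldData().clearField(field);  // drop field data for one named field only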
@@ -34,42 +34,28 @@ import java.io.IOException;
  */
 public class CacheStats implements Streamable, ToXContent {

-    long fieldEvictions;
     long filterEvictions;
     long filterCount;
-    long fieldSize;
     long filterSize;
     long idCacheSize;

     public CacheStats() {
     }

-    public CacheStats(long fieldEvictions, long filterEvictions, long fieldSize, long filterSize, long filterCount, long idCacheSize) {
-        this.fieldEvictions = fieldEvictions;
+    public CacheStats(long filterEvictions, long filterSize, long filterCount, long idCacheSize) {
         this.filterEvictions = filterEvictions;
-        this.fieldSize = fieldSize;
         this.filterSize = filterSize;
         this.filterCount = filterCount;
         this.idCacheSize = idCacheSize;
     }

     public void add(CacheStats stats) {
-        this.fieldEvictions += stats.fieldEvictions;
         this.filterEvictions += stats.filterEvictions;
-        this.fieldSize += stats.fieldSize;
         this.filterSize += stats.filterSize;
         this.filterCount += stats.filterCount;
         this.idCacheSize += stats.idCacheSize;
     }

-    public long fieldEvictions() {
-        return this.fieldEvictions;
-    }
-
-    public long getFieldEvictions() {
-        return this.fieldEvictions();
-    }
-
     public long filterEvictions() {
         return this.filterEvictions;
     }
@@ -94,22 +80,6 @@ public class CacheStats implements Streamable, ToXContent {
         return filterCount;
     }

-    public long fieldSizeInBytes() {
-        return this.fieldSize;
-    }
-
-    public long getFieldSizeInBytes() {
-        return fieldSizeInBytes();
-    }
-
-    public ByteSizeValue fieldSize() {
-        return new ByteSizeValue(fieldSize);
-    }
-
-    public ByteSizeValue getFieldSize() {
-        return this.fieldSize();
-    }
-
     public long filterSizeInBytes() {
         return this.filterSize;
     }
@@ -145,9 +115,6 @@ public class CacheStats implements Streamable, ToXContent {
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(Fields.CACHE);
-        builder.field(Fields.FIELD_EVICTIONS, fieldEvictions);
-        builder.field(Fields.FIELD_SIZE, fieldSize().toString());
-        builder.field(Fields.FIELD_SIZE_IN_BYTES, fieldSize);
         builder.field(Fields.FILTER_COUNT, filterCount);
         builder.field(Fields.FILTER_EVICTIONS, filterEvictions);
         builder.field(Fields.FILTER_SIZE, filterSize().toString());
@@ -160,9 +127,6 @@ public class CacheStats implements Streamable, ToXContent {

     static final class Fields {
         static final XContentBuilderString CACHE = new XContentBuilderString("cache");
-        static final XContentBuilderString FIELD_SIZE = new XContentBuilderString("field_size");
-        static final XContentBuilderString FIELD_SIZE_IN_BYTES = new XContentBuilderString("field_size_in_bytes");
-        static final XContentBuilderString FIELD_EVICTIONS = new XContentBuilderString("field_evictions");
         static final XContentBuilderString FILTER_EVICTIONS = new XContentBuilderString("filter_evictions");
         static final XContentBuilderString FILTER_COUNT = new XContentBuilderString("filter_count");
         static final XContentBuilderString FILTER_SIZE = new XContentBuilderString("filter_size");
@@ -179,9 +143,7 @@ public class CacheStats implements Streamable, ToXContent {

     @Override
     public void readFrom(StreamInput in) throws IOException {
-        fieldEvictions = in.readVLong();
         filterEvictions = in.readVLong();
-        fieldSize = in.readVLong();
         filterSize = in.readVLong();
         filterCount = in.readVLong();
         idCacheSize = in.readVLong();
@@ -189,9 +151,7 @@ public class CacheStats implements Streamable, ToXContent {

     @Override
     public void writeTo(StreamOutput out) throws IOException {
-        out.writeVLong(fieldEvictions);
         out.writeVLong(filterEvictions);
-        out.writeVLong(fieldSize);
         out.writeVLong(filterSize);
         out.writeVLong(filterCount);
         out.writeVLong(idCacheSize);
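With the field data counters gone, CacheStats only carries filter and id cache figures, and the stream layout shrinks accordingly. A rough sketch of how the slimmed-down object is built and serialized, based on the constructor and writeTo order above (the surrounding variables mirror the IndexCache hunks below and are otherwise hypothetical):

    CacheStats stats = new CacheStats(
            filterCache.evictions(),           // filterEvictions
            filterEntriesStats.sizeInBytes,    // filterSize
            filterEntriesStats.count,          // filterCount
            idCache.sizeInBytes());            // idCacheSize

    // wire order is now filterEvictions, filterSize, filterCount, idCacheSize;
    // the two field data longs are no longer written or read
    stats.writeTo(out);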
@@ -31,7 +31,6 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.AbstractIndexComponent;
 import org.elasticsearch.index.Index;
-import org.elasticsearch.index.cache.field.data.FieldDataCache;
 import org.elasticsearch.index.cache.filter.FilterCache;
 import org.elasticsearch.index.cache.id.IdCache;
 import org.elasticsearch.index.cache.query.parser.QueryParserCache;
@@ -44,8 +43,6 @@ public class IndexCache extends AbstractIndexComponent implements CloseableCompo

     private final FilterCache filterCache;

-    private final FieldDataCache fieldDataCache;
-
     private final QueryParserCache queryParserCache;

     private final IdCache idCache;
@@ -58,11 +55,9 @@ public class IndexCache extends AbstractIndexComponent implements CloseableCompo
     private CacheStats latestCacheStats;

     @Inject
-    public IndexCache(Index index, @IndexSettings Settings indexSettings, FilterCache filterCache, FieldDataCache fieldDataCache,
-                      QueryParserCache queryParserCache, IdCache idCache) {
+    public IndexCache(Index index, @IndexSettings Settings indexSettings, FilterCache filterCache, QueryParserCache queryParserCache, IdCache idCache) {
         super(index, indexSettings);
         this.filterCache = filterCache;
-        this.fieldDataCache = fieldDataCache;
         this.queryParserCache = queryParserCache;
         this.idCache = idCache;

@@ -81,7 +76,7 @@ public class IndexCache extends AbstractIndexComponent implements CloseableCompo

     public synchronized void invalidateCache() {
         FilterCache.EntriesStats filterEntriesStats = filterCache.entriesStats();
-        latestCacheStats = new CacheStats(fieldDataCache.evictions(), filterCache.evictions(), fieldDataCache.sizeInBytes(), filterEntriesStats.sizeInBytes, filterEntriesStats.count, idCache.sizeInBytes());
+        latestCacheStats = new CacheStats(filterCache.evictions(), filterEntriesStats.sizeInBytes, filterEntriesStats.count, idCache.sizeInBytes());
         latestCacheStatsTimestamp = System.currentTimeMillis();
     }

@@ -89,7 +84,7 @@ public class IndexCache extends AbstractIndexComponent implements CloseableCompo
         long timestamp = System.currentTimeMillis();
         if ((timestamp - latestCacheStatsTimestamp) > refreshInterval.millis()) {
             FilterCache.EntriesStats filterEntriesStats = filterCache.entriesStats();
-            latestCacheStats = new CacheStats(fieldDataCache.evictions(), filterCache.evictions(), fieldDataCache.sizeInBytes(), filterEntriesStats.sizeInBytes, filterEntriesStats.count, idCache.sizeInBytes());
+            latestCacheStats = new CacheStats(filterCache.evictions(), filterEntriesStats.sizeInBytes, filterEntriesStats.count, idCache.sizeInBytes());
             latestCacheStatsTimestamp = timestamp;
         }
         return latestCacheStats;
@@ -99,10 +94,6 @@ public class IndexCache extends AbstractIndexComponent implements CloseableCompo
         return filterCache;
     }

-    public FieldDataCache fieldData() {
-        return fieldDataCache;
-    }
-
     public IdCache idCache() {
         return this.idCache;
     }
@@ -114,7 +105,6 @@ public class IndexCache extends AbstractIndexComponent implements CloseableCompo
     @Override
     public void close() throws ElasticSearchException {
         filterCache.close();
-        fieldDataCache.close();
         idCache.close();
         queryParserCache.close();
         if (clusterService != null) {
@@ -124,13 +114,11 @@ public class IndexCache extends AbstractIndexComponent implements CloseableCompo

     public void clear(IndexReader reader) {
         filterCache.clear(reader);
-        fieldDataCache.clear(reader);
         idCache.clear(reader);
     }

     public void clear(String reason) {
         filterCache.clear(reason);
-        fieldDataCache.clear(reason);
         idCache.clear();
         queryParserCache.clear();
     }
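Since IndexCache no longer owns a FieldDataCache, its clear(...) methods only touch the filter, id and query parser caches. A hedged sketch of what a caller now needs to do to reproduce the old "clear everything" behaviour (assuming an indexService that exposes both cache() and fieldData(), as the clear-cache handler above does):

    indexService.cache().clear("api");   // filter cache, id cache, query parser cache
    indexService.fieldData().clear();    // field data, now cleared through its own service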
@@ -46,12 +46,10 @@ import org.elasticsearch.index.AbstractIndexComponent;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.cache.IndexCache;
 import org.elasticsearch.index.engine.Engine;
-import org.elasticsearch.index.field.data.FieldData;
-import org.elasticsearch.index.field.data.FieldDataType;
-import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.mapper.ParsedDocument;
-import org.elasticsearch.index.mapper.Uid;
+import org.elasticsearch.index.fielddata.BytesValues;
+import org.elasticsearch.index.fielddata.FieldDataType;
+import org.elasticsearch.index.fielddata.IndexFieldData;
+import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.index.query.IndexQueryParserService;
 import org.elasticsearch.index.query.QueryBuilder;
@@ -363,6 +361,7 @@ public class PercolatorExecutor extends AbstractIndexComponent {
     }

     static class QueryCollector extends Collector {
+        private final IndexFieldData uidFieldData;
         private final IndexSearcher searcher;
         private final IndexService percolatorIndex;
         private final List<String> matches;
@@ -371,7 +370,7 @@ public class PercolatorExecutor extends AbstractIndexComponent {

         private final Lucene.ExistsCollector collector = new Lucene.ExistsCollector();

-        private FieldData fieldData;
+        private BytesValues values;

         QueryCollector(ESLogger logger, Map<String, Query> queries, IndexSearcher searcher, IndexService percolatorIndex, List<String> matches) {
             this.logger = logger;
@@ -379,6 +378,8 @@ public class PercolatorExecutor extends AbstractIndexComponent {
             this.searcher = searcher;
             this.percolatorIndex = percolatorIndex;
             this.matches = matches;
+            // TODO: when we move to a UID level mapping def on the index level, we can use that one, now, its per type, and we can't easily choose one
+            this.uidFieldData = percolatorIndex.fieldData().getForField(new FieldMapper.Names(UidFieldMapper.NAME), new FieldDataType("string", "paged_bytes"));
         }

         @Override
@@ -387,7 +388,7 @@ public class PercolatorExecutor extends AbstractIndexComponent {

         @Override
         public void collect(int doc) throws IOException {
-            BytesRef uid = fieldData.stringValue(doc);
+            BytesRef uid = values.getValue(doc);
             if (uid == null) {
                 return;
             }
@@ -412,7 +413,7 @@ public class PercolatorExecutor extends AbstractIndexComponent {
         @Override
         public void setNextReader(AtomicReaderContext context) throws IOException {
             // we use the UID because id might not be indexed
-            fieldData = percolatorIndex.cache().fieldData().cache(FieldDataType.DefaultTypes.STRING, context.reader(), UidFieldMapper.NAME);
+            values = uidFieldData.load(context).getBytesValues();
         }

         @Override
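The percolator collector now resolves _uid values through the new field data API instead of the old per-reader cache call. A compact sketch of that lookup chain, using only the calls visible in the hunks above (the surrounding structure is illustrative):

    // once per collector: field data for _uid, as string/paged_bytes
    IndexFieldData uidFieldData = percolatorIndex.fieldData()
            .getForField(new FieldMapper.Names(UidFieldMapper.NAME), new FieldDataType("string", "paged_bytes"));

    // once per segment: load the field data for the reader context and get a bytes view
    BytesValues values = uidFieldData.load(context).getBytesValues();

    // per document: read the uid (may be null when the document has no value)
    BytesRef uid = values.getValue(doc);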