use cache recycler for count histo and date histo facets

kimchy 2011-04-04 19:44:46 +03:00
parent 105d60ac9c
commit c9838d5c75
4 changed files with 39 additions and 4 deletions
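
Both count histogram collectors in this change follow the same lifecycle: borrow a recycled TLongLongHashMap when the histogram processor is constructed, accumulate bucket counts into it during collection, and hand it back to the recycler once the facet has been built from it. A condensed, hypothetical sketch of that lifecycle (RecycledHistogramCounts, onKey and buildCounts are illustrative names, not part of the change):

import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.trove.map.hash.TLongLongHashMap;

// Hypothetical condensed view of the collector lifecycle introduced by this commit.
public class RecycledHistogramCounts {

    // borrowed from the per-thread recycler instead of allocating a fresh map per request
    private final TLongLongHashMap counts = CacheRecycler.popLongLongMap();

    // called once per collected value, mirroring the histogram procs' adjustOrPutValue usage
    public void onKey(long bucketKey) {
        counts.adjustOrPutValue(bucketKey, 1, 1);
    }

    // called when the facet is built; the map is handed back to the recycler for reuse
    public TLongLongHashMap buildCounts() {
        CacheRecycler.pushLongLongMap(counts);
        return counts;
    }
}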

HistogramFacetSearchBenchmark.java

@@ -61,7 +61,7 @@ public class HistogramFacetSearchBenchmark {
         Client client = clientNode.client();

-        long COUNT = SizeValue.parseSizeValue("1m").singles();
+        long COUNT = SizeValue.parseSizeValue("5m").singles();
         int BATCH = 500;
         int QUERY_WARMUP = 20;
         int QUERY_COUNT = 200;
@@ -112,7 +112,8 @@ public class HistogramFacetSearchBenchmark {
             }
         }
         client.admin().indices().prepareRefresh().execute().actionGet();
-        System.out.println("--> Number of docs in index: " + client.prepareCount().setQuery(matchAllQuery()).execute().actionGet().count());
+        COUNT = client.prepareCount().setQuery(matchAllQuery()).execute().actionGet().count();
+        System.out.println("--> Number of docs in index: " + COUNT);

         System.out.println("--> Warmup...");
         // run just the child query, warm up first

CacheRecycler.java

@@ -38,6 +38,36 @@ public class CacheRecycler {
         intArray.remove();
     }

+    // ----- TLongLongHashMap ----
+
+    private static ThreadLocal<SoftReference<Deque<TLongLongHashMap>>> longLongHashMap = new ThreadLocal<SoftReference<Deque<TLongLongHashMap>>>();
+
+    public static TLongLongHashMap popLongLongMap() {
+        SoftReference<Deque<TLongLongHashMap>> ref = longLongHashMap.get();
+        Deque<TLongLongHashMap> deque = ref == null ? null : ref.get();
+        if (deque == null) {
+            deque = new ArrayDeque<TLongLongHashMap>();
+            longLongHashMap.set(new SoftReference<Deque<TLongLongHashMap>>(deque));
+        }
+        if (deque.isEmpty()) {
+            return new TLongLongHashMap();
+        }
+        TLongLongHashMap map = deque.pollFirst();
+        map.clear();
+        return map;
+    }
+
+    public static void pushLongLongMap(TLongLongHashMap map) {
+        SoftReference<Deque<TLongLongHashMap>> ref = longLongHashMap.get();
+        Deque<TLongLongHashMap> deque = ref == null ? null : ref.get();
+        if (deque == null) {
+            deque = new ArrayDeque<TLongLongHashMap>();
+            longLongHashMap.set(new SoftReference<Deque<TLongLongHashMap>>(deque));
+        }
+        deque.add(map);
+    }
+
     // ----- TIntIntHashMap ----

     private static ThreadLocal<SoftReference<Deque<TIntIntHashMap>>> intIntHashMap = new ThreadLocal<SoftReference<Deque<TIntIntHashMap>>>();

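For reference, the new pop/push pair behaves like a small per-thread object pool: popLongLongMap() returns an empty map, reusing (and clearing) one from the current thread's deque when available, and pushLongLongMap() makes the map available for the next pop on that thread. Keeping the deque in a ThreadLocal avoids synchronization, and wrapping it in a SoftReference lets the cached maps be reclaimed under memory pressure. A minimal, hypothetical usage sketch:

TLongLongHashMap counts = CacheRecycler.popLongLongMap();   // empty, possibly recycled instance
counts.adjustOrPutValue(1000L, 1, 1);                       // accumulate as usual
CacheRecycler.pushLongLongMap(counts);                      // hand it back for reuse on this thread
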
CountDateHistogramFacetCollector.java

@@ -20,6 +20,7 @@
 package org.elasticsearch.search.facet.datehistogram;

 import org.apache.lucene.index.IndexReader;
+import org.elasticsearch.common.CacheRecycler;
 import org.elasticsearch.common.joda.time.MutableDateTime;
 import org.elasticsearch.common.trove.map.hash.TLongLongHashMap;
 import org.elasticsearch.index.cache.field.data.FieldDataCache;
@@ -93,6 +94,7 @@ public class CountDateHistogramFacetCollector extends AbstractFacetCollector {
     }

     @Override public Facet facet() {
+        CacheRecycler.pushLongLongMap(histoProc.counts());
         return new InternalCountDateHistogramFacet(facetName, comparatorType, histoProc.counts());
     }
@@ -102,7 +104,7 @@ public class CountDateHistogramFacetCollector extends AbstractFacetCollector {
     public static class DateHistogramProc implements LongFieldData.DateValueInDocProc {

-        protected final TLongLongHashMap counts = new TLongLongHashMap();
+        protected final TLongLongHashMap counts = CacheRecycler.popLongLongMap();

         @Override public void onValue(int docId, MutableDateTime dateTime) {
             counts.adjustOrPutValue(dateTime.getMillis(), 1, 1);

CountHistogramFacetCollector.java

@@ -20,6 +20,7 @@
 package org.elasticsearch.search.facet.histogram;

 import org.apache.lucene.index.IndexReader;
+import org.elasticsearch.common.CacheRecycler;
 import org.elasticsearch.common.trove.map.hash.TLongLongHashMap;
 import org.elasticsearch.index.cache.field.data.FieldDataCache;
 import org.elasticsearch.index.field.data.FieldDataType;
@@ -85,6 +86,7 @@ public class CountHistogramFacetCollector extends AbstractFacetCollector {
     }

     @Override public Facet facet() {
+        CacheRecycler.pushLongLongMap(histoProc.counts());
         return new InternalCountHistogramFacet(facetName, comparatorType, histoProc.counts());
     }
@@ -96,7 +98,7 @@ public class CountHistogramFacetCollector extends AbstractFacetCollector {
         private final long interval;

-        private final TLongLongHashMap counts = new TLongLongHashMap();
+        private final TLongLongHashMap counts = CacheRecycler.popLongLongMap();

         private int missing;