Reduce memory usage when performing the reduce phase on histogram facets

This commit is contained in:
kimchy 2011-02-23 05:55:23 +02:00
parent 9cccfc3bd3
commit 8617e3b0e3
5 changed files with 36 additions and 64 deletions

View File

@ -19,7 +19,6 @@
package org.elasticsearch.search.facet.datehistogram; package org.elasticsearch.search.facet.datehistogram;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.trove.iterator.TLongDoubleIterator; import org.elasticsearch.common.trove.iterator.TLongDoubleIterator;
@ -32,10 +31,9 @@ import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.histogram.HistogramFacet; import org.elasticsearch.search.facet.histogram.HistogramFacet;
import java.io.IOException; import java.io.IOException;
import java.util.Collection; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.TreeSet;
/** /**
* @author kimchy (shay.banon) * @author kimchy (shay.banon)
@ -114,7 +112,7 @@ public class InternalCountAndTotalDateHistogramFacet extends InternalDateHistogr
TLongDoubleHashMap totals; TLongDoubleHashMap totals;
Collection<CountAndTotalEntry> entries = null; CountAndTotalEntry[] entries = null;
private InternalCountAndTotalDateHistogramFacet() { private InternalCountAndTotalDateHistogramFacet() {
} }
@ -143,11 +141,7 @@ public class InternalCountAndTotalDateHistogramFacet extends InternalDateHistogr
} }
@Override public List<CountAndTotalEntry> entries() { @Override public List<CountAndTotalEntry> entries() {
computeEntries(); return Arrays.asList(computeEntries());
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
}
return (List<CountAndTotalEntry>) entries;
} }
@Override public List<CountAndTotalEntry> getEntries() { @Override public List<CountAndTotalEntry> getEntries() {
@ -155,19 +149,20 @@ public class InternalCountAndTotalDateHistogramFacet extends InternalDateHistogr
} }
@Override public Iterator<Entry> iterator() { @Override public Iterator<Entry> iterator() {
return (Iterator) computeEntries().iterator(); return (Iterator) entries().iterator();
} }
private Collection<CountAndTotalEntry> computeEntries() { private CountAndTotalEntry[] computeEntries() {
if (entries != null) { if (entries != null) {
return entries; return entries;
} }
TreeSet<CountAndTotalEntry> set = new TreeSet<CountAndTotalEntry>(comparatorType.comparator()); entries = new CountAndTotalEntry[counts.size()];
int i = 0;
for (TLongLongIterator it = counts.iterator(); it.hasNext();) { for (TLongLongIterator it = counts.iterator(); it.hasNext();) {
it.advance(); it.advance();
set.add(new CountAndTotalEntry(it.key(), it.value(), totals.get(it.key()))); entries[i++] = new CountAndTotalEntry(it.key(), it.value(), totals.get(it.key()));
} }
entries = set; Arrays.sort(entries, comparatorType.comparator());
return entries; return entries;
} }

View File

@ -19,7 +19,6 @@
package org.elasticsearch.search.facet.datehistogram; package org.elasticsearch.search.facet.datehistogram;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.trove.iterator.TLongLongIterator; import org.elasticsearch.common.trove.iterator.TLongLongIterator;
@ -30,10 +29,9 @@ import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.histogram.HistogramFacet; import org.elasticsearch.search.facet.histogram.HistogramFacet;
import java.io.IOException; import java.io.IOException;
import java.util.Collection; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.TreeSet;
/** /**
* @author kimchy (shay.banon) * @author kimchy (shay.banon)
@ -108,7 +106,7 @@ public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet
TLongLongHashMap counts; TLongLongHashMap counts;
Collection<CountEntry> entries = null; CountEntry[] entries = null;
private InternalCountDateHistogramFacet() { private InternalCountDateHistogramFacet() {
} }
@ -136,11 +134,7 @@ public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet
} }
@Override public List<CountEntry> entries() { @Override public List<CountEntry> entries() {
computeEntries(); return Arrays.asList(computeEntries());
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
}
return (List<CountEntry>) entries;
} }
@Override public List<CountEntry> getEntries() { @Override public List<CountEntry> getEntries() {
@ -148,19 +142,20 @@ public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet
} }
@Override public Iterator<Entry> iterator() { @Override public Iterator<Entry> iterator() {
return (Iterator) computeEntries().iterator(); return (Iterator) entries().iterator();
} }
private Collection<CountEntry> computeEntries() { private CountEntry[] computeEntries() {
if (entries != null) { if (entries != null) {
return entries; return entries;
} }
TreeSet<CountEntry> set = new TreeSet<CountEntry>(comparatorType.comparator()); entries = new CountEntry[counts.size()];
int i = 0;
for (TLongLongIterator it = counts.iterator(); it.hasNext();) { for (TLongLongIterator it = counts.iterator(); it.hasNext();) {
it.advance(); it.advance();
set.add(new CountEntry(it.key(), it.value())); entries[i++] = new CountEntry(it.key(), it.value());
} }
entries = set; Arrays.sort(entries, comparatorType.comparator());
return entries; return entries;
} }

View File

@ -158,13 +158,5 @@ public class ValueScriptDateHistogramFacetCollector extends AbstractFacetCollect
double scriptValue = valueScript.runAsDouble(); double scriptValue = valueScript.runAsDouble();
totals.adjustOrPutValue(bucket, scriptValue, scriptValue); totals.adjustOrPutValue(bucket, scriptValue, scriptValue);
} }
public TLongLongHashMap counts() {
return counts;
}
public TLongDoubleHashMap totals() {
return totals;
}
} }
} }

View File

@ -19,7 +19,6 @@
package org.elasticsearch.search.facet.histogram; package org.elasticsearch.search.facet.histogram;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.trove.iterator.TLongDoubleIterator; import org.elasticsearch.common.trove.iterator.TLongDoubleIterator;
@ -31,10 +30,9 @@ import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet; import org.elasticsearch.search.facet.Facet;
import java.io.IOException; import java.io.IOException;
import java.util.Collection; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.TreeSet;
/** /**
* @author kimchy (shay.banon) * @author kimchy (shay.banon)
@ -137,7 +135,7 @@ public class InternalCountAndTotalHistogramFacet extends InternalHistogramFacet
TLongDoubleHashMap totals; TLongDoubleHashMap totals;
Collection<CountAndTotalEntry> entries = null; CountAndTotalEntry[] entries = null;
private InternalCountAndTotalHistogramFacet() { private InternalCountAndTotalHistogramFacet() {
} }
@ -166,11 +164,7 @@ public class InternalCountAndTotalHistogramFacet extends InternalHistogramFacet
} }
@Override public List<CountAndTotalEntry> entries() { @Override public List<CountAndTotalEntry> entries() {
computeEntries(); return Arrays.asList(computeEntries());
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
}
return (List<CountAndTotalEntry>) entries;
} }
@Override public List<CountAndTotalEntry> getEntries() { @Override public List<CountAndTotalEntry> getEntries() {
@ -178,19 +172,20 @@ public class InternalCountAndTotalHistogramFacet extends InternalHistogramFacet
} }
@Override public Iterator<Entry> iterator() { @Override public Iterator<Entry> iterator() {
return (Iterator) computeEntries().iterator(); return (Iterator) entries().iterator();
} }
private Collection<CountAndTotalEntry> computeEntries() { private CountAndTotalEntry[] computeEntries() {
if (entries != null) { if (entries != null) {
return entries; return entries;
} }
TreeSet<CountAndTotalEntry> set = new TreeSet<CountAndTotalEntry>(comparatorType.comparator()); entries = new CountAndTotalEntry[counts.size()];
int i = 0;
for (TLongLongIterator it = counts.iterator(); it.hasNext();) { for (TLongLongIterator it = counts.iterator(); it.hasNext();) {
it.advance(); it.advance();
set.add(new CountAndTotalEntry(it.key(), it.value(), totals.get(it.key()))); entries[i++] = new CountAndTotalEntry(it.key(), it.value(), totals.get(it.key()));
} }
entries = set; Arrays.sort(entries, comparatorType.comparator());
return entries; return entries;
} }

View File

@ -19,7 +19,6 @@
package org.elasticsearch.search.facet.histogram; package org.elasticsearch.search.facet.histogram;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.trove.iterator.TLongLongIterator; import org.elasticsearch.common.trove.iterator.TLongLongIterator;
@ -29,10 +28,9 @@ import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet; import org.elasticsearch.search.facet.Facet;
import java.io.IOException; import java.io.IOException;
import java.util.Collection; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.TreeSet;
/** /**
* @author kimchy (shay.banon) * @author kimchy (shay.banon)
@ -131,7 +129,7 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
TLongLongHashMap counts; TLongLongHashMap counts;
Collection<CountEntry> entries = null; CountEntry[] entries = null;
private InternalCountHistogramFacet() { private InternalCountHistogramFacet() {
} }
@ -159,11 +157,7 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
} }
@Override public List<CountEntry> entries() { @Override public List<CountEntry> entries() {
computeEntries(); return Arrays.asList(computeEntries());
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
}
return (List<CountEntry>) entries;
} }
@Override public List<CountEntry> getEntries() { @Override public List<CountEntry> getEntries() {
@ -171,19 +165,20 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
} }
@Override public Iterator<Entry> iterator() { @Override public Iterator<Entry> iterator() {
return (Iterator) computeEntries().iterator(); return (Iterator) entries().iterator();
} }
private Collection<CountEntry> computeEntries() { private CountEntry[] computeEntries() {
if (entries != null) { if (entries != null) {
return entries; return entries;
} }
TreeSet<CountEntry> set = new TreeSet<CountEntry>(comparatorType.comparator()); entries = new CountEntry[counts.size()];
int i = 0;
for (TLongLongIterator it = counts.iterator(); it.hasNext();) { for (TLongLongIterator it = counts.iterator(); it.hasNext();) {
it.advance(); it.advance();
set.add(new CountEntry(it.key(), it.value())); entries[i++] = new CountEntry(it.key(), it.value());
} }
entries = set; Arrays.sort(entries, comparatorType.comparator());
return entries; return entries;
} }