enhance memory usage when doing reduce on histogram facets

This commit is contained in:
kimchy 2011-02-23 05:55:23 +02:00
parent 9cccfc3bd3
commit 8617e3b0e3
5 changed files with 36 additions and 64 deletions

View File

@@ -19,7 +19,6 @@
package org.elasticsearch.search.facet.datehistogram;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.trove.iterator.TLongDoubleIterator;
@@ -32,10 +31,9 @@ import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.histogram.HistogramFacet;
import java.io.IOException;
import java.util.Collection;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.TreeSet;
/**
* @author kimchy (shay.banon)
@@ -114,7 +112,7 @@ public class InternalCountAndTotalDateHistogramFacet extends InternalDateHistogramFacet
TLongDoubleHashMap totals;
Collection<CountAndTotalEntry> entries = null;
CountAndTotalEntry[] entries = null;
private InternalCountAndTotalDateHistogramFacet() {
}
@@ -143,11 +141,7 @@ public class InternalCountAndTotalDateHistogramFacet extends InternalDateHistogramFacet
}
@Override public List<CountAndTotalEntry> entries() {
computeEntries();
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
}
return (List<CountAndTotalEntry>) entries;
return Arrays.asList(computeEntries());
}
@Override public List<CountAndTotalEntry> getEntries() {
@@ -155,19 +149,20 @@ public class InternalCountAndTotalDateHistogramFacet extends InternalDateHistogramFacet
}
@Override public Iterator<Entry> iterator() {
return (Iterator) computeEntries().iterator();
return (Iterator) entries().iterator();
}
private Collection<CountAndTotalEntry> computeEntries() {
private CountAndTotalEntry[] computeEntries() {
if (entries != null) {
return entries;
}
TreeSet<CountAndTotalEntry> set = new TreeSet<CountAndTotalEntry>(comparatorType.comparator());
entries = new CountAndTotalEntry[counts.size()];
int i = 0;
for (TLongLongIterator it = counts.iterator(); it.hasNext();) {
it.advance();
set.add(new CountAndTotalEntry(it.key(), it.value(), totals.get(it.key())));
entries[i++] = new CountAndTotalEntry(it.key(), it.value(), totals.get(it.key()));
}
entries = set;
Arrays.sort(entries, comparatorType.comparator());
return entries;
}

View File

@@ -19,7 +19,6 @@
package org.elasticsearch.search.facet.datehistogram;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.trove.iterator.TLongLongIterator;
@@ -30,10 +29,9 @@ import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.histogram.HistogramFacet;
import java.io.IOException;
import java.util.Collection;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.TreeSet;
/**
* @author kimchy (shay.banon)
@@ -108,7 +106,7 @@ public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet
TLongLongHashMap counts;
Collection<CountEntry> entries = null;
CountEntry[] entries = null;
private InternalCountDateHistogramFacet() {
}
@@ -136,11 +134,7 @@ public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet
}
@Override public List<CountEntry> entries() {
computeEntries();
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
}
return (List<CountEntry>) entries;
return Arrays.asList(computeEntries());
}
@Override public List<CountEntry> getEntries() {
@@ -148,19 +142,20 @@ public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet
}
@Override public Iterator<Entry> iterator() {
return (Iterator) computeEntries().iterator();
return (Iterator) entries().iterator();
}
private Collection<CountEntry> computeEntries() {
private CountEntry[] computeEntries() {
if (entries != null) {
return entries;
}
TreeSet<CountEntry> set = new TreeSet<CountEntry>(comparatorType.comparator());
entries = new CountEntry[counts.size()];
int i = 0;
for (TLongLongIterator it = counts.iterator(); it.hasNext();) {
it.advance();
set.add(new CountEntry(it.key(), it.value()));
entries[i++] = new CountEntry(it.key(), it.value());
}
entries = set;
Arrays.sort(entries, comparatorType.comparator());
return entries;
}

View File

@@ -158,13 +158,5 @@ public class ValueScriptDateHistogramFacetCollector extends AbstractFacetCollector
double scriptValue = valueScript.runAsDouble();
totals.adjustOrPutValue(bucket, scriptValue, scriptValue);
}
public TLongLongHashMap counts() {
return counts;
}
public TLongDoubleHashMap totals() {
return totals;
}
}
}

View File

@@ -19,7 +19,6 @@
package org.elasticsearch.search.facet.histogram;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.trove.iterator.TLongDoubleIterator;
@@ -31,10 +30,9 @@ import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;
import java.io.IOException;
import java.util.Collection;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.TreeSet;
/**
* @author kimchy (shay.banon)
@@ -137,7 +135,7 @@ public class InternalCountAndTotalHistogramFacet extends InternalHistogramFacet
TLongDoubleHashMap totals;
Collection<CountAndTotalEntry> entries = null;
CountAndTotalEntry[] entries = null;
private InternalCountAndTotalHistogramFacet() {
}
@@ -166,11 +164,7 @@ public class InternalCountAndTotalHistogramFacet extends InternalHistogramFacet
}
@Override public List<CountAndTotalEntry> entries() {
computeEntries();
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
}
return (List<CountAndTotalEntry>) entries;
return Arrays.asList(computeEntries());
}
@Override public List<CountAndTotalEntry> getEntries() {
@@ -178,19 +172,20 @@ public class InternalCountAndTotalHistogramFacet extends InternalHistogramFacet
}
@Override public Iterator<Entry> iterator() {
return (Iterator) computeEntries().iterator();
return (Iterator) entries().iterator();
}
private Collection<CountAndTotalEntry> computeEntries() {
private CountAndTotalEntry[] computeEntries() {
if (entries != null) {
return entries;
}
TreeSet<CountAndTotalEntry> set = new TreeSet<CountAndTotalEntry>(comparatorType.comparator());
entries = new CountAndTotalEntry[counts.size()];
int i = 0;
for (TLongLongIterator it = counts.iterator(); it.hasNext();) {
it.advance();
set.add(new CountAndTotalEntry(it.key(), it.value(), totals.get(it.key())));
entries[i++] = new CountAndTotalEntry(it.key(), it.value(), totals.get(it.key()));
}
entries = set;
Arrays.sort(entries, comparatorType.comparator());
return entries;
}

View File

@@ -19,7 +19,6 @@
package org.elasticsearch.search.facet.histogram;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.trove.iterator.TLongLongIterator;
@@ -29,10 +28,9 @@ import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;
import java.io.IOException;
import java.util.Collection;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.TreeSet;
/**
* @author kimchy (shay.banon)
@@ -131,7 +129,7 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
TLongLongHashMap counts;
Collection<CountEntry> entries = null;
CountEntry[] entries = null;
private InternalCountHistogramFacet() {
}
@@ -159,11 +157,7 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
}
@Override public List<CountEntry> entries() {
computeEntries();
if (!(entries instanceof List)) {
entries = ImmutableList.copyOf(entries);
}
return (List<CountEntry>) entries;
return Arrays.asList(computeEntries());
}
@Override public List<CountEntry> getEntries() {
@@ -171,19 +165,20 @@ public class InternalCountHistogramFacet extends InternalHistogramFacet {
}
@Override public Iterator<Entry> iterator() {
return (Iterator) computeEntries().iterator();
return (Iterator) entries().iterator();
}
private Collection<CountEntry> computeEntries() {
private CountEntry[] computeEntries() {
if (entries != null) {
return entries;
}
TreeSet<CountEntry> set = new TreeSet<CountEntry>(comparatorType.comparator());
entries = new CountEntry[counts.size()];
int i = 0;
for (TLongLongIterator it = counts.iterator(); it.hasNext();) {
it.advance();
set.add(new CountEntry(it.key(), it.value()));
entries[i++] = new CountEntry(it.key(), it.value());
}
entries = set;
Arrays.sort(entries, comparatorType.comparator());
return entries;
}