normalize to/from properly in bounded histogram

This commit is contained in:
kimchy 2011-04-05 15:10:55 +03:00
parent 1428abf2b7
commit 6f827731d6
6 changed files with 51 additions and 9 deletions

View File

@@ -68,8 +68,13 @@ public class BoundedCountHistogramFacetCollector extends AbstractFacetCollector
indexFieldName = mapper.names().indexName();
fieldDataType = mapper.fieldDataType();
long offset = -from;
int size = (int) ((to - from) / interval);
long normalizedFrom = (((long) ((double) from / interval)) * interval);
long normalizedTo = (((long) ((double) to / interval)) * interval);
if ((to % interval) != 0) {
normalizedTo += interval;
}
long offset = -normalizedFrom;
int size = (int) ((normalizedTo - normalizedFrom) / interval);
histoProc = new HistogramProc(from, to, interval, offset, size);
}

View File

@@ -82,8 +82,13 @@ public class BoundedValueHistogramFacetCollector extends AbstractFacetCollector
valueIndexFieldName = mapper.names().indexName();
valueFieldDataType = mapper.fieldDataType();
long offset = -from;
int size = (int) ((to - from) / interval);
long normalizedFrom = (((long) ((double) from / interval)) * interval);
long normalizedTo = (((long) ((double) to / interval)) * interval);
if ((to % interval) != 0) {
normalizedTo += interval;
}
long offset = -normalizedFrom;
int size = (int) ((normalizedTo - normalizedFrom) / interval);
histoProc = new HistogramProc(from, to, interval, offset, size);
}

View File

@@ -78,8 +78,13 @@ public class BoundedValueScriptHistogramFacetCollector extends AbstractFacetColl
indexFieldName = mapper.names().indexName();
fieldDataType = mapper.fieldDataType();
long offset = -from;
int size = (int) ((to - from) / interval);
long normalizedFrom = (((long) ((double) from / interval)) * interval);
long normalizedTo = (((long) ((double) to / interval)) * interval);
if ((to % interval) != 0) {
normalizedTo += interval;
}
long offset = -normalizedFrom;
int size = (int) ((normalizedTo - normalizedFrom) / interval);
histoProc = new HistogramProc(from, to, interval, offset, size, this.valueScript);
}

View File

@@ -182,14 +182,15 @@ public class InternalBoundedCountHistogramFacet extends InternalHistogramFacet {
for (int i = 0; i < size; i++) {
entries[i] = new CountEntry((i * interval) + offset, counts[i]);
}
if (comparatorType != ComparatorType.KEY) {
Arrays.sort(entries, comparatorType.comparator());
}
return entries;
}
@Override public Facet reduce(String name, List<Facet> facets) {
if (facets.size() == 1) {
InternalBoundedCountHistogramFacet firstHistoFacet = (InternalBoundedCountHistogramFacet) facets.get(0);
if (comparatorType != ComparatorType.KEY) {
Arrays.sort(firstHistoFacet.entries, comparatorType.comparator());
}
return facets.get(0);
}
InternalBoundedCountHistogramFacet firstHistoFacet = (InternalBoundedCountHistogramFacet) facets.get(0);
@@ -199,6 +200,9 @@ public class InternalBoundedCountHistogramFacet extends InternalHistogramFacet {
firstHistoFacet.counts[j] += histoFacet.counts[j];
}
}
if (comparatorType != ComparatorType.KEY) {
Arrays.sort(firstHistoFacet.entries, comparatorType.comparator());
}
return firstHistoFacet;
}

View File

@@ -236,6 +236,11 @@ public class InternalBoundedFullHistogramFacet extends InternalHistogramFacet {
}
}
}
if (comparatorType != ComparatorType.KEY) {
Arrays.sort(agg, (Comparator) comparatorType.comparator());
}
InternalBoundedFullHistogramFacet internalFacet = (InternalBoundedFullHistogramFacet) facets.get(0);
internalFacet.entries = agg;
internalFacet.entriesSafe = true;

View File

@@ -937,6 +937,8 @@ public class SimpleFacetsTests extends AbstractNodesTests {
.addFacet(histogramFacet("stats10").field("num").bounds(1000, 1300).interval(100)) // for bounded, we also get 0s
.addFacet(histogramFacet("stats11").field("num").valueField("num").bounds(1000, 1300).interval(100)) // for bounded, we also get 0s
.addFacet(histogramScriptFacet("stats12").keyField("num").valueScript("doc['num'].value").bounds(1000, 1300).interval(100)) // for bounded, we also get 0s
.addFacet(histogramFacet("stats13").field("num").bounds(1056, 1176).interval(100))
.addFacet(histogramFacet("stats14").field("num").valueField("num").bounds(1056, 1176).interval(100))
.execute().actionGet();
if (searchResponse.failedShards() > 0) {
@@ -1117,6 +1119,22 @@ public class SimpleFacetsTests extends AbstractNodesTests {
assertThat(facet.entries().get(2).key(), equalTo(1200l));
assertThat(facet.entries().get(2).count(), equalTo(0l));
assertThat(facet.entries().get(2).totalCount(), equalTo(0l));
facet = searchResponse.facets().facet("stats13");
assertThat(facet.name(), equalTo("stats13"));
assertThat(facet.entries().size(), equalTo(2));
assertThat(facet.entries().get(0).key(), equalTo(1000l));
assertThat(facet.entries().get(0).count(), equalTo(1l));
assertThat(facet.entries().get(1).key(), equalTo(1100l));
assertThat(facet.entries().get(1).count(), equalTo(1l));
facet = searchResponse.facets().facet("stats14");
assertThat(facet.name(), equalTo("stats14"));
assertThat(facet.entries().size(), equalTo(2));
assertThat(facet.entries().get(0).key(), equalTo(1000l));
assertThat(facet.entries().get(0).count(), equalTo(1l));
assertThat(facet.entries().get(1).key(), equalTo(1100l));
assertThat(facet.entries().get(1).count(), equalTo(1l));
}
@Test public void testRangeFacets() throws Exception {