Changed the caching of FieldDataSource in aggs to be based on field name + required value source type as a combined key (it used to be keyed by field name only). This fixes a problem where multiple aggregations were defined on the same field yet required different types of value sources.
Closes #5190
This commit is contained in:
parent db57f7ed0e
commit 428080b49a
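For illustration, a minimal sketch of the request shape this change fixes, assuming the "idx" index and numeric "value" field set up by the CombiTests test added in this commit (client(), missing(...) and terms(...) come from the test's ElasticsearchIntegrationTest base class and AggregationBuilders):

    // Two aggregations on the same field that need different value source types.
    // Before this change both lookups hit a single cache entry keyed only by the
    // field name "value"; now each resolves its own entry keyed by field name + type.
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(missing("missing_values").field("value"))
            .addAggregation(terms("values").field("value"))
            .execute().actionGet();
    assertSearchResponse(response);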
AggregationContext.java
@@ -48,7 +48,7 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
 
     private final SearchContext searchContext;
 
-    private ObjectObjectOpenHashMap<String, FieldDataSource>[] perDepthFieldDataSources = new ObjectObjectOpenHashMap[4];
+    private ObjectObjectOpenHashMap<ConfigCacheKey, FieldDataSource>[] perDepthFieldDataSources = new ObjectObjectOpenHashMap[4];
     private List<ReaderContextAware> readerAwares = new ArrayList<ReaderContextAware>();
     private List<ScorerAware> scorerAwares = new ArrayList<ScorerAware>();
 
@@ -102,9 +102,9 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
             perDepthFieldDataSources = Arrays.copyOf(perDepthFieldDataSources, ArrayUtil.oversize(1 + depth, RamUsageEstimator.NUM_BYTES_OBJECT_REF));
         }
         if (perDepthFieldDataSources[depth] == null) {
-            perDepthFieldDataSources[depth] = new ObjectObjectOpenHashMap<String, FieldDataSource>();
+            perDepthFieldDataSources[depth] = new ObjectObjectOpenHashMap<ConfigCacheKey, FieldDataSource>();
         }
-        final ObjectObjectOpenHashMap<String, FieldDataSource> fieldDataSources = perDepthFieldDataSources[depth];
+        final ObjectObjectOpenHashMap<ConfigCacheKey, FieldDataSource> fieldDataSources = perDepthFieldDataSources[depth];
 
         if (config.fieldContext == null) {
             if (NumericValuesSource.class.isAssignableFrom(config.valueSourceType)) {
@@ -139,14 +139,15 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
        return new NumericValuesSource(source, config.formatter(), config.parser());
    }
 
-    private NumericValuesSource numericField(ObjectObjectOpenHashMap<String, FieldDataSource> fieldDataSources, ValuesSourceConfig<?> config) {
-        FieldDataSource.Numeric dataSource = (FieldDataSource.Numeric) fieldDataSources.get(config.fieldContext.field());
+    private NumericValuesSource numericField(ObjectObjectOpenHashMap<ConfigCacheKey, FieldDataSource> fieldDataSources, ValuesSourceConfig<?> config) {
+        final ConfigCacheKey cacheKey = new ConfigCacheKey(config);
+        FieldDataSource.Numeric dataSource = (FieldDataSource.Numeric) fieldDataSources.get(cacheKey);
         if (dataSource == null) {
             FieldDataSource.MetaData metaData = FieldDataSource.MetaData.load(config.fieldContext.indexFieldData(), searchContext);
             dataSource = new FieldDataSource.Numeric.FieldData((IndexNumericFieldData<?>) config.fieldContext.indexFieldData(), metaData);
             setReaderIfNeeded((ReaderContextAware) dataSource);
             readerAwares.add((ReaderContextAware) dataSource);
-            fieldDataSources.put(config.fieldContext.field(), dataSource);
+            fieldDataSources.put(cacheKey, dataSource);
         }
         if (config.script != null) {
             setScorerIfNeeded(config.script);
@@ -166,8 +167,9 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
        return new NumericValuesSource(dataSource, config.formatter(), config.parser());
    }
 
-    private ValuesSource bytesField(ObjectObjectOpenHashMap<String, FieldDataSource> fieldDataSources, ValuesSourceConfig<?> config) {
-        FieldDataSource dataSource = fieldDataSources.get(config.fieldContext.field());
+    private ValuesSource bytesField(ObjectObjectOpenHashMap<ConfigCacheKey, FieldDataSource> fieldDataSources, ValuesSourceConfig<?> config) {
+        final ConfigCacheKey cacheKey = new ConfigCacheKey(config);
+        FieldDataSource dataSource = fieldDataSources.get(cacheKey);
         if (dataSource == null) {
             final IndexFieldData<?> indexFieldData = config.fieldContext.indexFieldData();
             FieldDataSource.MetaData metaData = FieldDataSource.MetaData.load(config.fieldContext.indexFieldData(), searchContext);
@@ -178,7 +180,7 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
             }
             setReaderIfNeeded((ReaderContextAware) dataSource);
             readerAwares.add((ReaderContextAware) dataSource);
-            fieldDataSources.put(config.fieldContext.field(), dataSource);
+            fieldDataSources.put(cacheKey, dataSource);
         }
         if (config.script != null) {
             setScorerIfNeeded(config.script);
@@ -218,14 +220,15 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
        return new BytesValuesSource(source);
    }
 
-    private GeoPointValuesSource geoPointField(ObjectObjectOpenHashMap<String, FieldDataSource> fieldDataSources, ValuesSourceConfig<?> config) {
-        FieldDataSource.GeoPoint dataSource = (FieldDataSource.GeoPoint) fieldDataSources.get(config.fieldContext.field());
+    private GeoPointValuesSource geoPointField(ObjectObjectOpenHashMap<ConfigCacheKey, FieldDataSource> fieldDataSources, ValuesSourceConfig<?> config) {
+        final ConfigCacheKey cacheKey = new ConfigCacheKey(config);
+        FieldDataSource.GeoPoint dataSource = (FieldDataSource.GeoPoint) fieldDataSources.get(cacheKey);
         if (dataSource == null) {
             FieldDataSource.MetaData metaData = FieldDataSource.MetaData.load(config.fieldContext.indexFieldData(), searchContext);
             dataSource = new FieldDataSource.GeoPoint((IndexGeoPointFieldData<?>) config.fieldContext.indexFieldData(), metaData);
             setReaderIfNeeded(dataSource);
             readerAwares.add(dataSource);
-            fieldDataSources.put(config.fieldContext.field(), dataSource);
+            fieldDataSources.put(cacheKey, dataSource);
         }
         if (config.needsHashes) {
             dataSource.setNeedsHashes(true);
@@ -254,4 +257,35 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
             scorerAware.setScorer(scorer);
         }
     }
+
+    private static class ConfigCacheKey {
+
+        private final String field;
+        private final Class<? extends ValuesSource> valueSourceType;
+
+        private ConfigCacheKey(ValuesSourceConfig config) {
+            this.field = config.fieldContext.field();
+            this.valueSourceType = config.valueSourceType;
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+
+            ConfigCacheKey that = (ConfigCacheKey) o;
+
+            if (!field.equals(that.field)) return false;
+            if (!valueSourceType.equals(that.valueSourceType)) return false;
+
+            return true;
+        }
+
+        @Override
+        public int hashCode() {
+            int result = field.hashCode();
+            result = 31 * result + valueSourceType.hashCode();
+            return result;
+        }
+    }
 }
CombiTests.java (new file)
@@ -0,0 +1,113 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.aggregations;
+
+import com.carrotsearch.hppc.IntIntMap;
+import com.carrotsearch.hppc.IntIntOpenHashMap;
+import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.settings.ImmutableSettings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.search.aggregations.bucket.missing.Missing;
+import org.elasticsearch.search.aggregations.bucket.terms.Terms;
+import org.elasticsearch.test.ElasticsearchIntegrationTest;
+import org.junit.Test;
+
+import java.util.Collection;
+
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.elasticsearch.search.aggregations.AggregationBuilders.missing;
+import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
+import static org.hamcrest.CoreMatchers.equalTo;
+
+/**
+ *
+ */
+public class CombiTests extends ElasticsearchIntegrationTest {
+
+
+    @Override
+    public Settings indexSettings() {
+        return ImmutableSettings.builder()
+                .put("index.number_of_shards", between(1, 5))
+                .put("index.number_of_replicas", between(0, 1))
+                .build();
+    }
+
+    /**
+     * Making sure that if there are multiple aggregations, working on the same field, yet require different
+     * value source type, they can all still work. It used to fail as we used to cache the ValueSource by the
+     * field name. If the cached value source was of type "bytes" and another aggregation on the field required to see
+     * it as "numeric", it didn't work. Now we cache the Value Sources by a custom key (field name + ValueSource type)
+     * so there's no conflict there.
+     */
+    @Test
+    public void multipleAggs_OnSameField_WithDifferentRequiredValueSourceType() throws Exception {
+
+        createIndex("idx");
+        IndexRequestBuilder[] builders = new IndexRequestBuilder[randomInt(30)];
+        IntIntMap values = new IntIntOpenHashMap();
+        long missingValues = 0;
+        for (int i = 0; i < builders.length; i++) {
+            String name = "name_" + randomIntBetween(1, 10);
+            if (rarely()) {
+                missingValues++;
+                builders[i] = client().prepareIndex("idx", "type").setSource(jsonBuilder()
+                        .startObject()
+                        .field("name", name)
+                        .endObject());
+            } else {
+                int value = randomIntBetween(1, 10);
+                values.put(value, values.getOrDefault(value, 0) + 1);
+                builders[i] = client().prepareIndex("idx", "type").setSource(jsonBuilder()
+                        .startObject()
+                        .field("name", name)
+                        .field("value", value)
+                        .endObject());
+            }
+        }
+        indexRandom(true, builders);
+        ensureSearchable();
+
+
+        SearchResponse response = client().prepareSearch("idx")
+                .addAggregation(missing("missing_values").field("value"))
+                .addAggregation(terms("values").field("value"))
+                .execute().actionGet();
+
+        assertSearchResponse(response);
+
+        Aggregations aggs = response.getAggregations();
+
+        Missing missing = aggs.get("missing_values");
+        assertNotNull(missing);
+        assertThat(missing.getDocCount(), equalTo(missingValues));
+
+        Terms terms = aggs.get("values");
+        assertNotNull(terms);
+        Collection<Terms.Bucket> buckets = terms.getBuckets();
+        assertThat(buckets.size(), equalTo(values.size()));
+        for (Terms.Bucket bucket : buckets) {
+            values.remove(bucket.getKeyAsNumber().intValue());
+        }
+        assertTrue(values.isEmpty());
+    }
+}
RandomTests.java
@@ -42,7 +42,10 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFa
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.core.IsNull.notNullValue;
 
-/** Additional tests that aim at testing more complex aggregation trees on larger random datasets, so that things like the growth of dynamic arrays is tested. */
+/**
+ * Additional tests that aim at testing more complex aggregation trees on larger random datasets, so that things like
+ * the growth of dynamic arrays is tested.
+ */
 public class RandomTests extends ElasticsearchIntegrationTest {
 
     @Override
StringTermsTests.java
@@ -115,7 +115,7 @@ public class StringTermsTests extends ElasticsearchIntegrationTest {
                 .size(0))
                 .execute().actionGet();
 
-        assertSearchResponse(response);System.out.println(response);
+        assertSearchResponse(response);
 
         Terms terms = response.getAggregations().get("terms");
         assertThat(terms, notNullValue());