run bwc test also as integ test and share methods

We previously had no integration test covering long terms across several shards, only a bwc test. Related to #14948
commit 16b84b3999
parent 609d9db470
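
In effect, the indexing and assertion logic that used to live inline in the bwc test moves into a new shared helper class, and both the bwc test and the regular integration test now delegate to it. A minimal sketch of the resulting call pattern, using only class and method names that appear in the diff below (the test-framework types come from the Elasticsearch test infrastructure):

import java.io.IOException;
import java.util.concurrent.ExecutionException;

import org.elasticsearch.test.ESBackcompatTestCase;
import org.elasticsearch.test.search.aggregations.bucket.SharedSignificantTermsTestMethods;

public class SignificantTermsBackwardCompatibilityIT extends ESBackcompatTestCase {
    /**
     * The bwc test keeps its coverage but delegates the indexing and the
     * significant-terms assertions to the shared helper.
     */
    public void testAggregateAndCheckFromSeveralShards() throws IOException, ExecutionException, InterruptedException {
        SharedSignificantTermsTestMethods.aggregateAndCheckFromSeveralShards(this);
    }
}

SignificantTermsSignificanceScoreIT gains an equivalent testReduceFromSeveralShards() with the same one-line body, so the multi-shard reduce path is now also exercised in the ordinary integration-test run, not only during backwards-compatibility testing.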
SignificantTermsBackwardCompatibilityIT.java (modified):

@@ -18,91 +18,18 @@
  */

 package org.elasticsearch.search.aggregations.bucket;

-import org.elasticsearch.action.index.IndexRequestBuilder;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.search.aggregations.Aggregation;
-import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
-import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder;
-import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder;
 import org.elasticsearch.test.ESBackcompatTestCase;
+import org.elasticsearch.test.search.aggregations.bucket.SharedSignificantTermsTestMethods;

 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
 import java.util.concurrent.ExecutionException;

-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
-import static org.hamcrest.Matchers.equalTo;
-
-/**
- */
 public class SignificantTermsBackwardCompatibilityIT extends ESBackcompatTestCase {

-    static final String INDEX_NAME = "testidx";
-    static final String DOC_TYPE = "doc";
-    static final String TEXT_FIELD = "text";
-    static final String CLASS_FIELD = "class";
-
     /**
      * Test for streaming significant terms buckets to old es versions.
      */
-    public void testBucketStreaming() throws IOException, ExecutionException, InterruptedException {
-        logger.debug("testBucketStreaming: indexing documents");
-        String type = randomBoolean() ? "string" : "long";
-        String settings = "{\"index.number_of_shards\": 5, \"index.number_of_replicas\": 0}";
-        index01Docs(type, settings);
-        ensureGreen();
-        logClusterState();
-        checkSignificantTermsAggregationCorrect();
-        logger.debug("testBucketStreaming: done testing significant terms while upgrading");
-    }
-
-    private void index01Docs(String type, String settings) throws ExecutionException, InterruptedException {
-        String mappings = "{\"doc\": {\"properties\":{\"" + TEXT_FIELD + "\": {\"type\":\"" + type + "\"},\"" + CLASS_FIELD
-                + "\": {\"type\":\"string\"}}}}";
-        assertAcked(prepareCreate(INDEX_NAME).setSettings(settings).addMapping("doc", mappings));
-        String[] gb = {"0", "1"};
-        List<IndexRequestBuilder> indexRequestBuilderList = new ArrayList<>();
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1")
-                .setSource(TEXT_FIELD, "1", CLASS_FIELD, "1"));
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "2")
-                .setSource(TEXT_FIELD, "1", CLASS_FIELD, "1"));
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "3")
-                .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0"));
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "4")
-                .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0"));
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "5")
-                .setSource(TEXT_FIELD, gb, CLASS_FIELD, "1"));
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "6")
-                .setSource(TEXT_FIELD, gb, CLASS_FIELD, "0"));
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "7")
-                .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0"));
-        indexRandom(true, indexRequestBuilderList);
-    }
-
-    private void checkSignificantTermsAggregationCorrect() {
-        SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
-                .addAggregation(new TermsBuilder("class").field(CLASS_FIELD).subAggregation(
-                        new SignificantTermsBuilder("sig_terms")
-                                .field(TEXT_FIELD)))
-                .execute()
-                .actionGet();
-        assertSearchResponse(response);
-        StringTerms classes = response.getAggregations().get("class");
-        assertThat(classes.getBuckets().size(), equalTo(2));
-        for (Terms.Bucket classBucket : classes.getBuckets()) {
-            Map<String, Aggregation> aggs = classBucket.getAggregations().asMap();
-            assertTrue(aggs.containsKey("sig_terms"));
-            SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms");
-            assertThat(agg.getBuckets().size(), equalTo(1));
-            String term = agg.iterator().next().getKeyAsString();
-            String classTerm = classBucket.getKeyAsString();
-            assertTrue(term.equals(classTerm));
-        }
-    }
+    public void testAggregateAndCheckFromSeveralShards() throws IOException, ExecutionException, InterruptedException {
+        SharedSignificantTermsTestMethods.aggregateAndCheckFromSeveralShards(this);
     }
 }
SignificantTermsSignificanceScoreIT.java (modified):

@@ -23,7 +23,6 @@ import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -39,54 +38,39 @@ import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuil
 import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;
 import org.elasticsearch.search.aggregations.bucket.script.NativeSignificanceScoreScriptNoParams;
 import org.elasticsearch.search.aggregations.bucket.script.NativeSignificanceScoreScriptWithParams;
-import org.elasticsearch.search.aggregations.bucket.significant.SignificantStringTerms;
 import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
 import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorFactory;
 import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ScriptHeuristic;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicBuilder;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams;
+import org.elasticsearch.search.aggregations.bucket.significant.heuristics.*;
 import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.test.search.aggregations.bucket.SharedSignificantTermsTestMethods;
+import org.junit.Test;

 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.concurrent.ExecutionException;

 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
-import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
-import static org.hamcrest.Matchers.closeTo;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.greaterThan;
-import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.*;

 /**
  *
  */
 @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE)
 public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {

     static final String INDEX_NAME = "testidx";
     static final String DOC_TYPE = "doc";
     static final String TEXT_FIELD = "text";
     static final String CLASS_FIELD = "class";

     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return pluginList(CustomSignificanceHeuristicPlugin.class);
@@ -99,7 +83,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
     public void testPlugin() throws Exception {
         String type = randomBoolean() ? "string" : "long";
         String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}";
-        index01Docs(type, settings);
+        SharedSignificantTermsTestMethods.index01Docs(type, settings, this);
         SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
                 .addAggregation(new TermsBuilder("class")
                         .field(CLASS_FIELD)
@@ -252,7 +236,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
     public void testXContentResponse() throws Exception {
         String type = randomBoolean() ? "string" : "long";
         String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}";
-        index01Docs(type, settings);
+        SharedSignificantTermsTestMethods.index01Docs(type, settings, this);
         SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
                 .addAggregation(new TermsBuilder("class").field(CLASS_FIELD).subAggregation(new SignificantTermsBuilder("sig_terms").field(TEXT_FIELD)))
                 .execute()
@@ -327,7 +311,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
     public void testBackgroundVsSeparateSet() throws Exception {
         String type = randomBoolean() ? "string" : "long";
         String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}";
-        index01Docs(type, settings);
+        SharedSignificantTermsTestMethods.index01Docs(type, settings, this);
         testBackgroundVsSeparateSet(new MutualInformation.MutualInformationBuilder(true, true), new MutualInformation.MutualInformationBuilder(true, false));
         testBackgroundVsSeparateSet(new ChiSquare.ChiSquareBuilder(true, true), new ChiSquare.ChiSquareBuilder(true, false));
         testBackgroundVsSeparateSet(new GND.GNDBuilder(true), new GND.GNDBuilder(false));
@@ -388,28 +372,6 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
         assertThat(score11Background, equalTo(score11SeparateSets));
     }

-    private void index01Docs(String type, String settings) throws ExecutionException, InterruptedException {
-        String mappings = "{\"doc\": {\"properties\":{\"text\": {\"type\":\"" + type + "\"}}}}";
-        assertAcked(prepareCreate(INDEX_NAME).setSettings(settings).addMapping("doc", mappings));
-        String[] gb = {"0", "1"};
-        List<IndexRequestBuilder> indexRequestBuilderList = new ArrayList<>();
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1")
-                .setSource(TEXT_FIELD, "1", CLASS_FIELD, "1"));
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "2")
-                .setSource(TEXT_FIELD, "1", CLASS_FIELD, "1"));
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "3")
-                .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0"));
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "4")
-                .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0"));
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "5")
-                .setSource(TEXT_FIELD, gb, CLASS_FIELD, "1"));
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "6")
-                .setSource(TEXT_FIELD, gb, CLASS_FIELD, "0"));
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "7")
-                .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0"));
-        indexRandom(true, false, indexRequestBuilderList);
-    }
-
     public void testScoresEqualForPositiveAndNegative() throws Exception {
         indexEqualTestData();
         testScoresEqualForPositiveAndNegative(new MutualInformation.MutualInformationBuilder(true, true));
@@ -528,4 +490,9 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
         }
         indexRandom(true, indexRequestBuilderList);
     }

+    public void testReduceFromSeveralShards() throws IOException, ExecutionException, InterruptedException {
+        SharedSignificantTermsTestMethods.aggregateAndCheckFromSeveralShards(this);
+    }
+
 }
SharedSignificantTermsTestMethods.java (new file):

@@ -0,0 +1,103 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.test.search.aggregations.bucket;

import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESTestCase;
import org.junit.Assert;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;

import static org.elasticsearch.test.ESIntegTestCase.client;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;

public class SharedSignificantTermsTestMethods {
    public static final String INDEX_NAME = "testidx";
    public static final String DOC_TYPE = "doc";
    public static final String TEXT_FIELD = "text";
    public static final String CLASS_FIELD = "class";

    public static void aggregateAndCheckFromSeveralShards(ESIntegTestCase testCase) throws ExecutionException, InterruptedException {
        String type = ESTestCase.randomBoolean() ? "string" : "long";
        String settings = "{\"index.number_of_shards\": 5, \"index.number_of_replicas\": 0}";
        index01Docs(type, settings, testCase);
        testCase.ensureGreen();
        testCase.logClusterState();
        checkSignificantTermsAggregationCorrect(testCase);
    }

    private static void checkSignificantTermsAggregationCorrect(ESIntegTestCase testCase) {
        SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
                .addAggregation(new TermsBuilder("class").field(CLASS_FIELD).subAggregation(
                        new SignificantTermsBuilder("sig_terms")
                                .field(TEXT_FIELD)))
                .execute()
                .actionGet();
        assertSearchResponse(response);
        StringTerms classes = response.getAggregations().get("class");
        Assert.assertThat(classes.getBuckets().size(), equalTo(2));
        for (Terms.Bucket classBucket : classes.getBuckets()) {
            Map<String, Aggregation> aggs = classBucket.getAggregations().asMap();
            Assert.assertTrue(aggs.containsKey("sig_terms"));
            SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms");
            Assert.assertThat(agg.getBuckets().size(), equalTo(1));
            SignificantTerms.Bucket sigBucket = agg.iterator().next();
            String term = sigBucket.getKeyAsString();
            String classTerm = classBucket.getKeyAsString();
            Assert.assertTrue(term.equals(classTerm));
        }
    }

    public static void index01Docs(String type, String settings, ESIntegTestCase testCase) throws ExecutionException, InterruptedException {
        String mappings = "{\"doc\": {\"properties\":{\"text\": {\"type\":\"" + type + "\"}}}}";
        assertAcked(testCase.prepareCreate(INDEX_NAME).setSettings(settings).addMapping("doc", mappings));
        String[] gb = {"0", "1"};
        List<IndexRequestBuilder> indexRequestBuilderList = new ArrayList<>();
        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1")
                .setSource(TEXT_FIELD, "1", CLASS_FIELD, "1"));
        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "2")
                .setSource(TEXT_FIELD, "1", CLASS_FIELD, "1"));
        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "3")
                .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0"));
        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "4")
                .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0"));
        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "5")
                .setSource(TEXT_FIELD, gb, CLASS_FIELD, "1"));
        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "6")
                .setSource(TEXT_FIELD, gb, CLASS_FIELD, "0"));
        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "7")
                .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0"));
        testCase.indexRandom(true, false, indexRequestBuilderList);
    }
}