Adds mutate function to various tests (#25999)
* Adds mutate function to various tests (relates to #25929)
* fix test
* implements mutate function for all single bucket aggs
* review comments
* convert getMutateFunction to mutateInstance
parent 53dd8afaea
commit 87c6e63e73
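For context: every test touched below supplies EqualsHashCodeTestUtils.checkEqualsAndHashCode with a copy function and a mutate function. The copy must compare equal to the original (with a matching hashCode), while the mutated instance must not. A minimal sketch of that contract, reusing the RefreshStats constructor and getters that appear later in this diff (the test class name here is made up for illustration):

import java.io.IOException;

import org.elasticsearch.index.refresh.RefreshStats;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.EqualsHashCodeTestUtils;

public class MutateContractSketchTests extends ESTestCase {
    public void testRefreshStatsEqualsContract() throws IOException {
        RefreshStats original = new RefreshStats(between(0, 1000), between(0, 1000), between(0, 100));
        EqualsHashCodeTestUtils.checkEqualsAndHashCode(
                original,
                // copy: field-for-field identical, so equals() and hashCode() must agree
                stats -> new RefreshStats(stats.getTotal(), stats.getTotalTimeInMillis(), stats.getListeners()),
                // mutate: change exactly one field, so equals() must return false
                stats -> new RefreshStats(stats.getTotal() + 1, stats.getTotalTimeInMillis(), stats.getListeners()));
    }
}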
InternalAggregation.java

@@ -18,6 +18,7 @@
  */
 package org.elasticsearch.search.aggregations;

+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.NamedWriteable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -234,4 +235,9 @@ public abstract class InternalAggregation implements Aggregation, ToXContent, Na
         return this == obj;
     }

+    @Override
+    public String toString() {
+        return Strings.toString(this);
+    }
+
 }
FieldCapabilitiesTests.java

@@ -22,6 +22,8 @@ package org.elasticsearch.action.fieldcaps;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.test.AbstractWireSerializingTestCase;

+import java.util.Arrays;
+
 import static org.hamcrest.Matchers.equalTo;

 public class FieldCapabilitiesTests extends AbstractWireSerializingTestCase<FieldCapabilities> {
@@ -106,4 +108,73 @@ public class FieldCapabilitiesTests extends AbstractWireSerializingTestCase<Fiel
             randomAlphaOfLengthBetween(5, 20), randomBoolean(), randomBoolean(),
             indices, nonSearchableIndices, nonAggregatableIndices);
     }
+
+    @Override
+    protected FieldCapabilities mutateInstance(FieldCapabilities instance) {
+        String name = instance.getName();
+        String type = instance.getType();
+        boolean isSearchable = instance.isSearchable();
+        boolean isAggregatable = instance.isAggregatable();
+        String[] indices = instance.indices();
+        String[] nonSearchableIndices = instance.nonSearchableIndices();
+        String[] nonAggregatableIndices = instance.nonAggregatableIndices();
+        switch (between(0, 6)) {
+        case 0:
+            name += randomAlphaOfLengthBetween(1, 10);
+            break;
+        case 1:
+            type += randomAlphaOfLengthBetween(1, 10);
+            break;
+        case 2:
+            isSearchable = isSearchable == false;
+            break;
+        case 3:
+            isAggregatable = isAggregatable == false;
+            break;
+        case 4:
+            String[] newIndices;
+            int startIndicesPos = 0;
+            if (indices == null) {
+                newIndices = new String[between(1, 10)];
+            } else {
+                newIndices = Arrays.copyOf(indices, indices.length + between(1, 10));
+                startIndicesPos = indices.length;
+            }
+            for (int i = startIndicesPos; i < newIndices.length; i++) {
+                newIndices[i] = randomAlphaOfLengthBetween(5, 20);
+            }
+            indices = newIndices;
+            break;
+        case 5:
+            String[] newNonSearchableIndices;
+            int startNonSearchablePos = 0;
+            if (nonSearchableIndices == null) {
+                newNonSearchableIndices = new String[between(1, 10)];
+            } else {
+                newNonSearchableIndices = Arrays.copyOf(nonSearchableIndices, nonSearchableIndices.length + between(1, 10));
+                startNonSearchablePos = nonSearchableIndices.length;
+            }
+            for (int i = startNonSearchablePos; i < newNonSearchableIndices.length; i++) {
+                newNonSearchableIndices[i] = randomAlphaOfLengthBetween(5, 20);
+            }
+            nonSearchableIndices = newNonSearchableIndices;
+            break;
+        case 6:
+        default:
+            String[] newNonAggregatableIndices;
+            int startNonAggregatablePos = 0;
+            if (nonAggregatableIndices == null) {
+                newNonAggregatableIndices = new String[between(1, 10)];
+            } else {
+                newNonAggregatableIndices = Arrays.copyOf(nonAggregatableIndices, nonAggregatableIndices.length + between(1, 10));
+                startNonAggregatablePos = nonAggregatableIndices.length;
+            }
+            for (int i = startNonAggregatablePos; i < newNonAggregatableIndices.length; i++) {
+                newNonAggregatableIndices[i] = randomAlphaOfLengthBetween(5, 20);
+            }
+            nonAggregatableIndices = newNonAggregatableIndices;
+            break;
+        }
+        return new FieldCapabilities(name, type, isSearchable, isAggregatable, indices, nonSearchableIndices, nonAggregatableIndices);
+    }
 }
RefreshStatsTests.java

@@ -19,10 +19,8 @@

 package org.elasticsearch.index.refresh;

-import org.elasticsearch.Version;
 import org.elasticsearch.test.AbstractStreamableTestCase;
-
-import java.io.IOException;
+import org.elasticsearch.test.EqualsHashCodeTestUtils.MutateFunction;

 public class RefreshStatsTests extends AbstractStreamableTestCase<RefreshStats> {
     @Override
@@ -34,4 +32,26 @@ public class RefreshStatsTests extends AbstractStreamableTestCase<RefreshStats>
     protected RefreshStats createBlankInstance() {
         return new RefreshStats();
     }
+
+    @Override
+    protected MutateFunction<RefreshStats> getMutateFunction() {
+        return instance -> {
+            long total = instance.getTotal();
+            long totalInMillis = instance.getTotalTimeInMillis();
+            int listeners = instance.getListeners();
+            switch (randomInt(2)) {
+            case 0:
+                total += between(1, 2000);
+                break;
+            case 1:
+                totalInMillis += between(1, 2000);
+                break;
+            case 2:
+            default:
+                listeners += between(1, 2000);
+                break;
+            }
+            return new RefreshStats(total, totalInMillis, listeners);
+        };
+    }
 }
ScriptMetaDataTests.java

@@ -120,9 +120,9 @@ public class ScriptMetaDataTests extends AbstractSerializingTestCase<ScriptMetaD
         assertEquals("1 + 1", result.getStoredScript("_id").getSource());
     }

-    private ScriptMetaData randomScriptMetaData(XContentType sourceContentType) throws IOException {
+    private ScriptMetaData randomScriptMetaData(XContentType sourceContentType, int minNumberScripts) throws IOException {
         ScriptMetaData.Builder builder = new ScriptMetaData.Builder(null);
-        int numScripts = scaledRandomIntBetween(0, 32);
+        int numScripts = scaledRandomIntBetween(minNumberScripts, 32);
         for (int i = 0; i < numScripts; i++) {
             XContentBuilder sourceBuilder = XContentBuilder.builder(sourceContentType.xContent());
             sourceBuilder.startObject().field("script").startObject()
@@ -137,7 +137,7 @@ public class ScriptMetaDataTests extends AbstractSerializingTestCase<ScriptMetaD
     @Override
     protected ScriptMetaData createTestInstance() {
         try {
-            return randomScriptMetaData(randomFrom(XContentType.values()));
+            return randomScriptMetaData(randomFrom(XContentType.values()), 0);
         } catch (IOException ioe) {
             throw new UncheckedIOException(ioe);
         }
@@ -148,6 +148,15 @@ public class ScriptMetaDataTests extends AbstractSerializingTestCase<ScriptMetaD
         return ScriptMetaData::new;
     }

+    @Override
+    protected ScriptMetaData mutateInstance(ScriptMetaData instance) throws IOException {
+        // ScriptMetaData doesn't allow us to see the scripts inside it so
+        // the best we can do here is create a new random instance and rely
+        // on the fact that the new instance is very unlikely to be equal to
+        // the old one
+        return randomScriptMetaData(randomFrom(XContentType.values()), 1);
+    }
+
     @Override
     protected ScriptMetaData doParseInstance(XContentParser parser) {
         try {
StoredScriptSourceTests.java

@@ -64,5 +64,37 @@ public class StoredScriptSourceTests extends AbstractSerializingTestCase<StoredS
         return StoredScriptSource::new;
     }

+    @Override
+    protected StoredScriptSource mutateInstance(StoredScriptSource instance) throws IOException {
+        String source = instance.getSource();
+        String lang = instance.getLang();
+        Map<String, String> options = instance.getOptions();
+
+        XContentType newXContentType = randomFrom(XContentType.JSON, XContentType.YAML);
+        XContentBuilder newTemplate = XContentBuilder.builder(newXContentType.xContent());
+        newTemplate.startObject();
+        newTemplate.startObject("query");
+        newTemplate.startObject("match");
+        newTemplate.field("body", "{{query_string}}");
+        newTemplate.endObject();
+        newTemplate.endObject();
+        newTemplate.endObject();
+
+        switch (between(0, 3)) {
+        case 0:
+            source = newTemplate.string();
+            break;
+        case 1:
+            lang = randomAlphaOfLengthBetween(1, 20);
+            break;
+        case 2:
+            options = new HashMap<>(options);
+            options.put(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
+            break;
+        case 3:
+        default:
+            return new StoredScriptSource(newTemplate.string());
+        }
+        return new StoredScriptSource(lang, source, options);
+    }
 }
InternalFiltersTests.java

@@ -23,9 +23,11 @@ import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase;
 import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
+import org.elasticsearch.search.aggregations.bucket.filter.InternalFilters.InternalBucket;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
@@ -90,4 +92,30 @@ public class InternalFiltersTests extends InternalMultiBucketAggregationTestCase
         return ParsedFilters.class;
     }

+    @Override
+    protected InternalFilters mutateInstance(InternalFilters instance) {
+        String name = instance.getName();
+        List<InternalBucket> buckets = instance.getBuckets();
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 2)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            buckets = new ArrayList<>(buckets);
+            buckets.add(new InternalFilters.InternalBucket("test", randomIntBetween(0, 1000), InternalAggregations.EMPTY, keyed));
+            break;
+        case 2:
+        default:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        }
+        return new InternalFilters(name, buckets, keyed, pipelineAggregators, metaData);
+    }
 }
CollapseBuilderTests.java

@@ -102,6 +102,32 @@ public class CollapseBuilderTests extends AbstractSerializingTestCase<CollapseBu
         return CollapseBuilder::new;
     }

+    @Override
+    protected CollapseBuilder mutateInstance(CollapseBuilder instance) throws IOException {
+        CollapseBuilder newBuilder;
+        switch (between(0, 2)) {
+        case 0:
+            newBuilder = new CollapseBuilder(instance.getField() + randomAlphaOfLength(10));
+            newBuilder.setMaxConcurrentGroupRequests(instance.getMaxConcurrentGroupRequests());
+            newBuilder.setInnerHits(instance.getInnerHits());
+            break;
+        case 1:
+            newBuilder = copyInstance(instance);
+            newBuilder.setMaxConcurrentGroupRequests(instance.getMaxConcurrentGroupRequests() + between(1, 20));
+            break;
+        case 2:
+        default:
+            newBuilder = copyInstance(instance);
+            List<InnerHitBuilder> innerHits = newBuilder.getInnerHits();
+            for (int i = 0; i < between(1, 5); i++) {
+                innerHits.add(InnerHitBuilderTests.randomInnerHits());
+            }
+            newBuilder.setInnerHits(innerHits);
+            break;
+        }
+        return newBuilder;
+    }
+
     @Override
     protected NamedWriteableRegistry getNamedWriteableRegistry() {
         return namedWriteableRegistry;
RepositoriesMetaDataSerializationTests.java

@@ -53,6 +53,18 @@ public class RepositoriesMetaDataSerializationTests extends AbstractDiffableSeri
         return RepositoriesMetaData::new;
     }

+    @Override
+    protected Custom mutateInstance(Custom instance) {
+        List<RepositoryMetaData> entries = new ArrayList<>(((RepositoriesMetaData) instance).repositories());
+        boolean addEntry = entries.isEmpty() ? true : randomBoolean();
+        if (addEntry) {
+            entries.add(new RepositoryMetaData(randomAlphaOfLength(10), randomAlphaOfLength(10), randomSettings()));
+        } else {
+            entries.remove(randomIntBetween(0, entries.size() - 1));
+        }
+        return new RepositoriesMetaData(entries.toArray(new RepositoryMetaData[entries.size()]));
+    }
+
     public Settings randomSettings() {
         if (randomBoolean()) {
             return Settings.EMPTY;
SnapshotsInProgressSerializationTests.java

@@ -115,4 +115,16 @@ public class SnapshotsInProgressSerializationTests extends AbstractDiffableWireS
         return new NamedWriteableRegistry(ClusterModule.getNamedWriteables());
     }

+    @Override
+    protected Custom mutateInstance(Custom instance) {
+        List<Entry> entries = new ArrayList<>(((SnapshotsInProgress) instance).entries());
+        boolean addEntry = entries.isEmpty() ? true : randomBoolean();
+        if (addEntry) {
+            entries.add(randomSnapshot());
+        } else {
+            entries.remove(randomIntBetween(0, entries.size() - 1));
+        }
+        return new SnapshotsInProgress(entries);
+    }
+
 }
InternalMatrixStatsTests.java

@@ -37,6 +37,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.function.Predicate;
@@ -76,7 +77,7 @@ public class InternalMatrixStatsTests extends InternalAggregationTestCase<Intern
         RunningStats runningStats = new RunningStats();
         runningStats.add(fields, values);
         MatrixStatsResults matrixStatsResults = hasMatrixStatsResults ? new MatrixStatsResults(runningStats) : null;
-        return new InternalMatrixStats(name, 1L, runningStats, matrixStatsResults, Collections.emptyList(), Collections.emptyMap());
+        return new InternalMatrixStats(name, 1L, runningStats, matrixStatsResults, Collections.emptyList(), metaData);
     }

     @Override
@@ -84,6 +85,45 @@ public class InternalMatrixStatsTests extends InternalAggregationTestCase<Intern
         return InternalMatrixStats::new;
     }

+    @Override
+    protected InternalMatrixStats mutateInstance(InternalMatrixStats instance) {
+        String name = instance.getName();
+        long docCount = instance.getDocCount();
+        RunningStats runningStats = instance.getStats();
+        MatrixStatsResults matrixStatsResults = instance.getResults();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 3)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            double[] values = new double[fields.length];
+            for (int i = 0; i < fields.length; i++) {
+                values[i] = randomDouble() * 200;
+            }
+            runningStats = new RunningStats();
+            runningStats.add(fields, values);
+            break;
+        case 2:
+            if (matrixStatsResults == null) {
+                matrixStatsResults = new MatrixStatsResults(runningStats);
+            } else {
+                matrixStatsResults = null;
+            }
+            break;
+        case 3:
+        default:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        }
+        return new InternalMatrixStats(name, docCount, runningStats, matrixStatsResults, Collections.emptyList(), metaData);
+    }
+
     @Override
     public void testReduceRandom() {
         int numValues = 10000;
InternalSingleBucketAggregationTestCase.java

@@ -79,6 +79,39 @@ public abstract class InternalSingleBucketAggregationTestCase<T extends Internal
         return createTestInstance(name, docCount, subAggregationsSupplier.get(), pipelineAggregators, metaData);
     }

+    @Override
+    protected T mutateInstance(T instance) {
+        String name = instance.getName();
+        long docCount = instance.getDocCount();
+        InternalAggregations aggregations = instance.getAggregations();
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 3)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            docCount += between(1, 2000);
+            break;
+        case 2:
+            List<InternalAggregation> aggs = new ArrayList<>();
+            aggs.add(new InternalMax("new_max", randomDouble(), randomNumericDocValueFormat(), emptyList(), emptyMap()));
+            aggs.add(new InternalMin("new_min", randomDouble(), randomNumericDocValueFormat(), emptyList(), emptyMap()));
+            aggregations = new InternalAggregations(aggs);
+            break;
+        case 3:
+        default:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        }
+        return createTestInstance(name, docCount, aggregations, pipelineAggregators, metaData);
+    }
+
     @Override
     protected final void assertReduced(T reduced, List<T> inputs) {
         assertEquals(inputs.stream().mapToLong(InternalSingleBucketAggregation::getDocCount).sum(), reduced.getDocCount());
AbstractWireSerializingTestCase.java

@@ -26,8 +26,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.io.stream.Writeable.Reader;
-import org.elasticsearch.test.EqualsHashCodeTestUtils.CopyFunction;
-import org.elasticsearch.test.EqualsHashCodeTestUtils.MutateFunction;

 import java.io.IOException;
 import java.util.Collections;
@@ -48,21 +46,11 @@ public abstract class AbstractWireSerializingTestCase<T extends Writeable> exten
     protected abstract Reader<T> instanceReader();

-    /**
-     * Returns a {@link CopyFunction} that can be used to make an exact copy of
-     * the given instance. This defaults to a function that uses
-     * {@link #copyInstance(Writeable)} to create the copy.
-     */
-    protected CopyFunction<T> getCopyFunction() {
-        return (original) -> copyInstance(original);
-    }
-
     /**
-     * Returns a {@link MutateFunction} that can be used to make create a copy
-     * of the given instance that is different to this instance. This defaults
-     * to null.
+     * Returns an instance which is mutated slightly so it should not be equal
+     * to the given instance.
      */
-    protected MutateFunction<T> getMutateFunction() {
+    // TODO: Make this abstract when all sub-classes implement this (https://github.com/elastic/elasticsearch/issues/25929)
+    protected T mutateInstance(T instance) throws IOException {
         return null;
     }

@@ -72,7 +60,7 @@ public abstract class AbstractWireSerializingTestCase<T extends Writeable> exten
     */
    public void testEqualsAndHashcode() throws IOException {
        for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) {
-            EqualsHashCodeTestUtils.checkEqualsAndHashCode(createTestInstance(), getCopyFunction(), getMutateFunction());
+            EqualsHashCodeTestUtils.checkEqualsAndHashCode(createTestInstance(), this::copyInstance, this::mutateInstance);
        }
    }
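For reference, the checkEqualsAndHashCode harness that now receives this::copyInstance and this::mutateInstance enforces roughly the following. This is a sketch, not the actual EqualsHashCodeTestUtils implementation: the copy()/mutate() method names on the functional interfaces are assumptions, and the null check mirrors the fact that mutateInstance still defaults to null until the TODO above is resolved.

import java.io.IOException;

import org.elasticsearch.test.EqualsHashCodeTestUtils.CopyFunction;
import org.elasticsearch.test.EqualsHashCodeTestUtils.MutateFunction;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;

public class EqualsContractSketch {
    static <T> void checkEqualsAndHashCodeSketch(T original, CopyFunction<T> copyFunction,
            MutateFunction<T> mutateFunction) throws IOException {
        T copy = copyFunction.copy(original);                // exact copy of the instance
        assertEquals(original, copy);                        // copies must be equal...
        assertEquals(original.hashCode(), copy.hashCode());  // ...with matching hashCodes
        if (mutateFunction != null) {                        // mutation is still opt-in (#25929)
            T mutation = mutateFunction.mutate(original);    // one field changed
            assertNotEquals(original, mutation);             // so equality must break
        }
    }
}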