Add tests for reducing top hits
Also adds many `equals` and `hashCode` implementations, moves the failure printing in `MatchAssertion` into a common spot, and exposes it as `assertEqualsWithErrorMessageFromXContent`, which does an object equality test and then uses `toXContent` to print the differences when the objects are not equal.

Relates to #22278
This commit is contained in:
parent 1baa884ab7
commit 8abd4101eb
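For context, a rough sketch of how the new helper is meant to be used (the `PointEqualityTests` and `Point` classes below are invented for illustration, not part of this commit): implement `equals`/`hashCode` and `toXContent` on the object, and `assertEqualsWithErrorMessageFromXContent` uses the `toXContent` output to build a field-by-field failure message when the equality check fails.

import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;
import java.util.Objects;

public class PointEqualityTests extends ESTestCase {
    static class Point implements ToXContent {
        final int x;
        final int y;

        Point(int x, int y) {
            this.x = x;
            this.y = y;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            // the helper wraps this call in startObject()/endObject(), so only the fields are emitted here
            return builder.field("x", x).field("y", y);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null || getClass() != obj.getClass()) {
                return false;
            }
            Point other = (Point) obj;
            return x == other.x && y == other.y;
        }

        @Override
        public int hashCode() {
            return Objects.hash(x, y);
        }
    }

    public void testEqualPointsPass() {
        // passes silently; with unequal points it would throw an AssertionError listing each
        // differing field as "expected [...] but was [...]"
        assertEqualsWithErrorMessageFromXContent(new Point(1, 2), new Point(1, 2));
    }
}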
@@ -18,6 +18,7 @@
 */
package org.elasticsearch.search.aggregations.metrics.tophits;

import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocs;

@@ -35,6 +36,7 @@ import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHits;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

@@ -86,6 +88,14 @@ public class InternalTopHits extends InternalMetricsAggregation implements TopHits
        return searchHits;
    }

    TopDocs getTopDocs() {
        return topDocs;
    }

    int getSize() {
        return size;
    }

    @Override
    public InternalAggregation doReduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
        InternalSearchHits[] shardHits = new InternalSearchHits[aggregations.size()];

@@ -145,4 +155,50 @@ public class InternalTopHits extends InternalMetricsAggregation implements TopHits
        searchHits.toXContent(builder, params);
        return builder;
    }

    // Equals and hashcode implemented for testing round trips
    @Override
    protected boolean doEquals(Object obj) {
        InternalTopHits other = (InternalTopHits) obj;
        if (from != other.from) return false;
        if (size != other.size) return false;
        if (topDocs.totalHits != other.topDocs.totalHits) return false;
        if (topDocs.scoreDocs.length != other.topDocs.scoreDocs.length) return false;
        for (int d = 0; d < topDocs.scoreDocs.length; d++) {
            ScoreDoc thisDoc = topDocs.scoreDocs[d];
            ScoreDoc otherDoc = other.topDocs.scoreDocs[d];
            if (thisDoc.doc != otherDoc.doc) return false;
            if (thisDoc.score != otherDoc.score) return false;
            if (thisDoc.shardIndex != otherDoc.shardIndex) return false;
            if (thisDoc instanceof FieldDoc) {
                if (false == (otherDoc instanceof FieldDoc)) return false;
                FieldDoc thisFieldDoc = (FieldDoc) thisDoc;
                FieldDoc otherFieldDoc = (FieldDoc) otherDoc;
                if (thisFieldDoc.fields.length != otherFieldDoc.fields.length) return false;
                for (int f = 0; f < thisFieldDoc.fields.length; f++) {
                    if (false == thisFieldDoc.fields[f].equals(otherFieldDoc.fields[f])) return false;
                }
            }
        }
        return searchHits.equals(other.searchHits);
    }

    @Override
    protected int doHashCode() {
        int hashCode = from;
        hashCode = 31 * hashCode + size;
        hashCode = 31 * hashCode + topDocs.totalHits;
        for (int d = 0; d < topDocs.scoreDocs.length; d++) {
            ScoreDoc doc = topDocs.scoreDocs[d];
            hashCode = 31 * hashCode + doc.doc;
            hashCode = 31 * hashCode + Float.floatToIntBits(doc.score);
            hashCode = 31 * hashCode + doc.shardIndex;
            if (doc instanceof FieldDoc) {
                FieldDoc fieldDoc = (FieldDoc) doc;
                hashCode = 31 * hashCode + Arrays.hashCode(fieldDoc.fields);
            }
        }
        hashCode = 31 * hashCode + searchHits.hashCode();
        return hashCode;
    }
}

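Lucene's `ScoreDoc` and `FieldDoc` don't override `equals` or `hashCode`, which is why `doEquals` above compares `doc`, `score`, `shardIndex`, and the sort fields by hand. A stand-alone sketch of the same idea (the `ScoreDocsEquality` class is invented for illustration):

import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.ScoreDoc;

import java.util.Arrays;

final class ScoreDocsEquality {
    private ScoreDocsEquality() {}

    /** Field-by-field comparison of two ScoreDoc arrays, mirroring the approach in doEquals above. */
    static boolean scoreDocsEqual(ScoreDoc[] left, ScoreDoc[] right) {
        if (left.length != right.length) {
            return false;
        }
        for (int i = 0; i < left.length; i++) {
            ScoreDoc l = left[i];
            ScoreDoc r = right[i];
            if (l.doc != r.doc || l.score != r.score || l.shardIndex != r.shardIndex) {
                return false;
            }
            if ((l instanceof FieldDoc) != (r instanceof FieldDoc)) {
                return false;
            }
            if (l instanceof FieldDoc && false == Arrays.equals(((FieldDoc) l).fields, ((FieldDoc) r).fields)) {
                return false;
            }
        }
        return true;
    }
}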
@@ -49,6 +49,7 @@ import org.elasticsearch.search.lookup.SourceLookup;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;

@@ -811,6 +812,31 @@ public class InternalSearchHit implements SearchHit {
        }
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        InternalSearchHit other = (InternalSearchHit) obj;
        return Objects.equals(id, other.id)
                && Objects.equals(type, other.type)
                && Objects.equals(nestedIdentity, other.nestedIdentity)
                && Objects.equals(version, other.version)
                && Objects.equals(source, other.source)
                && Objects.equals(fields, other.fields)
                && Objects.equals(highlightFields(), other.highlightFields())
                && Arrays.equals(matchedQueries, other.matchedQueries)
                && Objects.equals(explanation, other.explanation)
                && Objects.equals(shard, other.shard)
                && Objects.equals(innerHits, other.innerHits);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, type, nestedIdentity, version, source, fields, highlightFields(), Arrays.hashCode(matchedQueries),
                explanation, shard, innerHits);
    }

    public static final class InternalNestedIdentity implements NestedIdentity, Writeable, ToXContent {

        private Text field;

@@ -28,6 +28,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;

public class InternalSearchHitField implements SearchHitField {

@@ -109,4 +110,19 @@ public class InternalSearchHitField implements SearchHitField {
            out.writeGenericValue(value);
        }
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        InternalSearchHitField other = (InternalSearchHitField) obj;
        return Objects.equals(name, other.name)
                && Objects.equals(values, other.values);
    }

    @Override
    public int hashCode() {
        return Objects.hash(name, values);
    }
}

@@ -32,6 +32,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;

import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;

@@ -208,4 +209,20 @@ public class InternalSearchHits implements SearchHits {
            }
        }
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        InternalSearchHits other = (InternalSearchHits) obj;
        return Objects.equals(totalHits, other.totalHits)
                && Objects.equals(maxScore, other.maxScore)
                && Arrays.equals(hits, other.hits);
    }

    @Override
    public int hashCode() {
        return Objects.hash(totalHits, maxScore, Arrays.hashCode(hits));
    }
}

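A note on the `Arrays.hashCode(hits)` argument above: handing the `hits` array itself to `Objects.hash` as one of several arguments would hash it by identity, so two `InternalSearchHits` with equal contents could still get different hash codes. A tiny stand-alone demo (class name invented for illustration):

import java.util.Arrays;
import java.util.Objects;

class ArrayHashingDemo {
    public static void main(String[] args) {
        String[] a = {"x", "y"};
        String[] b = {"x", "y"};
        // false: the array is hashed by identity, so equal contents don't help
        System.out.println(Objects.hash((Object) a) == Objects.hash((Object) b));
        // true: hashing the contents first makes equal arrays produce equal hash codes
        System.out.println(Objects.hash(Arrays.hashCode(a)) == Objects.hash(Arrays.hashCode(b)));
    }
}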
@@ -0,0 +1,219 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.metrics.tophits;

import com.carrotsearch.randomizedtesting.annotations.Repeat;

import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHits;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.util.Comparator.comparing;

@Repeat(iterations = 1000)
public class InternalTopHitsTests extends InternalAggregationTestCase<InternalTopHits> {
    /**
     * Should the test instances look like they are sorted by some fields (true) or sorted by score (false). Set here because these need
     * to be the same across the entirety of {@link #testReduceRandom()}.
     */
    private final boolean testInstancesLookSortedByField = randomBoolean();
    /**
     * Fields shared by all instances created by {@link #createTestInstance(String, List, Map)}.
     */
    private final SortField[] testInstancesSortFields = testInstancesLookSortedByField ? randomSortFields() : new SortField[0];

    @Override
    protected InternalTopHits createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
        int from = 0;
        int requestedSize = between(1, 40);
        int actualSize = between(0, requestedSize);

        float maxScore = Float.MIN_VALUE;
        ScoreDoc[] scoreDocs = new ScoreDoc[actualSize];
        InternalSearchHit[] hits = new InternalSearchHit[actualSize];
        Set<Integer> usedDocIds = new HashSet<>();
        for (int i = 0; i < actualSize; i++) {
            float score = randomFloat();
            maxScore = max(maxScore, score);
            int docId = randomValueOtherThanMany(usedDocIds::contains, () -> between(0, IndexWriter.MAX_DOCS));
            usedDocIds.add(docId);

            Map<String, SearchHitField> searchHitFields = new HashMap<>();
            if (testInstancesLookSortedByField) {
                Object[] fields = new Object[testInstancesSortFields.length];
                for (int f = 0; f < testInstancesSortFields.length; f++) {
                    fields[f] = randomOfType(testInstancesSortFields[f].getType());
                }
                scoreDocs[i] = new FieldDoc(docId, score, fields);
            } else {
                scoreDocs[i] = new ScoreDoc(docId, score);
            }
            hits[i] = new InternalSearchHit(docId, Integer.toString(i), new Text("test"), searchHitFields);
            hits[i].score(score);
        }
        int totalHits = between(actualSize, 500000);
        InternalSearchHits internalSearchHits = new InternalSearchHits(hits, totalHits, maxScore);

        TopDocs topDocs;
        Arrays.sort(scoreDocs, scoreDocComparator());
        if (testInstancesLookSortedByField) {
            topDocs = new TopFieldDocs(totalHits, scoreDocs, testInstancesSortFields, maxScore);
        } else {
            topDocs = new TopDocs(totalHits, scoreDocs, maxScore);
        }

        return new InternalTopHits(name, from, requestedSize, topDocs, internalSearchHits, pipelineAggregators, metaData);
    }

    private Object randomOfType(SortField.Type type) {
        switch (type) {
        case CUSTOM:
            throw new UnsupportedOperationException();
        case DOC:
            return between(0, IndexWriter.MAX_DOCS);
        case DOUBLE:
            return randomDouble();
        case FLOAT:
            return randomFloat();
        case INT:
            return randomInt();
        case LONG:
            return randomLong();
        case REWRITEABLE:
            throw new UnsupportedOperationException();
        case SCORE:
            return randomFloat();
        case STRING:
            return new BytesRef(randomAsciiOfLength(5));
        case STRING_VAL:
            return new BytesRef(randomAsciiOfLength(5));
        default:
            throw new UnsupportedOperationException("Unknown SortField.Type: " + type);
        }
    }

    @Override
    protected void assertReduced(InternalTopHits reduced, List<InternalTopHits> inputs) {
        InternalSearchHits actualHits = (InternalSearchHits) reduced.getHits();
        List<Tuple<ScoreDoc, InternalSearchHit>> allHits = new ArrayList<>();
        float maxScore = Float.MIN_VALUE;
        long totalHits = 0;
        for (int input = 0; input < inputs.size(); input++) {
            InternalSearchHits internalHits = (InternalSearchHits) inputs.get(input).getHits();
            totalHits += internalHits.totalHits();
            maxScore = max(maxScore, internalHits.maxScore());
            for (int i = 0; i < internalHits.internalHits().length; i++) {
                ScoreDoc doc = inputs.get(input).getTopDocs().scoreDocs[i];
                if (testInstancesLookSortedByField) {
                    doc = new FieldDoc(doc.doc, doc.score, ((FieldDoc) doc).fields, input);
                } else {
                    doc = new ScoreDoc(doc.doc, doc.score, input);
                }
                allHits.add(new Tuple<>(doc, internalHits.internalHits()[i]));
            }
        }
        allHits.sort(comparing(Tuple::v1, scoreDocComparator()));
        InternalSearchHit[] expectedHitsHits = new InternalSearchHit[min(inputs.get(0).getSize(), allHits.size())];
        for (int i = 0; i < expectedHitsHits.length; i++) {
            expectedHitsHits[i] = allHits.get(i).v2();
        }
        InternalSearchHits expectedHits = new InternalSearchHits(expectedHitsHits, totalHits, maxScore);
        assertEqualsWithErrorMessageFromXContent(expectedHits, actualHits);
    }

    @Override
    protected Reader<InternalTopHits> instanceReader() {
        return InternalTopHits::new;
    }

    private SortField[] randomSortFields() {
        SortField[] sortFields = new SortField[between(1, 5)];
        Set<String> usedSortFields = new HashSet<>();
        for (int i = 0; i < sortFields.length; i++) {
            String sortField = randomValueOtherThanMany(usedSortFields::contains, () -> randomAsciiOfLength(5));
            usedSortFields.add(sortField);
            SortField.Type type = randomValueOtherThanMany(t -> t == SortField.Type.CUSTOM || t == SortField.Type.REWRITEABLE,
                    () -> randomFrom(SortField.Type.values()));
            sortFields[i] = new SortField(sortField, type);
        }
        return sortFields;
    }

    private Comparator<ScoreDoc> scoreDocComparator() {
        return innerScoreDocComparator().thenComparing(s -> s.shardIndex);
    }

    private Comparator<ScoreDoc> innerScoreDocComparator() {
        if (testInstancesLookSortedByField) {
            // Values passed to getComparator shouldn't matter
            @SuppressWarnings("rawtypes")
            FieldComparator[] comparators = new FieldComparator[testInstancesSortFields.length];
            for (int i = 0; i < testInstancesSortFields.length; i++) {
                try {
                    comparators[i] = testInstancesSortFields[i].getComparator(0, 0);
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
            return (lhs, rhs) -> {
                FieldDoc l = (FieldDoc) lhs;
                FieldDoc r = (FieldDoc) rhs;
                int i = 0;
                while (i < l.fields.length) {
                    @SuppressWarnings("unchecked")
                    int c = comparators[i].compareValues(l.fields[i], r.fields[i]);
                    if (c != 0) {
                        return c;
                    }
                    i++;
                }
                return 0;
            };
        } else {
            Comparator<ScoreDoc> comparator = comparing(d -> d.score);
            return comparator.reversed();
        }
    }
}

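The expectation that `assertReduced` builds above boils down to: gather every shard's hits, re-sort them with the same comparator the aggregation uses, and keep only the requested number. A condensed, generic sketch of that logic (the `TopNReduction` class is invented for illustration):

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

final class TopNReduction {
    private TopNReduction() {}

    /** Merge per-shard hit lists, re-sort them globally, and keep the top {@code size} entries. */
    static <T> List<T> reduce(List<List<T>> perShardHits, Comparator<T> comparator, int size) {
        List<T> all = new ArrayList<>();
        perShardHits.forEach(all::addAll);   // concatenate every shard's hits
        all.sort(comparator);                // e.g. by score descending, then shard index
        return new ArrayList<>(all.subList(0, Math.min(size, all.size())));   // truncate to the requested size
    }
}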
@@ -61,7 +61,10 @@ public abstract class AbstractWireSerializingTestCase<T extends Writeable> exten

        T secondInstance = copyInstance(firstInstance);
        assertEquals("instance is not equal to self", secondInstance, secondInstance);
        if (false == firstInstance.equals(secondInstance)) {
            // re-run equals before failing so the comparison is easy to step into with a debugger
            firstInstance.equals(secondInstance);
            assertEquals("instance is not equal to its copy", firstInstance, secondInstance);
        }
        assertEquals("equals is not symmetric", secondInstance, firstInstance);
        assertThat("instance copy's hashcode is different from original hashcode", secondInstance.hashCode(),
                equalTo(firstInstance.hashCode()));

@@ -29,6 +29,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter;

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

@@ -65,11 +66,14 @@ import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.common.util.MockPageCacheRecycler;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.Index;

@@ -115,6 +119,7 @@ import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Random;
import java.util.Set;
import java.util.TreeMap;

@@ -951,6 +956,38 @@ public abstract class ESTestCase extends LuceneTestCase {
        assertThat(count + " files exist that should have been cleaned:\n" + sb.toString(), count, equalTo(0));
    }

    /**
     * Assert that two objects are equal, calling {@link ToXContent#toXContent(XContentBuilder, ToXContent.Params)} to print out their
     * differences if they aren't equal.
     */
    public static <T extends ToXContent> void assertEqualsWithErrorMessageFromXContent(T expected, T actual) {
        if (Objects.equals(expected, actual)) {
            return;
        }
        if (expected == null) {
            throw new AssertionError("Expected null but actual was [" + actual.toString() + "]");
        }
        if (actual == null) {
            throw new AssertionError("Didn't expect null but actual was [null]");
        }
        try (XContentBuilder actualJson = JsonXContent.contentBuilder();
                XContentBuilder expectedJson = JsonXContent.contentBuilder()) {
            actualJson.startObject();
            actual.toXContent(actualJson, ToXContent.EMPTY_PARAMS);
            actualJson.endObject();
            expectedJson.startObject();
            expected.toXContent(expectedJson, ToXContent.EMPTY_PARAMS);
            expectedJson.endObject();
            NotEqualMessageBuilder message = new NotEqualMessageBuilder();
            message.compareMaps(
                    XContentHelper.convertToMap(actualJson.bytes(), false).v2(),
                    XContentHelper.convertToMap(expectedJson.bytes(), false).v2());
            throw new AssertionError("Didn't match expected value:\n" + message);
        } catch (IOException e) {
            throw new AssertionError("IOException while building failure message", e);
        }
    }

    /**
     * Create a new {@link XContentParser}.
     */

@@ -0,0 +1,170 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.test;

import org.elasticsearch.common.Nullable;

import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;

/**
 * Builds a message describing how two sets of values are unequal.
 */
public class NotEqualMessageBuilder {
    private final StringBuilder message;
    private int indent = 0;

    /**
     * Creates a new, empty message.
     */
    public NotEqualMessageBuilder() {
        this.message = new StringBuilder();
    }

    /**
     * The failure message.
     */
    @Override
    public String toString() {
        return message.toString();
    }

    /**
     * Compare two maps.
     */
    public void compareMaps(Map<String, Object> actual, Map<String, Object> expected) {
        actual = new TreeMap<>(actual);
        expected = new TreeMap<>(expected);
        for (Map.Entry<String, Object> expectedEntry : expected.entrySet()) {
            compare(expectedEntry.getKey(), actual.remove(expectedEntry.getKey()), expectedEntry.getValue());
        }
        for (Map.Entry<String, Object> unmatchedEntry : actual.entrySet()) {
            field(unmatchedEntry.getKey(), "unexpected but found [" + unmatchedEntry.getValue() + "]");
        }
    }

    /**
     * Compare two lists.
     */
    public void compareLists(List<Object> actual, List<Object> expected) {
        int i = 0;
        while (i < actual.size() && i < expected.size()) {
            compare(Integer.toString(i), actual.get(i), expected.get(i));
            i++;
        }
        if (actual.size() == expected.size()) {
            return;
        }
        indent();
        if (actual.size() < expected.size()) {
            message.append("expected [").append(expected.size() - i).append("] more entries\n");
            return;
        }
        message.append("received [").append(actual.size() - i).append("] more entries than expected\n");
    }

    /**
     * Compare two values.
     * @param field the name of the field being compared.
     */
    public void compare(String field, @Nullable Object actual, Object expected) {
        if (expected instanceof Map) {
            if (actual == null) {
                field(field, "expected map but not found");
                return;
            }
            if (false == actual instanceof Map) {
                field(field, "expected map but found [" + actual + "]");
                return;
            }
            @SuppressWarnings("unchecked")
            Map<String, Object> expectedMap = (Map<String, Object>) expected;
            @SuppressWarnings("unchecked")
            Map<String, Object> actualMap = (Map<String, Object>) actual;
            if (expectedMap.isEmpty() && actualMap.isEmpty()) {
                field(field, "same [empty map]");
                return;
            }
            field(field, null);
            indent += 1;
            compareMaps(actualMap, expectedMap);
            indent -= 1;
            return;
        }
        if (expected instanceof List) {
            if (actual == null) {
                field(field, "expected list but not found");
                return;
            }
            if (false == actual instanceof List) {
                field(field, "expected list but found [" + actual + "]");
                return;
            }
            @SuppressWarnings("unchecked")
            List<Object> expectedList = (List<Object>) expected;
            @SuppressWarnings("unchecked")
            List<Object> actualList = (List<Object>) actual;
            if (expectedList.isEmpty() && actualList.isEmpty()) {
                field(field, "same [empty list]");
                return;
            }
            field(field, null);
            indent += 1;
            compareLists(actualList, expectedList);
            indent -= 1;
            return;
        }
        if (actual == null) {
            field(field, "expected [" + expected + "] but not found");
            return;
        }
        if (Objects.equals(expected, actual)) {
            if (expected instanceof String) {
                String expectedString = (String) expected;
                if (expectedString.length() > 50) {
                    expectedString = expectedString.substring(0, 50) + "...";
                }
                field(field, "same [" + expectedString + "]");
                return;
            }
            field(field, "same [" + expected + "]");
            return;
        }
        field(field, "expected [" + expected + "] but was [" + actual + "]");
    }

    private void indent() {
        for (int i = 0; i < indent; i++) {
            message.append(" ");
        }
    }

    private void field(Object name, String info) {
        indent();
        message.append(String.format(Locale.ROOT, "%30s: ", name));
        if (info != null) {
            message.append(info);
        }
        message.append('\n');
    }
}

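A quick usage sketch of `NotEqualMessageBuilder` (the maps and values are invented for illustration): `compareMaps` reports matching fields as `same [...]`, mismatches as `expected [...] but was [...]`, and anything present only in the actual map as `unexpected but found [...]`.

import org.elasticsearch.test.NotEqualMessageBuilder;

import java.util.HashMap;
import java.util.Map;

public class NotEqualMessageBuilderExample {
    public static void main(String[] args) {
        Map<String, Object> expected = new HashMap<>();
        expected.put("total", 10);
        expected.put("max_score", 1.5);

        Map<String, Object> actual = new HashMap<>();
        actual.put("total", 10);
        actual.put("max_score", 2.0);
        actual.put("shard", 3);

        NotEqualMessageBuilder message = new NotEqualMessageBuilder();
        // argument order matches the callers in this commit: actual first, expected second
        message.compareMaps(actual, expected);
        System.out.println(message);  // one indented line per field describing the comparison
    }
}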
@@ -19,18 +19,13 @@
package org.elasticsearch.test.rest.yaml.section;

import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.NotEqualMessageBuilder;

import java.io.IOException;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;
import java.util.regex.Pattern;

import static org.elasticsearch.test.hamcrest.RegexMatcher.matches;

@@ -87,127 +82,9 @@ public class MatchAssertion extends Assertion {
        }

        if (expectedValue.equals(actualValue) == false) {
            FailureMessage message = new FailureMessage(getField());
            NotEqualMessageBuilder message = new NotEqualMessageBuilder();
            message.compare(getField(), actualValue, expectedValue);
            throw new AssertionError(message.message);
            throw new AssertionError(getField() + " didn't match expected value:\n" + message);
        }
    }

    private static class FailureMessage {
        private final StringBuilder message;
        private int indent = 0;

        private FailureMessage(String field) {
            this.message = new StringBuilder(field + " didn't match the expected value:\n");
        }

        private void compareMaps(Map<String, Object> actual, Map<String, Object> expected) {
            actual = new TreeMap<>(actual);
            expected = new TreeMap<>(expected);
            for (Map.Entry<String, Object> expectedEntry : expected.entrySet()) {
                compare(expectedEntry.getKey(), actual.remove(expectedEntry.getKey()), expectedEntry.getValue());
            }
            for (Map.Entry<String, Object> unmatchedEntry : actual.entrySet()) {
                field(unmatchedEntry.getKey(), "unexpected but found [" + unmatchedEntry.getValue() + "]");
            }
        }

        private void compareLists(List<Object> actual, List<Object> expected) {
            int i = 0;
            while (i < actual.size() && i < expected.size()) {
                compare(Integer.toString(i), actual.get(i), expected.get(i));
                i++;
            }
            if (actual.size() == expected.size()) {
                return;
            }
            indent();
            if (actual.size() < expected.size()) {
                message.append("expected [").append(expected.size() - i).append("] more entries\n");
                return;
            }
            message.append("received [").append(actual.size() - i).append("] more entries than expected\n");
        }

        private void compare(String field, @Nullable Object actual, Object expected) {
            if (expected instanceof Map) {
                if (actual == null) {
                    field(field, "expected map but not found");
                    return;
                }
                if (false == actual instanceof Map) {
                    field(field, "expected map but found [" + actual + "]");
                    return;
                }
                @SuppressWarnings("unchecked")
                Map<String, Object> expectedMap = (Map<String, Object>) expected;
                @SuppressWarnings("unchecked")
                Map<String, Object> actualMap = (Map<String, Object>) actual;
                if (expectedMap.isEmpty() && actualMap.isEmpty()) {
                    field(field, "same [empty map]");
                    return;
                }
                field(field, null);
                indent += 1;
                compareMaps(actualMap, expectedMap);
                indent -= 1;
                return;
            }
            if (expected instanceof List) {
                if (actual == null) {
                    field(field, "expected list but not found");
                    return;
                }
                if (false == actual instanceof List) {
                    field(field, "expected list but found [" + actual + "]");
                    return;
                }
                @SuppressWarnings("unchecked")
                List<Object> expectedList = (List<Object>) expected;
                @SuppressWarnings("unchecked")
                List<Object> actualList = (List<Object>) actual;
                if (expectedList.isEmpty() && actualList.isEmpty()) {
                    field(field, "same [empty list]");
                    return;
                }
                field(field, null);
                indent += 1;
                compareLists(actualList, expectedList);
                indent -= 1;
                return;
            }
            if (actual == null) {
                field(field, "expected [" + expected + "] but not found");
                return;
            }
            if (Objects.equals(expected, actual)) {
                if (expected instanceof String) {
                    String expectedString = (String) expected;
                    if (expectedString.length() > 50) {
                        expectedString = expectedString.substring(0, 50) + "...";
                    }
                    field(field, "same [" + expectedString + "]");
                    return;
                }
                field(field, "same [" + expected + "]");
                return;
            }
            field(field, "expected [" + expected + "] but was [" + actual + "]");
        }

        private void indent() {
            for (int i = 0; i < indent; i++) {
                message.append(" ");
            }
        }

        private void field(Object name, String info) {
            indent();
            message.append(String.format(Locale.ROOT, "%30s: ", name));
            if (info != null) {
                message.append(info);
            }
            message.append('\n');
        }
    }
}