Fixing compile issues after merging in master
parent cfa52f8b9a
commit 6f6b2933b1
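The compile breakage comes from master folding InternalSearchHit into SearchHit, so the rank-eval production and test code now constructs SearchHit directly and drops the InternalSearchHit imports and casts. A minimal sketch of the post-merge test pattern, mirroring the toSearchHits helper changed in the hunks below (the wrapper class name SearchHitFixture is illustrative only; the SearchHit constructor and shard() setter are the ones used in this diff):

    import java.util.Collections;
    import java.util.List;

    import org.elasticsearch.common.text.Text;
    import org.elasticsearch.index.Index;
    import org.elasticsearch.index.rankeval.RatedDocument;
    import org.elasticsearch.search.SearchHit;
    import org.elasticsearch.search.SearchShardTarget;

    // Sketch of the post-merge fixture pattern used throughout these tests.
    class SearchHitFixture {
        static SearchHit[] toSearchHits(List<RatedDocument> rated, String index, String type) {
            SearchHit[] hits = new SearchHit[rated.size()];
            for (int i = 0; i < rated.size(); i++) {
                // docId, id, type, empty field map: the constructor formerly on InternalSearchHit, now on SearchHit
                hits[i] = new SearchHit(i, i + "", new Text(type), Collections.emptyMap());
                // the shard target is set directly on the hit; no cast to InternalSearchHit is needed anymore
                hits[i].shard(new SearchShardTarget("testnode", new Index(index, "uuid"), 0));
            }
            return hits;
        }
    }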
@@ -26,7 +26,6 @@ import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.internal.InternalSearchHit;
 
 import java.io.IOException;
 import java.util.Objects;

@@ -43,7 +42,7 @@ public class RatedSearchHit implements Writeable, ToXContent {
     }
 
     public RatedSearchHit(StreamInput in) throws IOException {
-        this(InternalSearchHit.readSearchHit(in), in.readBoolean() == true ? Optional.of(in.readVInt()) : Optional.empty());
+        this(SearchHit.readSearchHit(in), in.readBoolean() == true ? Optional.of(in.readVInt()) : Optional.empty());
     }
 
     @Override
@@ -27,8 +27,8 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchShardTarget;
-import org.elasticsearch.search.internal.InternalSearchHit;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;

@@ -57,10 +57,10 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
     public void testDCGAt() {
         List<RatedDocument> rated = new ArrayList<>();
         int[] relevanceRatings = new int[] { 3, 2, 3, 0, 1, 2 };
-        InternalSearchHit[] hits = new InternalSearchHit[6];
+        SearchHit[] hits = new SearchHit[6];
         for (int i = 0; i < 6; i++) {
             rated.add(new RatedDocument("index", "type", Integer.toString(i), relevanceRatings[i]));
-            hits[i] = new InternalSearchHit(i, Integer.toString(i), new Text("type"), Collections.emptyMap());
+            hits[i] = new SearchHit(i, Integer.toString(i), new Text("type"), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0)));
         }
         DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();

@@ -101,14 +101,14 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
     public void testDCGAtSixMissingRatings() {
         List<RatedDocument> rated = new ArrayList<>();
         Integer[] relevanceRatings = new Integer[] { 3, 2, 3, null, 1};
-        InternalSearchHit[] hits = new InternalSearchHit[6];
+        SearchHit[] hits = new SearchHit[6];
         for (int i = 0; i < 6; i++) {
             if (i < relevanceRatings.length) {
                 if (relevanceRatings[i] != null) {
                     rated.add(new RatedDocument("index", "type", Integer.toString(i), relevanceRatings[i]));
                 }
             }
-            hits[i] = new InternalSearchHit(i, Integer.toString(i), new Text("type"), Collections.emptyMap());
+            hits[i] = new SearchHit(i, Integer.toString(i), new Text("type"), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0)));
         }
         DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();

@@ -161,9 +161,9 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
             }
         }
         // only create four hits
-        InternalSearchHit[] hits = new InternalSearchHit[4];
+        SearchHit[] hits = new SearchHit[4];
         for (int i = 0; i < 4; i++) {
-            hits[i] = new InternalSearchHit(i, Integer.toString(i), new Text("type"), Collections.emptyMap());
+            hits[i] = new SearchHit(i, Integer.toString(i), new Text("type"), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0)));
         }
         DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
@@ -29,7 +29,6 @@ import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchShardTarget;
-import org.elasticsearch.search.internal.InternalSearchHit;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;

@@ -115,8 +114,8 @@ public class PrecisionTests extends ESTestCase {
         rated.add(new RatedDocument("test", "testtype", "1", Rating.RELEVANT.ordinal()));
         // add an unlabeled search hit
         SearchHit[] searchHits = Arrays.copyOf(toSearchHits(rated, "test", "testtype"), 3);
-        searchHits[2] = new InternalSearchHit(2, "2", new Text("testtype"), Collections.emptyMap());
-        ((InternalSearchHit)searchHits[2]).shard(new SearchShardTarget("testnode", new Index("index", "uuid"), 0));
+        searchHits[2] = new SearchHit(2, "2", new Text("testtype"), Collections.emptyMap());
+        searchHits[2].shard(new SearchShardTarget("testnode", new Index("index", "uuid"), 0));
 
         EvalQueryQuality evaluated = (new Precision()).evaluate("id", searchHits, rated);
         assertEquals((double) 2 / 3, evaluated.getQualityLevel(), 0.00001);

@@ -133,9 +132,9 @@ public class PrecisionTests extends ESTestCase {
     }
 
     public void testNoRatedDocs() throws Exception {
-        InternalSearchHit[] hits = new InternalSearchHit[5];
+        SearchHit[] hits = new SearchHit[5];
         for (int i = 0; i < 5; i++) {
-            hits[i] = new InternalSearchHit(i, i+"", new Text("type"), Collections.emptyMap());
+            hits[i] = new SearchHit(i, i+"", new Text("type"), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new Index("index", "uuid"), 0));
         }
         EvalQueryQuality evaluated = (new Precision()).evaluate("id", hits, Collections.emptyList());

@@ -228,9 +227,9 @@ public class PrecisionTests extends ESTestCase {
     }
 
     private static SearchHit[] toSearchHits(List<RatedDocument> rated, String index, String type) {
-        InternalSearchHit[] hits = new InternalSearchHit[rated.size()];
+        SearchHit[] hits = new SearchHit[rated.size()];
         for (int i = 0; i < rated.size(); i++) {
-            hits[i] = new InternalSearchHit(i, i+"", new Text(type), Collections.emptyMap());
+            hits[i] = new SearchHit(i, i+"", new Text(type), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new Index(index, "uuid"), 0));
         }
         return hits;
@@ -21,7 +21,6 @@ package org.elasticsearch.index.rankeval;
 
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.internal.InternalSearchHit;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;

@@ -32,7 +31,7 @@ public class RatedSearchHitTests extends ESTestCase {
 
     public static RatedSearchHit randomRatedSearchHit() {
         Optional<Integer> rating = randomBoolean() ? Optional.empty() : Optional.of(randomIntBetween(0, 5));
-        SearchHit searchHit = new InternalSearchHit(randomIntBetween(0, 10), randomAsciiOfLength(10), new Text(randomAsciiOfLength(10)),
+        SearchHit searchHit = new SearchHit(randomIntBetween(0, 10), randomAsciiOfLength(10), new Text(randomAsciiOfLength(10)),
                 Collections.emptyMap());
         RatedSearchHit ratedSearchHit = new RatedSearchHit(searchHit, rating);
         return ratedSearchHit;

@@ -40,13 +39,13 @@ public class RatedSearchHitTests extends ESTestCase {
 
     private static RatedSearchHit mutateTestItem(RatedSearchHit original) {
         Optional<Integer> rating = original.getRating();
-        InternalSearchHit hit = (InternalSearchHit) original.getSearchHit();
+        SearchHit hit = original.getSearchHit();
         switch (randomIntBetween(0, 1)) {
         case 0:
             rating = rating.isPresent() ? Optional.of(rating.get() + 1) : Optional.of(randomInt(5));
             break;
         case 1:
-            hit = new InternalSearchHit(hit.docId(), hit.getId() + randomAsciiOfLength(10), new Text(hit.getType()),
+            hit = new SearchHit(hit.docId(), hit.getId() + randomAsciiOfLength(10), new Text(hit.getType()),
                     Collections.emptyMap());
             break;
         default:
@@ -29,7 +29,6 @@ import org.elasticsearch.index.Index;
 import org.elasticsearch.index.rankeval.PrecisionTests.Rating;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchShardTarget;
-import org.elasticsearch.search.internal.InternalSearchHit;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;

@@ -140,13 +139,13 @@ public class ReciprocalRankTests extends ESTestCase {
     }
 
     /**
-     * Create InternalSearchHits for testing, starting from dociId 'from' up to docId 'to'.
+     * Create SearchHits for testing, starting from dociId 'from' up to docId 'to'.
      * The search hits index and type also need to be provided
      */
    private static SearchHit[] createSearchHits(int from, int to, String index, String type) {
-        InternalSearchHit[] hits = new InternalSearchHit[to + 1 - from];
+        SearchHit[] hits = new SearchHit[to + 1 - from];
         for (int i = from; i <= to; i++) {
-            hits[i] = new InternalSearchHit(i, i+"", new Text(type), Collections.emptyMap());
+            hits[i] = new SearchHit(i, i+"", new Text(type), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new Index(index, "uuid"), 0));
         }
         return hits;