Upstream fixes: use getter methods instead of (now) private members
Original commit: elastic/x-pack-elasticsearch@80786e4f84
parent 9dc4a2f31c
commit 91883ad57b
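The migration is mechanical: each Elasticsearch accessor that upstream made private (or removed) is swapped for its getter equivalent, and the test-only InternalSearchHit/InternalSearchHitField classes are replaced by SearchHit/SearchHitField, which are now constructed directly. A condensed sketch of the renames, assuming the 5.x-era API these hunks touch (the field name "some_field" is illustrative):

    import java.util.List;
    import java.util.Map;

    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.search.SearchHit;
    import org.elasticsearch.search.SearchHitField;
    import org.elasticsearch.search.SearchHits;

    // Every rename below is taken from the hunks in this commit.
    static void accessAfterUpstreamChange(SearchResponse response) {
        SearchHits searchHits = response.getHits();
        long total = searchHits.getTotalHits();                // was totalHits()
        for (SearchHit hit : searchHits.getHits()) {           // was hits()
            BytesReference raw = hit.getSourceRef();           // was sourceRef()
            Map<String, Object> source = hit.getSourceAsMap(); // was getSource()
            SearchHitField field = hit.field("some_field");
            List<Object> values = field.getValues();           // was values()
        }
    }
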
@@ -309,7 +309,7 @@ public class GetFiltersAction extends Action<GetFiltersAction.Request, GetFilter
                 try {
                     List<MlFilter> docs = new ArrayList<>();
                     for (SearchHit hit : response.getHits().getHits()) {
-                        BytesReference docSource = hit.sourceRef();
+                        BytesReference docSource = hit.getSourceRef();
                         XContentParser parser =
                                 XContentFactory.xContent(docSource).createParser(NamedXContentRegistry.EMPTY, docSource);
                         docs.add(MlFilter.PARSER.apply(parser, null));

@@ -120,7 +120,7 @@ public class ChunkedDataExtractor implements DataExtractor {
         Aggregations aggregations = response.getAggregations();
         long earliestTime = 0;
         long latestTime = 0;
-        long totalHits = response.getHits().totalHits();
+        long totalHits = response.getHits().getTotalHits();
         if (totalHits > 0) {
             earliestTime = (long) Double.parseDouble(aggregations.getProperty(EARLIEST_TIME + VALUE_SUFFIX).toString());
             latestTime = (long) Double.parseDouble(aggregations.getProperty(LATEST_TIME + VALUE_SUFFIX).toString());

@@ -66,7 +66,7 @@ abstract class ExtractedField {
     public Object[] value(SearchHit hit) {
         SearchHitField keyValue = hit.field(name);
         if (keyValue != null) {
-            List<Object> values = keyValue.values();
+            List<Object> values = keyValue.getValues();
             return values.toArray(new Object[values.size()]);
         }
         return new Object[0];

@@ -101,7 +101,7 @@ abstract class ExtractedField {

         @Override
         public Object[] value(SearchHit hit) {
-            Map<String, Object> source = hit.getSource();
+            Map<String, Object> source = hit.getSourceAsMap();
             int level = 0;
             while (source != null && level < namePath.length - 1) {
                 source = getNextLevel(source, namePath[level]);

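The traversal above relies on a getNextLevel helper that falls outside this diff; a hypothetical sketch of what such a helper presumably does (the cast-and-null-check behavior is an assumption, not taken from the commit):

    // Hypothetical helper, not part of this commit: step one level down
    // into the _source map, or return null if the path dead-ends.
    @SuppressWarnings("unchecked")
    private static Map<String, Object> getNextLevel(Map<String, Object> source, String key) {
        Object nextLevel = source.get(key);
        if (nextLevel instanceof Map) {
            return (Map<String, Object>) nextLevel;
        }
        return null;
    }
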
@@ -118,7 +118,7 @@ class ScrollDataExtractor implements DataExtractor {
     private InputStream processSearchResponse(SearchResponse searchResponse) throws IOException {
         ExtractorUtils.checkSearchWasSuccessful(context.jobId, searchResponse);
         scrollId = searchResponse.getScrollId();
-        if (searchResponse.getHits().hits().length == 0) {
+        if (searchResponse.getHits().getHits().length == 0) {
             hasNext = false;
             clearScroll(scrollId);
             return null;

@@ -126,7 +126,7 @@ class ScrollDataExtractor implements DataExtractor {

         ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
         try (SearchHitToJsonProcessor hitProcessor = new SearchHitToJsonProcessor(context.extractedFields, outputStream)) {
-            for (SearchHit hit : searchResponse.getHits().hits()) {
+            for (SearchHit hit : searchResponse.getHits().getHits()) {
                 if (isCancelled) {
                     Long timestamp = context.extractedFields.timeFieldValue(hit);
                     if (timestamp != null) {

@@ -85,7 +85,7 @@ public class JobDataDeleter {
     }

     private void addDeleteRequestForSearchHits(SearchHits hits, String index) {
-        for (SearchHit hit : hits.hits()) {
+        for (SearchHit hit : hits.getHits()) {
             LOGGER.trace("Search hit for result: {}", hit.getId());
             addDeleteRequest(hit, index);
         }

@@ -140,7 +140,7 @@ public class JobDataDeleter {
                 .get();

         String scrollId = searchResponse.getScrollId();
-        long totalHits = searchResponse.getHits().totalHits();
+        long totalHits = searchResponse.getHits().getTotalHits();
         long totalDeletedCount = 0;
         while (totalDeletedCount < totalHits) {
             for (SearchHit hit : searchResponse.getHits()) {

@@ -211,8 +211,8 @@ public class JobDataDeleter {
         public void onResponse(SearchResponse searchResponse) {
             addDeleteRequestForSearchHits(searchResponse.getHits(), index);

-            totalDeletedCount.addAndGet(searchResponse.getHits().hits().length);
-            if (totalDeletedCount.get() < searchResponse.getHits().totalHits()) {
+            totalDeletedCount.addAndGet(searchResponse.getHits().getHits().length);
+            if (totalDeletedCount.get() < searchResponse.getHits().getTotalHits()) {
                 client.prepareSearchScroll(searchResponse.getScrollId()).setScroll(SCROLL_CONTEXT_DURATION).execute(this);
             }
             else {

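Both JobDataDeleter hunks are instances of the same scroll-then-delete loop; a condensed sketch of that pattern using only the getter accessors this commit migrates to (client, index, SCROLL_CONTEXT_DURATION, and addDeleteRequest come from the surrounding class and are assumed here):

    // Sketch of the scroll-delete loop, not a verbatim copy of the commit.
    SearchResponse searchResponse = client.prepareSearch(index)
            .setScroll(SCROLL_CONTEXT_DURATION)
            .get();
    long totalHits = searchResponse.getHits().getTotalHits();      // was totalHits()
    long totalDeletedCount = 0;
    while (totalDeletedCount < totalHits) {
        for (SearchHit hit : searchResponse.getHits().getHits()) { // was hits()
            addDeleteRequest(hit, index);
            totalDeletedCount++;
        }
        searchResponse = client.prepareSearchScroll(searchResponse.getScrollId())
                .setScroll(SCROLL_CONTEXT_DURATION)
                .get();
    }
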
@@ -407,7 +407,7 @@ public class ChunkedDataExtractorTests extends ESTestCase {
         SearchResponse searchResponse = mock(SearchResponse.class);
         when(searchResponse.status()).thenReturn(RestStatus.OK);
         SearchHits searchHits = mock(SearchHits.class);
-        when(searchHits.totalHits()).thenReturn(totalHits);
+        when(searchHits.getTotalHits()).thenReturn(totalHits);
         when(searchResponse.getHits()).thenReturn(searchHits);

         Aggregations aggs = mock(Aggregations.class);

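The test changes mirror the production ones: every Mockito stub that targeted a removed accessor now targets its getter. A self-contained version of the stubbing pattern these fixtures use (the helper name is illustrative):

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.search.SearchHit;
    import org.elasticsearch.search.SearchHits;

    // Stub only the getter methods; the field-style accessors are no
    // longer visible, so stubbing them would not compile.
    private SearchResponse mockSearchResponse(long totalHitCount, SearchHit[] hits) {
        SearchHits searchHits = mock(SearchHits.class);
        when(searchHits.getTotalHits()).thenReturn(totalHitCount);
        when(searchHits.getHits()).thenReturn(hits);
        SearchResponse searchResponse = mock(SearchResponse.class);
        when(searchResponse.getHits()).thenReturn(searchHits);
        return searchResponse;
    }
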
@@ -8,8 +8,6 @@ package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchHitField;
-import org.elasticsearch.search.internal.InternalSearchHit;
-import org.elasticsearch.search.internal.InternalSearchHitField;
 import org.elasticsearch.test.ESTestCase;
 import org.joda.time.DateTime;


@@ -111,11 +109,11 @@ public class ExtractedFieldTests extends ESTestCase {

     static class SearchHitBuilder {

-        private final InternalSearchHit hit;
+        private final SearchHit hit;
         private final Map<String, SearchHitField> fields;

         SearchHitBuilder(int docId) {
-            hit = new InternalSearchHit(docId);
+            hit = new SearchHit(docId);
             fields = new HashMap<>();
         }

@@ -124,7 +122,7 @@ public class ExtractedFieldTests extends ESTestCase {
         }

         SearchHitBuilder addField(String name, List<Object> values) {
-            fields.put(name, new InternalSearchHitField(name, values));
+            fields.put(name, new SearchHitField(name, values));
             return this;
         }

@@ -16,8 +16,6 @@ import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchHitField;
 import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.search.internal.InternalSearchHit;
-import org.elasticsearch.search.internal.InternalSearchHitField;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.Before;


@@ -287,16 +285,16 @@ public class ScrollDataExtractorTests extends ESTestCase {
         SearchHits searchHits = mock(SearchHits.class);
         List<SearchHit> hits = new ArrayList<>();
         for (int i = 0; i < timestamps.size(); i++) {
-            InternalSearchHit hit = new InternalSearchHit(randomInt());
+            SearchHit hit = new SearchHit(randomInt());
             Map<String, SearchHitField> fields = new HashMap<>();
-            fields.put(extractedFields.timeField(), new InternalSearchHitField("time", Arrays.asList(timestamps.get(i))));
-            fields.put("field_1", new InternalSearchHitField("field_1", Arrays.asList(field1Values.get(i))));
-            fields.put("field_2", new InternalSearchHitField("field_2", Arrays.asList(field2Values.get(i))));
+            fields.put(extractedFields.timeField(), new SearchHitField("time", Arrays.asList(timestamps.get(i))));
+            fields.put("field_1", new SearchHitField("field_1", Arrays.asList(field1Values.get(i))));
+            fields.put("field_2", new SearchHitField("field_2", Arrays.asList(field2Values.get(i))));
             hit.fields(fields);
             hits.add(hit);
         }
         when(searchHits.getHits()).thenReturn(hits.toArray(new SearchHit[hits.size()]));
-        when(searchHits.hits()).thenReturn(hits.toArray(new SearchHit[hits.size()]));
+        when(searchHits.getHits()).thenReturn(hits.toArray(new SearchHit[hits.size()]));
         when(searchResponse.getHits()).thenReturn(searchHits);
         return searchResponse;
     }

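With InternalSearchHit and InternalSearchHitField gone, the fixtures construct SearchHit and SearchHitField directly. A minimal standalone version of the pattern above (field names and values are illustrative):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    import org.elasticsearch.search.SearchHit;
    import org.elasticsearch.search.SearchHitField;

    // Build a hit carrying stored-field values, as the fixture above now
    // does without the removed internal classes.
    static SearchHit hitWithFields(int docId, long timestamp) {
        Map<String, SearchHitField> fields = new HashMap<>();
        fields.put("time", new SearchHitField("time", Arrays.<Object>asList(timestamp)));
        fields.put("field_1", new SearchHitField("field_1", Arrays.<Object>asList("value_1")));
        SearchHit hit = new SearchHit(docId);
        hit.fields(fields);
        return hit;
    }
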
@@ -107,7 +107,7 @@ public class JobDataDeleterTests extends ESTestCase {
     private SearchHits mockSearchHits(long totalHitCount, int hitsPerSearchResult) {

         SearchHits hits = Mockito.mock(SearchHits.class);
-        when(hits.totalHits()).thenReturn(totalHitCount);
+        when(hits.getTotalHits()).thenReturn(totalHitCount);

         List<SearchHit> hitList = new ArrayList<>();
         for (int i=0; i<20; i++) {

@@ -117,7 +117,7 @@ public class JobDataDeleterTests extends ESTestCase {
             hitList.add(hit);
         }
         when(hits.getHits()).thenReturn(hitList.toArray(new SearchHit[hitList.size()]));
-        when(hits.hits()).thenReturn(hitList.toArray(new SearchHit[hitList.size()]));
+        when(hits.getHits()).thenReturn(hitList.toArray(new SearchHit[hitList.size()]));

         return hits;
     }