Implement fields fetch for runtime fields (backport of #61995) (#62416)

This implements the `fields` API in `_search` for runtime fields using
doc values. Most of that implementation is stolen from the
`docvalue_fields` fetch sub-phase, just moved into the same API that the
`fields` API uses. At this point the `docvalue_fields` fetch phase looks
like a special case of the `fields` API.
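Concretely, both paths now funnel through a `ValueFetcher` backed by doc values. Here is a minimal sketch of that shared path, assuming the field exists and eliding format handling — `"price"`, the class name, and the helper signature are illustrative, while the calls mirror the new `FetchDocValuesPhase` code below:

```java
import java.io.IOException;
import java.util.List;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.index.mapper.DocValueFetcher;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;

class FieldsViaDocValuesSketch {
    // Fetch the formatted doc values of one field for one hit.
    static List<Object> fetch(MapperService mapperService, SearchLookup searchLookup,
                              LeafReaderContext leaf, SourceLookup sourceLookup) throws IOException {
        MappedFieldType ft = mapperService.fieldType("price"); // "price" is illustrative
        ValueFetcher fetcher = new DocValueFetcher(
            ft.docValueFormat(null, null),                     // null format/zone -> field defaults
            searchLookup.doc().getForField(ft));
        fetcher.setNextReader(leaf);                           // once per segment
        return fetcher.fetchValues(sourceLookup);              // values for sourceLookup.docId()
    }
}
```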

While I was at it I moved the "which doc values sub-implementation
should I use for fetching?" question from a bunch of `instanceof`s to a
method on `LeafFieldData` so we can be much more flexible with what is
returned and we're not forced to extend certain classes just to make the
fetch phase happy.
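A condensed before/after sketch of that dispatch — the "before" half paraphrases the code removed from `FetchDocValuesPhase` below, and the identifiers are taken from this diff:

```java
// Before (removed from FetchDocValuesPhase below): pick a reader by type.
DocValueField buildField(IndexFieldData<?> indexFieldData, String field, DocValueFormat format) {
    if (indexFieldData instanceof IndexNumericFieldData) {
        if (((IndexNumericFieldData) indexFieldData).getNumericType().isFloatingPoint()) {
            return new DoubleDocValueField(field, (IndexNumericFieldData) indexFieldData, format);
        }
        // ... NanoDocValueField and LongDocValueField cases elided ...
        return new LongDocValueField(field, (IndexNumericFieldData) indexFieldData, format);
    }
    return new BinaryDocValueField(field, indexFieldData, format);
}

// After: ask the leaf itself; any LeafFieldData implementation can answer.
DocValueFetcher.Leaf leaf(IndexFieldData<?> ifd, LeafReaderContext ctx, DocValueFormat format) {
    return ifd.load(ctx).getLeafValueFetcher(format);
}
```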

Relates to #59332
Nik Everett 2020-09-15 20:24:10 -04:00 committed by GitHub
parent f94ae7ae26
commit 24a24d050a
109 changed files with 699 additions and 349 deletions

View File

@ -183,7 +183,7 @@ public class RankFeatureFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

View File

@ -162,7 +162,7 @@ public class RankFeaturesFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

View File

@ -398,7 +398,7 @@ public class ScaledFloatFieldMapper extends ParametrizedFieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
@ -545,5 +545,25 @@ public class ScaledFloatFieldMapper extends ParametrizedFieldMapper {
}
}
@Override
public DocValueFetcher.Leaf getLeafValueFetcher(DocValueFormat format) {
SortedNumericDoubleValues values = getDoubleValues();
return new DocValueFetcher.Leaf() {
@Override
public boolean advanceExact(int docId) throws IOException {
return values.advanceExact(docId);
}
@Override
public int docValueCount() throws IOException {
return values.docValueCount();
}
@Override
public Object nextValue() throws IOException {
return format.format(values.nextValue());
}
};
}
}
}

View File

@ -54,6 +54,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.ArrayList;
@ -419,7 +420,7 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@ -465,7 +466,7 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@ -588,7 +589,7 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}

View File

@ -25,6 +25,7 @@ import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.document.FieldType;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.Iterator;
@ -159,7 +160,7 @@ public class TokenCountFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

View File

@ -147,7 +147,7 @@ public class RankFeatureFieldMapperTests extends FieldMapperTestCase2<RankFeatur
e.getCause().getMessage());
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
RankFeatureFieldMapper mapper = new RankFeatureFieldMapper.Builder("field").build(context);

View File

@ -260,7 +260,7 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase {
assertWarnings("Parameter [index_options] has no effect on type [scaled_float] and will be removed in future");
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

View File

@ -122,3 +122,15 @@ setup:
- match: { hits.total.value: 4 }
- match: { hits.hits.0._id: "3" }
- match: { hits.hits.0.sort.0: -2 }
---
"docvalue_fields":
  - do:
      search:
        body:
          docvalue_fields: [ "number" ]
          sort:
            number:
              order: asc
  - match: { hits.hits.0.fields.number: [-2.1] }

View File

@ -140,7 +140,7 @@ public class MetaJoinFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for metadata field [" + typeName() + "].");
}

View File

@ -190,7 +190,7 @@ public final class ParentIdFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + typeName() + "].");
}

View File

@ -352,7 +352,7 @@ public final class ParentJoinFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

View File

@ -82,6 +82,7 @@ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.Rewriteable;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
@ -370,7 +371,7 @@ public class PercolatorFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

View File

@ -35,6 +35,8 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightPhase;
import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.ArrayList;
@ -55,7 +57,7 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
}
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext) throws IOException {
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) throws IOException {
if (searchContext.highlight() == null) {
return null;
}
@ -95,9 +97,17 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
int slot = (int) matchedSlot;
BytesReference document = percolateQuery.getDocuments().get(slot);
HitContext subContext = new HitContext(
new SearchHit(slot, "unknown", new Text(hit.hit().getType()),
Collections.emptyMap(), Collections.emptyMap()),
percolatorLeafReaderContext, slot, new HashMap<>()
new SearchHit(
slot,
"unknown",
new Text(hit.hit().getType()),
Collections.emptyMap(),
Collections.emptyMap()
),
percolatorLeafReaderContext,
slot,
new SourceLookup(),
new HashMap<>()
);
subContext.sourceLookup().setSource(document);
// force source because MemoryIndex does not store fields

View File

@ -37,6 +37,7 @@ import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.ArrayList;
@ -57,7 +58,7 @@ final class PercolatorMatchedSlotSubFetchPhase implements FetchSubPhase {
static final String FIELD_NAME_PREFIX = "_percolator_document_slot";
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext) throws IOException {
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) throws IOException {
List<PercolateContext> percolateContexts = new ArrayList<>();
List<PercolateQuery> percolateQueries = locatePercolatorQuery(searchContext.query());

View File

@ -52,9 +52,9 @@ public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase {
Mockito.when(searchContext.highlight()).thenReturn(new SearchHighlightContext(Collections.emptyList()));
Mockito.when(searchContext.query()).thenReturn(new MatchAllDocsQuery());
assertNull(subFetchPhase.getProcessor(searchContext));
assertNull(subFetchPhase.getProcessor(searchContext, null));
Mockito.when(searchContext.query()).thenReturn(percolateQuery);
assertNotNull(subFetchPhase.getProcessor(searchContext));
assertNotNull(subFetchPhase.getProcessor(searchContext, null));
}
public void testLocatePercolatorQuery() {

View File

@ -40,6 +40,7 @@ import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase;
import java.util.Collections;
@ -66,7 +67,7 @@ public class PercolatorMatchedSlotSubFetchPhaseTests extends ESTestCase {
LeafReaderContext context = reader.leaves().get(0);
// A match:
{
HitContext hit = new HitContext(new SearchHit(0), context, 0, new HashMap<>());
HitContext hit = new HitContext(new SearchHit(0), context, 0, new SourceLookup(), new HashMap<>());
PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value"));
MemoryIndex memoryIndex = new MemoryIndex();
memoryIndex.addField("field", "value", new WhitespaceAnalyzer());
@ -77,7 +78,7 @@ public class PercolatorMatchedSlotSubFetchPhaseTests extends ESTestCase {
SearchContext sc = mock(SearchContext.class);
when(sc.query()).thenReturn(percolateQuery);
FetchSubPhaseProcessor processor = phase.getProcessor(sc);
FetchSubPhaseProcessor processor = phase.getProcessor(sc, null);
assertNotNull(processor);
processor.process(hit);
@ -87,7 +88,7 @@ public class PercolatorMatchedSlotSubFetchPhaseTests extends ESTestCase {
// No match:
{
HitContext hit = new HitContext(new SearchHit(0), context, 0, new HashMap<>());
HitContext hit = new HitContext(new SearchHit(0), context, 0, new SourceLookup(), new HashMap<>());
PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value"));
MemoryIndex memoryIndex = new MemoryIndex();
memoryIndex.addField("field", "value1", new WhitespaceAnalyzer());
@ -98,7 +99,7 @@ public class PercolatorMatchedSlotSubFetchPhaseTests extends ESTestCase {
SearchContext sc = mock(SearchContext.class);
when(sc.query()).thenReturn(percolateQuery);
FetchSubPhaseProcessor processor = phase.getProcessor(sc);
FetchSubPhaseProcessor processor = phase.getProcessor(sc, null);
assertNotNull(processor);
processor.process(hit);
@ -107,7 +108,7 @@ public class PercolatorMatchedSlotSubFetchPhaseTests extends ESTestCase {
// No query:
{
HitContext hit = new HitContext(new SearchHit(0), context, 0, new HashMap<>());
HitContext hit = new HitContext(new SearchHit(0), context, 0, new SourceLookup(), new HashMap<>());
PercolateQuery.QueryStore queryStore = ctx -> docId -> null;
MemoryIndex memoryIndex = new MemoryIndex();
memoryIndex.addField("field", "value", new WhitespaceAnalyzer());
@ -118,7 +119,7 @@ public class PercolatorMatchedSlotSubFetchPhaseTests extends ESTestCase {
SearchContext sc = mock(SearchContext.class);
when(sc.query()).thenReturn(percolateQuery);
FetchSubPhaseProcessor processor = phase.getProcessor(sc);
FetchSubPhaseProcessor processor = phase.getProcessor(sc, null);
assertNotNull(processor);
processor.process(hit);

View File

@ -735,7 +735,7 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

View File

@ -310,7 +310,7 @@ public class ICUCollationKeywordFieldMapperTests extends FieldMapperTestCase2<IC
assertEquals(0, fields.length);
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

View File

@ -678,7 +678,7 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase {
assertThat(e.getMessage(), containsString("name cannot be empty string"));
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

View File

@ -46,6 +46,7 @@ import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText.AnnotationToken;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.io.Reader;
@ -589,7 +590,7 @@ public class AnnotatedTextFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

View File

@ -153,7 +153,7 @@ public class Murmur3FieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

View File

@ -14,7 +14,7 @@ setup:
index:
index: test_1
id: 1
body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1, "bigint": 72057594037927936 }
body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1, "bigint": 72057594037927936, d: 3.14 }
- do:
indices.refresh: {}
@ -199,3 +199,18 @@ setup:
- field: "count"
format: "#.0"
- match: { hits.hits.0.fields.count: ["1.0"] }
---
"docvalue_fields - double":
  - skip:
      version: " - 6.99.99"
      reason: Triggered a deprecation warning before 7.0
  - do:
      search:
        body:
          docvalue_fields: [ "d" ]
  # Doc values produce floating point errors. When this test runs as part of
  # the runtime fields tests we *don't* get those errors, hence an assertion
  # loose enough to match both outcomes.
  - lt: { hits.hits.0.fields.d.0: 3.141 }
  - gte: { hits.hits.0.fields.d.0: 3.14 }

View File

@ -112,8 +112,15 @@ setup:
rest_total_hits_as_int: true
index: date*
body:
docvalue_fields: [ { "field": "date", "format" : "strict_date_optional_time" }, { "field": "date", "format": "epoch_millis" }, { "field" : "date", "format": "uuuu-MM-dd'T'HH:mm:ss.SSSSSSSSSX" } ]
sort: [ { "date": "desc" } ]
docvalue_fields:
  - field: date
    format: strict_date_optional_time
  - field: date
    format: epoch_millis
  - field: date
    format: uuuu-MM-dd'T'HH:mm:ss.SSSSSSSSSX
sort:
  - date: desc
- match: { hits.total: 2 }
- length: { hits.hits: 2 }

View File

@ -39,6 +39,7 @@ import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
@ -120,7 +121,7 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase {
private static final String NAME = "term_vectors_fetch";
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext) {
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) {
return new FetchSubPhaseProcessor() {
@Override
public void setNextReader(LeafReaderContext readerContext) {

View File

@ -32,10 +32,10 @@ import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSo
import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
import java.util.function.LongUnaryOperator;

View File

@ -21,6 +21,10 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.util.Accountable;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.index.mapper.DocValueFetcher;
import org.elasticsearch.search.DocValueFormat;
import java.io.IOException;
/**
* The thread safe {@link org.apache.lucene.index.LeafReader} level cache of the data.
@ -37,4 +41,26 @@ public interface LeafFieldData extends Accountable, Releasable {
*/
SortedBinaryDocValues getBytesValues();
/**
* Return a value fetcher for this leaf implementation.
*/
default DocValueFetcher.Leaf getLeafValueFetcher(DocValueFormat format) {
SortedBinaryDocValues values = getBytesValues();
return new DocValueFetcher.Leaf() {
@Override
public boolean advanceExact(int docId) throws IOException {
return values.advanceExact(docId);
}
@Override
public int docValueCount() throws IOException {
return values.docValueCount();
}
@Override
public Object nextValue() throws IOException {
return format.format(values.nextValue());
}
};
}
}
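The default covers every implementation that can render itself as bytes; anything else simply overrides it. In particular, a script-backed runtime field's leaf no longer has to extend a doc-values base class to be fetchable. A hypothetical sketch — the class name and values are invented, only the override pattern is from this diff:

```java
import org.elasticsearch.index.fielddata.LeafFieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.mapper.DocValueFetcher;
import org.elasticsearch.search.DocValueFormat;

// Hypothetical leaf for a script-backed runtime field: there is no Lucene
// doc-values class to extend, yet it can still serve the fetch phase by
// overriding the new hook. Values are hard-coded for illustration.
class ScriptLeafFieldData implements LeafFieldData {
    @Override
    public long ramBytesUsed() {
        return 0; // values are computed on the fly, nothing held in memory
    }

    @Override
    public void close() {}

    @Override
    public ScriptDocValues<?> getScriptValues() {
        throw new UnsupportedOperationException("not needed for this sketch");
    }

    @Override
    public SortedBinaryDocValues getBytesValues() {
        throw new UnsupportedOperationException("not needed for this sketch");
    }

    @Override
    public DocValueFetcher.Leaf getLeafValueFetcher(DocValueFormat format) {
        return new DocValueFetcher.Leaf() {
            @Override
            public boolean advanceExact(int docId) {
                return true; // pretend every document has one value
            }

            @Override
            public int docValueCount() {
                return 1;
            }

            @Override
            public Object nextValue() {
                return format.format(42L); // stand-in for a script result
            }
        };
    }
}
```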

View File

@ -21,12 +21,15 @@ package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.util.Accountable;
import org.elasticsearch.index.fielddata.LeafNumericFieldData;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.LeafNumericFieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.mapper.DocValueFetcher;
import org.elasticsearch.search.DocValueFormat;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
@ -78,6 +81,27 @@ public abstract class LeafDoubleFieldData implements LeafNumericFieldData {
};
}
@Override
public DocValueFetcher.Leaf getLeafValueFetcher(DocValueFormat format) {
SortedNumericDoubleValues values = getDoubleValues();
return new DocValueFetcher.Leaf() {
@Override
public boolean advanceExact(int docId) throws IOException {
return values.advanceExact(docId);
}
@Override
public int docValueCount() throws IOException {
return values.docValueCount();
}
@Override
public Object nextValue() throws IOException {
return format.format(values.nextValue());
}
};
}
@Override
public void close() {
}

View File

@ -19,12 +19,17 @@
package org.elasticsearch.index.fielddata.plain;
import org.elasticsearch.index.fielddata.LeafNumericFieldData;
import org.apache.lucene.index.SortedNumericDocValues;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.fielddata.LeafNumericFieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.mapper.DocValueFetcher;
import org.elasticsearch.search.DocValueFormat;
import java.io.IOException;
/**
* Specialization of {@link LeafNumericFieldData} for integers.
@ -73,6 +78,27 @@ public abstract class LeafLongFieldData implements LeafNumericFieldData {
return FieldData.castToDouble(getLongValues());
}
@Override
public DocValueFetcher.Leaf getLeafValueFetcher(DocValueFormat format) {
SortedNumericDocValues values = getLongValues();
return new DocValueFetcher.Leaf() {
@Override
public boolean advanceExact(int docId) throws IOException {
return values.advanceExact(docId);
}
@Override
public int docValueCount() throws IOException {
return values.docValueCount();
}
@Override
public Object nextValue() throws IOException {
return format.format(values.nextValue());
}
};
}
@Override
public void close() {}
}

View File

@ -38,8 +38,10 @@ import org.elasticsearch.index.fielddata.LeafNumericFieldData;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource;
import org.elasticsearch.index.mapper.DocValueFetcher;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
@ -173,6 +175,28 @@ public class SortedNumericIndexFieldData extends IndexNumericFieldData {
throw new IllegalStateException("Cannot load doc values", e);
}
}
@Override
public DocValueFetcher.Leaf getLeafValueFetcher(DocValueFormat format) {
DocValueFormat nanosFormat = DocValueFormat.withNanosecondResolution(format);
SortedNumericDocValues values = getLongValuesAsNanos();
return new DocValueFetcher.Leaf() {
@Override
public boolean advanceExact(int docId) throws IOException {
return values.advanceExact(docId);
}
@Override
public int docValueCount() throws IOException {
return values.docValueCount();
}
@Override
public Object nextValue() throws IOException {
return nanosFormat.format(values.nextValue());
}
};
}
}
/**

View File

@ -40,6 +40,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.geometry.Geometry;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.io.UncheckedIOException;
@ -185,7 +186,7 @@ public abstract class AbstractGeometryFieldMapper<Parsed, Processed> extends Fie
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
String geoFormat = format != null ? format : GeoJsonGeometryFormat.NAME;
AbstractGeometryFieldType<Parsed, Processed> mappedFieldType = fieldType();

View File

@ -193,7 +193,7 @@ public class BinaryFieldMapper extends ParametrizedFieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

View File

@ -254,7 +254,7 @@ public class BooleanFieldMapper extends ParametrizedFieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

View File

@ -45,6 +45,7 @@ import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.suggest.completion.CompletionSuggester;
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import org.elasticsearch.search.suggest.completion.context.ContextMappings;
@ -536,7 +537,7 @@ public class CompletionFieldMapper extends ParametrizedFieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

View File

@ -619,7 +619,7 @@ public final class DateFieldMapper extends ParametrizedFieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
DateFormatter defaultFormatter = fieldType().dateTimeFormatter();
DateFormatter formatter = format != null
? DateFormatter.forPattern(format).withLocale(defaultFormatter.locale())

View File

@ -0,0 +1,79 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static java.util.Collections.emptyList;
/**
* Value fetcher that loads from doc values.
*/
public final class DocValueFetcher implements ValueFetcher {
private final DocValueFormat format;
private final IndexFieldData<?> ifd;
private Leaf leaf;
public DocValueFetcher(DocValueFormat format, IndexFieldData<?> ifd) {
this.format = format;
this.ifd = ifd;
}
public void setNextReader(LeafReaderContext context) {
leaf = ifd.load(context).getLeafValueFetcher(format);
}
@Override
public List<Object> fetchValues(SourceLookup lookup) throws IOException {
if (false == leaf.advanceExact(lookup.docId())) {
return emptyList();
}
List<Object> result = new ArrayList<Object>(leaf.docValueCount());
for (int i = 0, count = leaf.docValueCount(); i < count; ++i) {
result.add(leaf.nextValue());
}
return result;
}
public interface Leaf {
/**
* Advance the doc values reader to the provided doc.
* @return false if there are no values for this document, true otherwise
*/
boolean advanceExact(int docId) throws IOException;
/**
* A count of the number of values at this document.
*/
int docValueCount() throws IOException;
/**
* Load and format the next value.
*/
Object nextValue() throws IOException;
}
}
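A usage note on the `Leaf` contract: `advanceExact` must be called, and return `true`, before `docValueCount()` or `nextValue()`, and each value can be read once, in order. A minimal sketch of driving a leaf directly, doing by hand what `fetchValues` above does (class and method names are illustrative):

```java
import java.io.IOException;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.DocValueFetcher;
import org.elasticsearch.search.DocValueFormat;

class LeafContractSketch {
    static void dump(IndexFieldData<?> ifd, LeafReaderContext ctx, int docId) throws IOException {
        // Load per-segment data, then ask it for a fetcher-compatible view.
        DocValueFetcher.Leaf leaf = ifd.load(ctx).getLeafValueFetcher(DocValueFormat.RAW);
        if (leaf.advanceExact(docId) == false) {
            return; // no values for this document
        }
        for (int i = 0, n = leaf.docValueCount(); i < n; i++) {
            System.out.println(leaf.nextValue()); // already formatted by the DocValueFormat
        }
    }
}
```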

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
@ -35,6 +36,7 @@ import org.elasticsearch.common.xcontent.support.AbstractXContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.FieldNamesFieldMapper.FieldNamesFieldType;
import org.elasticsearch.search.fetch.subphase.FetchFieldsPhase;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.ArrayList;
@ -278,7 +280,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
/**
* Create a helper class to fetch field values during the {@link FetchFieldsPhase}.
*/
public abstract ValueFetcher valueFetcher(MapperService mapperService, @Nullable String format);
public abstract ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, @Nullable String format);
protected void createFieldNamesField(ParseContext context) {
FieldNamesFieldType fieldNamesFieldType = context.docMapper().metadataMapper(FieldNamesFieldMapper.class).fieldType();

View File

@ -404,7 +404,7 @@ public class IpFieldMapper extends ParametrizedFieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

View File

@ -394,7 +394,7 @@ public final class KeywordFieldMapper extends ParametrizedFieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.Map;
@ -168,7 +169,7 @@ public abstract class MetadataFieldMapper extends ParametrizedFieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup lookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "].");
}

View File

@ -1089,7 +1089,7 @@ public class NumberFieldMapper extends ParametrizedFieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

View File

@ -368,7 +368,7 @@ public class RangeFieldMapper extends ParametrizedFieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
DateFormatter defaultFormatter = fieldType().dateTimeFormatter();
DateFormatter formatter = format != null
? DateFormatter.forPattern(format).withLocale(defaultFormatter.locale())

View File

@ -503,7 +503,7 @@ public class TextFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@ -534,7 +534,7 @@ public class TextFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}
@ -841,7 +841,7 @@ public class TextFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

View File

@ -19,9 +19,11 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.search.fetch.subphase.FetchFieldsPhase;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.List;
/**
@ -42,5 +44,10 @@ public interface ValueFetcher {
* @param lookup a lookup structure over the document's source.
* @return a list a standardized field values.
*/
List<Object> fetchValues(SourceLookup lookup);
List<Object> fetchValues(SourceLookup lookup) throws IOException;
/**
* Update the leaf reader used to fetch values.
*/
default void setNextReader(LeafReaderContext context) {}
}
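With the default no-op `setNextReader`, source-backed fetchers stay one-liners while doc-values-backed ones (like `DocValueFetcher` above) override it to load per-segment state. A hedged sketch of a trivial source-backed fetcher — `"title"` and the wrapper class are illustrative:

```java
import java.io.IOException;
import java.util.List;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.search.lookup.SourceLookup;

class SourceFetcherSketch {
    static List<Object> titleValues(SourceLookup sourceLookup) throws IOException {
        // No per-segment state, so the default no-op setNextReader is enough
        // and the interface can be implemented with a lambda.
        ValueFetcher fromSource = lookup -> lookup.extractRawValues("title");
        return fromSource.fetchValues(sourceLookup);
    }
}
```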

View File

@ -95,8 +95,7 @@ public abstract class InnerHitContextBuilder {
}
if (innerHitBuilder.getFetchFields() != null) {
String indexName = queryShardContext.index().getName();
FetchFieldsContext fieldsContext = FetchFieldsContext.create(
indexName, queryShardContext.getMapperService(), innerHitBuilder.getFetchFields());
FetchFieldsContext fieldsContext = new FetchFieldsContext(innerHitBuilder.getFetchFields());
innerHitsContext.fetchFieldsContext(fieldsContext);
}
if (innerHitBuilder.getScriptFields() != null) {

View File

@ -325,16 +325,35 @@ public class QueryShardContext extends QueryRewriteContext {
private SearchLookup lookup = null;
/**
* Get the lookup to use during the search.
*/
public SearchLookup lookup() {
if (this.lookup == null) {
this.lookup = new SearchLookup(
getMapperService(),
(fieldType, searchLookup) -> indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName(), searchLookup),
types);
types
);
}
return this.lookup;
}
/**
* Build a lookup customized for the fetch phase. Use {@link #lookup()}
* in other phases.
*/
public SearchLookup newFetchLookup() {
/*
* Real customization coming soon, I promise!
*/
return new SearchLookup(
getMapperService(),
(fieldType, searchLookup) -> indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName(), searchLookup),
types
);
}
public NestedScope nestedScope() {
return nestedScope;
}

View File

@ -39,7 +39,6 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
@ -511,11 +510,6 @@ final class DefaultSearchContext extends SearchContext {
return indexService.cache().bitsetFilterCache();
}
@Override
public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType fieldType) {
return queryShardContext.getForField(fieldType);
}
@Override
public TimeValue timeout() {
return timeout;

View File

@ -968,9 +968,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
context.docValuesContext(docValuesContext);
}
if (source.fetchFields() != null) {
String indexName = context.indexShard().shardId().getIndexName();
FetchFieldsContext fetchFieldsContext = FetchFieldsContext.create(
indexName, context.mapperService(), source.fetchFields());
FetchFieldsContext fetchFieldsContext = new FetchFieldsContext(source.fetchFields());
context.fetchFieldsContext(fetchFieldsContext);
}
if (source.highlighter() != null) {

View File

@ -114,8 +114,7 @@ class TopHitsAggregatorFactory extends AggregatorFactory {
subSearchContext.docValuesContext(docValuesContext);
}
if (fetchFields != null) {
String indexName = searchContext.indexShard().shardId().getIndexName();
FetchFieldsContext fieldsContext = FetchFieldsContext.create(indexName, searchContext.mapperService(), fetchFields);
FetchFieldsContext fieldsContext = new FetchFieldsContext(fetchFields);
subSearchContext.fetchFieldsContext(fieldsContext);
}
for (ScriptFieldsContext.ScriptField field : scriptFields) {

View File

@ -56,6 +56,7 @@ import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.fetch.subphase.InnerHitsPhase;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.tasks.TaskCancelledException;
@ -103,7 +104,8 @@ public class FetchPhase {
SearchHit[] hits = new SearchHit[context.docIdsToLoadSize()];
Map<String, Object> sharedCache = new HashMap<>();
List<FetchSubPhaseProcessor> processors = getProcessors(context);
SearchLookup lookup = context.getQueryShardContext().newFetchLookup();
List<FetchSubPhaseProcessor> processors = getProcessors(context, lookup);
int currentReaderIndex = -1;
LeafReaderContext currentReaderContext = null;
@ -122,8 +124,15 @@ public class FetchPhase {
}
}
assert currentReaderContext != null;
HitContext hit
= prepareHitContext(context, fieldsVisitor, docId, storedToRequestedFields, currentReaderContext, sharedCache);
HitContext hit = prepareHitContext(
context,
lookup,
fieldsVisitor,
docId,
storedToRequestedFields,
currentReaderContext,
sharedCache
);
for (FetchSubPhaseProcessor processor : processors) {
processor.process(hit);
}
@ -141,11 +150,11 @@ public class FetchPhase {
}
List<FetchSubPhaseProcessor> getProcessors(SearchContext context) {
List<FetchSubPhaseProcessor> getProcessors(SearchContext context, SearchLookup lookup) {
try {
List<FetchSubPhaseProcessor> processors = new ArrayList<>();
for (FetchSubPhase fsp : fetchSubPhases) {
FetchSubPhaseProcessor processor = fsp.getProcessor(context);
FetchSubPhaseProcessor processor = fsp.getProcessor(context, lookup);
if (processor != null) {
processors.add(processor);
}
@ -235,12 +244,20 @@ public class FetchPhase {
return -1;
}
private HitContext prepareHitContext(SearchContext context, FieldsVisitor fieldsVisitor, int docId,
private HitContext prepareHitContext(SearchContext context, SearchLookup lookup, FieldsVisitor fieldsVisitor, int docId,
Map<String, Set<String>> storedToRequestedFields,
LeafReaderContext subReaderContext, Map<String, Object> sharedCache) throws IOException {
int rootDocId = findRootDocumentIfNested(context, subReaderContext, docId - subReaderContext.docBase);
if (rootDocId == -1) {
return prepareNonNestedHitContext(context, fieldsVisitor, docId, storedToRequestedFields, subReaderContext, sharedCache);
return prepareNonNestedHitContext(
context,
lookup,
fieldsVisitor,
docId,
storedToRequestedFields,
subReaderContext,
sharedCache
);
} else {
return prepareNestedHitContext(context, docId, rootDocId, storedToRequestedFields, subReaderContext, sharedCache);
}
@ -254,6 +271,7 @@ public class FetchPhase {
* fetch subphases that use the hit context to access the preloaded source.
*/
private HitContext prepareNonNestedHitContext(SearchContext context,
SearchLookup lookup,
FieldsVisitor fieldsVisitor,
int docId,
Map<String, Set<String>> storedToRequestedFields,
@ -265,7 +283,7 @@ public class FetchPhase {
if (fieldsVisitor == null) {
SearchHit hit = new SearchHit(docId, null, typeText, null, null);
return new HitContext(hit, subReaderContext, subDocId, sharedCache);
return new HitContext(hit, subReaderContext, subDocId, lookup.source(), sharedCache);
} else {
SearchHit hit;
loadStoredFields(context.mapperService(), subReaderContext, fieldsVisitor, subDocId);
@ -279,7 +297,7 @@ public class FetchPhase {
hit = new SearchHit(docId, uid.id(), typeText, emptyMap(), emptyMap());
}
HitContext hitContext = new HitContext(hit, subReaderContext, subDocId, sharedCache);
HitContext hitContext = new HitContext(hit, subReaderContext, subDocId, lookup.source(), sharedCache);
if (fieldsVisitor.source() != null) {
hitContext.sourceLookup().setSource(fieldsVisitor.source());
}
@ -288,7 +306,6 @@ public class FetchPhase {
}
/**
/**
* Resets the provided {@link HitContext} with information on the current
* nested document. This includes the following:
* - Adding an initial {@link SearchHit} instance.
@ -359,7 +376,13 @@ public class FetchPhase {
getInternalNestedIdentity(context, nestedDocId, subReaderContext, context.mapperService(), nestedObjectMapper);
SearchHit hit = new SearchHit(nestedTopDocId, rootId.id(), typeText, nestedIdentity, docFields, metaFields);
HitContext hitContext = new HitContext(hit, subReaderContext, nestedDocId, sharedCache);
HitContext hitContext = new HitContext(
hit,
subReaderContext,
nestedDocId,
new SourceLookup(), // Use a clean, fresh SourceLookup for the nested context
sharedCache
);
if (rootSourceAsMap != null) {
// Isolate the nested json array object that matches with nested hit and wrap it back into the same json

View File

@ -24,6 +24,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
@ -38,14 +39,21 @@ public interface FetchSubPhase {
private final SearchHit hit;
private final LeafReaderContext readerContext;
private final int docId;
private final SourceLookup sourceLookup = new SourceLookup();
private final SourceLookup sourceLookup;
private final Map<String, Object> cache;
public HitContext(SearchHit hit, LeafReaderContext context, int docId, Map<String, Object> cache) {
public HitContext(
SearchHit hit,
LeafReaderContext context,
int docId,
SourceLookup sourceLookup,
Map<String, Object> cache
) {
this.hit = hit;
this.readerContext = context;
this.docId = docId;
this.sourceLookup.setSegmentAndDocument(context, docId);
this.sourceLookup = sourceLookup;
sourceLookup.setSegmentAndDocument(context, docId);
this.cache = cache;
}
@ -95,5 +103,5 @@ public interface FetchSubPhase {
* If nothing should be executed for the provided {@link SearchContext}, then the
* implementation should return {@code null}
*/
FetchSubPhaseProcessor getProcessor(SearchContext searchContext) throws IOException;
FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) throws IOException;
}
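Third-party sub-phases pick up the extra parameter too (compare the `FetchSubPhasePluginIT` change above). A hypothetical skeleton under the new signature, showing where the lookup would come into play:

```java
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;

// Hypothetical sub-phase under the new signature. The lookup is how a
// processor reaches doc values without the old instanceof plumbing.
public class NoopSubPhase implements FetchSubPhase {
    @Override
    public FetchSubPhaseProcessor getProcessor(SearchContext context, SearchLookup lookup) {
        if (context.docIdsToLoadSize() == 0) {
            return null; // nothing to do for this context
        }
        return new FetchSubPhaseProcessor() {
            @Override
            public void setNextReader(LeafReaderContext readerContext) {
                // per-segment setup would go here
            }

            @Override
            public void process(HitContext hitContext) {
                // per-hit work would go here, e.g. via lookup.doc()
            }
        };
    }
}
```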

View File

@ -23,6 +23,7 @@ import org.apache.lucene.search.Explanation;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.rescore.RescoreContext;
import java.io.IOException;
@ -33,7 +34,7 @@ import java.io.IOException;
public final class ExplainPhase implements FetchSubPhase {
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext context) {
public FetchSubPhaseProcessor getProcessor(SearchContext context, SearchLookup lookup) {
if (context.explain() == false || context.hasOnlySuggest()) {
return null;
}

View File

@ -19,28 +19,20 @@
package org.elasticsearch.search.fetch.subphase;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.fielddata.plain.SortedNumericIndexFieldData;
import org.elasticsearch.index.mapper.DocValueFetcher;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import static org.elasticsearch.search.DocValueFormat.withNanosecondResolution;
/**
* Fetch sub phase which pulls data from doc values.
@ -53,7 +45,7 @@ public final class FetchDocValuesPhase implements FetchSubPhase {
private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(FetchDocValuesPhase.class);
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext context) throws IOException {
public FetchSubPhaseProcessor getProcessor(SearchContext context, SearchLookup lookup) throws IOException {
if (context.collapse() != null) {
// retrieve the `doc_value` associated with the collapse field
String name = context.collapse().getFieldName();
@ -77,19 +69,27 @@ public final class FetchDocValuesPhase implements FetchSubPhase {
"ease the transition to 7.x. It has become the default and shouldn't be set explicitly anymore.");
}
/*
* It's tempting to swap this to a `Map` but that'd break backwards
* compatibility because we support fetching the same field multiple
* times with different configuration. That isn't possible with a `Map`.
*/
List<DocValueField> fields = new ArrayList<>();
for (FieldAndFormat fieldAndFormat : context.docValuesContext().fields()) {
DocValueField f = buildField(context, fieldAndFormat);
if (f != null) {
fields.add(f);
MappedFieldType ft = context.mapperService().fieldType(fieldAndFormat.field);
if (ft == null) {
continue;
}
String format = USE_DEFAULT_FORMAT.equals(fieldAndFormat.format) ? null : fieldAndFormat.format;
ValueFetcher fetcher = new DocValueFetcher(ft.docValueFormat(format, null), lookup.doc().getForField(ft));
fields.add(new DocValueField(fieldAndFormat.field, fetcher));
}
return new FetchSubPhaseProcessor() {
@Override
public void setNextReader(LeafReaderContext readerContext) throws IOException {
for (DocValueField f : fields) {
f.setNextReader(readerContext);
f.fetcher.setNextReader(readerContext);
}
}
@ -103,153 +103,19 @@ public final class FetchDocValuesPhase implements FetchSubPhase {
// docValues fields will still be document fields, and put under "fields" section of a hit.
hit.hit().setDocumentField(f.field, hitField);
}
f.setValues(hit.docId(), hitField);
hitField.getValues().addAll(f.fetcher.fetchValues(hit.sourceLookup()));
}
}
};
}
private abstract static class DocValueField {
private class DocValueField {
private final String field;
private final ValueFetcher fetcher;
final String field;
final DocValueFormat format;
protected DocValueField(String field, DocValueFormat format) {
DocValueField(String field, ValueFetcher fetcher) {
this.field = field;
this.format = format;
this.fetcher = fetcher;
}
abstract void setNextReader(LeafReaderContext context);
abstract void setValues(int doc, DocumentField hitField) throws IOException;
}
private static class DoubleDocValueField extends DocValueField {
SortedNumericDoubleValues doubleValues;
IndexNumericFieldData fieldData;
DoubleDocValueField(String field, IndexNumericFieldData fieldData, DocValueFormat format) {
super(field, format);
this.fieldData = fieldData;
}
@Override
void setNextReader(LeafReaderContext context) {
doubleValues = fieldData.load(context).getDoubleValues();
}
@Override
void setValues(int doc, DocumentField hitField) throws IOException {
final List<Object> values = hitField.getValues();
if (doubleValues.advanceExact(doc)) {
for (int i = 0, count = doubleValues.docValueCount(); i < count; ++i) {
values.add(format.format(doubleValues.nextValue()));
}
}
}
}
private static class NanoDocValueField extends DocValueField {
SortedNumericDocValues longValues;
IndexNumericFieldData fieldData;
NanoDocValueField(String field, IndexNumericFieldData fieldData, DocValueFormat format) {
super(field, withNanosecondResolution(format));
this.fieldData = fieldData;
}
@Override
void setNextReader(LeafReaderContext context) {
longValues = ((SortedNumericIndexFieldData.NanoSecondFieldData) fieldData.load(context)).getLongValuesAsNanos();
}
@Override
void setValues(int doc, DocumentField hitField) throws IOException {
final List<Object> values = hitField.getValues();
if (longValues.advanceExact(doc)) {
for (int i = 0, count = longValues.docValueCount(); i < count; ++i) {
values.add(format.format(longValues.nextValue()));
}
}
}
}
private static class LongDocValueField extends DocValueField {
SortedNumericDocValues longValues;
IndexNumericFieldData fieldData;
LongDocValueField(String field, IndexNumericFieldData fieldData, DocValueFormat format) {
super(field, format);
this.fieldData = fieldData;
}
@Override
void setNextReader(LeafReaderContext context) {
longValues = fieldData.load(context).getLongValues();
}
@Override
void setValues(int doc, DocumentField hitField) throws IOException {
final List<Object> values = hitField.getValues();
if (longValues.advanceExact(doc)) {
for (int i = 0, count = longValues.docValueCount(); i < count; ++i) {
values.add(format.format(longValues.nextValue()));
}
}
}
}
private static class BinaryDocValueField extends DocValueField {
SortedBinaryDocValues byteValues;
IndexFieldData<?> fieldData;
BinaryDocValueField(String field, IndexFieldData<?> fieldData, DocValueFormat format) {
super(field, format);
this.fieldData = fieldData;
}
@Override
void setNextReader(LeafReaderContext context) {
byteValues = fieldData.load(context).getBytesValues();
}
@Override
void setValues(int doc, DocumentField hitField) throws IOException {
final List<Object> values = hitField.getValues();
if (byteValues.advanceExact(doc)) {
for (int i = 0, count = byteValues.docValueCount(); i < count; ++i) {
values.add(format.format(byteValues.nextValue()));
}
}
}
}
private static DocValueField buildField(SearchContext context, FieldAndFormat fieldAndFormat) {
MappedFieldType fieldType = context.mapperService().fieldType(fieldAndFormat.field);
if (fieldType == null) {
return null;
}
final IndexFieldData<?> indexFieldData = context.getForField(fieldType);
String formatDesc = fieldAndFormat.format;
if (Objects.equals(formatDesc, USE_DEFAULT_FORMAT)) {
formatDesc = null;
}
DocValueFormat format = fieldType.docValueFormat(formatDesc, null);
if (indexFieldData instanceof IndexNumericFieldData) {
if (((IndexNumericFieldData) indexFieldData).getNumericType().isFloatingPoint()) {
return new DoubleDocValueField(fieldAndFormat.field, (IndexNumericFieldData) indexFieldData, format);
}
if (((IndexNumericFieldData) indexFieldData).getNumericType() == NumericType.DATE_NANOSECONDS) {
return new NanoDocValueField(fieldAndFormat.field, (IndexNumericFieldData) indexFieldData, format);
}
return new LongDocValueField(fieldAndFormat.field, (IndexNumericFieldData) indexFieldData, format);
}
return new BinaryDocValueField(fieldAndFormat.field, indexFieldData, format);
}
}

View File

@ -18,8 +18,8 @@
*/
package org.elasticsearch.search.fetch.subphase;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.search.lookup.SearchLookup;
import java.util.List;
@ -27,27 +27,18 @@ import java.util.List;
* The context needed to retrieve fields.
*/
public class FetchFieldsContext {
private final List<FieldAndFormat> fields;
private FieldValueRetriever fieldValueRetriever;
public FetchFieldsContext(List<FieldAndFormat> fields) {
this.fields = fields;
}
public static FetchFieldsContext create(String indexName,
MapperService mapperService,
List<FieldAndFormat> fields) {
DocumentMapper documentMapper = mapperService.documentMapper();
if (documentMapper.sourceMapper().enabled() == false) {
throw new IllegalArgumentException("Unable to retrieve the requested [fields] since _source is " +
"disabled in the mappings for index [" + indexName + "]");
public FieldValueRetriever fieldValueRetriever(String indexName, MapperService mapperService, SearchLookup searchLookup) {
if (mapperService.documentMapper().sourceMapper().enabled() == false) {
throw new IllegalArgumentException(
"Unable to retrieve the requested [fields] since _source is disabled in the mappings for index [" + indexName + "]"
);
}
FieldValueRetriever fieldValueRetriever = FieldValueRetriever.create(mapperService, fields);
return new FetchFieldsContext(fieldValueRetriever);
}
private FetchFieldsContext(FieldValueRetriever fieldValueRetriever) {
this.fieldValueRetriever = fieldValueRetriever;
}
public FieldValueRetriever fieldValueRetriever() {
return fieldValueRetriever;
return FieldValueRetriever.create(mapperService, searchLookup, fields);
}
}

View File

@ -26,8 +26,10 @@ import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
@ -39,25 +41,29 @@ import java.util.Set;
public final class FetchFieldsPhase implements FetchSubPhase {
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext) {
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) {
FetchFieldsContext fetchFieldsContext = searchContext.fetchFieldsContext();
if (fetchFieldsContext == null) {
return null;
}
FieldValueRetriever retriever = fetchFieldsContext.fieldValueRetriever(
searchContext.indexShard().shardId().getIndexName(),
searchContext.mapperService(),
lookup
);
return new FetchSubPhaseProcessor() {
@Override
public void setNextReader(LeafReaderContext readerContext) {
retriever.setNextReader(readerContext);
}
@Override
public void process(HitContext hitContext) {
public void process(HitContext hitContext) throws IOException {
SearchHit hit = hitContext.hit();
SourceLookup sourceLookup = hitContext.sourceLookup();
FieldValueRetriever fieldValueRetriever = fetchFieldsContext.fieldValueRetriever();
Set<String> ignoredFields = getIgnoredFields(hit);
Map<String, DocumentField> documentFields = fieldValueRetriever.retrieve(sourceLookup, ignoredFields);
Map<String, DocumentField> documentFields = retriever.retrieve(sourceLookup, ignoredFields);
for (Map.Entry<String, DocumentField> entry : documentFields.entrySet()) {
hit.setDocumentField(entry.getKey(), entry.getValue());
}
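
Editor's note: every fetch sub-phase now receives the `SearchLookup` alongside the `SearchContext`, and `process` may throw `IOException`. A hedged sketch of the reworked contract, using a hypothetical no-op sub-phase (not part of this commit):

import java.io.IOException;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;

public final class NoOpFetchPhase implements FetchSubPhase {
    @Override
    public FetchSubPhaseProcessor getProcessor(SearchContext context, SearchLookup lookup) {
        return new FetchSubPhaseProcessor() {
            @Override
            public void setNextReader(LeafReaderContext readerContext) {
                // invoked once per segment, before any hits from that segment are processed
            }

            @Override
            public void process(HitContext hitContext) throws IOException {
                // invoked once per hit; the signature now declares IOException
            }
        };
    }
}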

View File

@ -28,13 +28,14 @@ import org.apache.lucene.search.Weight;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
public class FetchScorePhase implements FetchSubPhase {
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext context) throws IOException {
public FetchSubPhaseProcessor getProcessor(SearchContext context, SearchLookup lookup) throws IOException {
if (context.trackScores() == false || context.docIdsToLoadSize() == 0 ||
// scores were already computed since they are needed on the coordinating node to merge top hits
context.sort() == null) {

View File

@ -28,6 +28,7 @@ import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
@ -36,7 +37,7 @@ import java.util.Map;
public final class FetchSourcePhase implements FetchSubPhase {
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext) {
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) {
if (searchContext.sourceRequested() == false) {
return null;
}

View File

@ -25,13 +25,14 @@ import org.elasticsearch.index.mapper.VersionFieldMapper;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
public final class FetchVersionPhase implements FetchSubPhase {
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext context) {
public FetchSubPhaseProcessor getProcessor(SearchContext context, SearchLookup lookup) {
if (context.version() == false ||
(context.storedFieldsContext() != null && context.storedFieldsContext().fetchFields() == false)) {
return null;

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.fetch.subphase;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.index.mapper.FieldAliasMapper;
import org.elasticsearch.index.mapper.FieldMapper;
@ -26,8 +27,10 @@ import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
@ -40,10 +43,11 @@ import java.util.Set;
* Then given a specific document, it can retrieve the corresponding fields from the document's source.
*/
public class FieldValueRetriever {
private final List<FieldContext> fieldContexts;
public static FieldValueRetriever create(MapperService mapperService,
Collection<FieldAndFormat> fieldAndFormats) {
public static FieldValueRetriever create(
MapperService mapperService,
SearchLookup searchLookup,
Collection<FieldAndFormat> fieldAndFormats
) {
MappingLookup fieldMappers = mapperService.documentMapper().mappers();
List<FieldContext> fieldContexts = new ArrayList<>();
@ -65,7 +69,7 @@ public class FieldValueRetriever {
}
FieldMapper fieldMapper = (FieldMapper) mapper;
ValueFetcher valueFetcher = fieldMapper.valueFetcher(mapperService, format);
ValueFetcher valueFetcher = fieldMapper.valueFetcher(mapperService, searchLookup, format);
fieldContexts.add(new FieldContext(field, valueFetcher));
}
}
@ -73,11 +77,13 @@ public class FieldValueRetriever {
return new FieldValueRetriever(fieldContexts);
}
private final List<FieldContext> fieldContexts;
private FieldValueRetriever(List<FieldContext> fieldContexts) {
this.fieldContexts = fieldContexts;
}
public Map<String, DocumentField> retrieve(SourceLookup sourceLookup, Set<String> ignoredFields) {
public Map<String, DocumentField> retrieve(SourceLookup sourceLookup, Set<String> ignoredFields) throws IOException {
Map<String, DocumentField> documentFields = new HashMap<>();
for (FieldContext context : fieldContexts) {
String field = context.fieldName;
@ -95,6 +101,12 @@ public class FieldValueRetriever {
return documentFields;
}
public void setNextReader(LeafReaderContext readerContext) {
for (FieldContext field : fieldContexts) {
field.valueFetcher.setNextReader(readerContext);
}
}
private static class FieldContext {
final String fieldName;
final ValueFetcher valueFetcher;
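
Editor's note: the retriever is now segment-aware, so callers must advance it to the current `LeafReaderContext` before retrieving; that lets doc-value-backed fetchers read from the right segment. A hedged usage sketch (`retriever`, `leafReaderContext`, `sourceLookup`, `docId`, and `ignoredFields` assumed in scope):

retriever.setNextReader(leafReaderContext);                    // once per segment
sourceLookup.setSegmentAndDocument(leafReaderContext, docId);  // position on the hit
Map<String, DocumentField> fields = retriever.retrieve(sourceLookup, ignoredFields);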

View File

@ -31,6 +31,7 @@ import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
@ -46,7 +47,7 @@ public final class InnerHitsPhase implements FetchSubPhase {
}
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext) {
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) {
if (searchContext.innerHits() == null) {
return null;
}

View File

@ -28,6 +28,7 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.ArrayList;
@ -38,7 +39,7 @@ import java.util.Map;
public final class MatchedQueriesPhase implements FetchSubPhase {
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext context) throws IOException {
public FetchSubPhaseProcessor getProcessor(SearchContext context, SearchLookup lookup) throws IOException {
if (context.docIdsToLoadSize() == 0 ||
// in case the request has only suggest, parsed query is null
context.parsedQuery() == null) {

View File

@ -25,6 +25,7 @@ import org.elasticsearch.script.FieldScript;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.ArrayList;
@ -35,7 +36,7 @@ import java.util.List;
public final class ScriptFieldsPhase implements FetchSubPhase {
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext context) {
public FetchSubPhaseProcessor getProcessor(SearchContext context, SearchLookup lookup) {
if (context.hasScriptFields() == false) {
return null;
}

View File

@ -25,13 +25,14 @@ import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
public final class SeqNoPrimaryTermPhase implements FetchSubPhase {
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext context) throws IOException {
public FetchSubPhaseProcessor getProcessor(SearchContext context, SearchLookup lookup) throws IOException {
if (context.seqNoAndPrimaryTerm() == false) {
return null;
}

View File

@ -31,6 +31,7 @@ import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.Collection;
@ -49,7 +50,7 @@ public class HighlightPhase implements FetchSubPhase {
}
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext context) {
public FetchSubPhaseProcessor getProcessor(SearchContext context, SearchLookup lookup) {
if (context.highlight() == null) {
return null;
}

View File

@ -27,7 +27,6 @@ import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
@ -244,11 +243,6 @@ public abstract class FilteredSearchContext extends SearchContext {
return in.bitsetFilterCache();
}
@Override
public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType fieldType) {
return in.getForField(fieldType);
}
@Override
public TimeValue timeout() {
return in.timeout();

View File

@ -30,7 +30,6 @@ import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
@ -230,8 +229,6 @@ public abstract class SearchContext implements Releasable {
public abstract BitsetFilterCache bitsetFilterCache();
public abstract <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType fieldType);
public abstract TimeValue timeout();
public abstract void timeout(TimeValue timeout);

View File

@ -55,6 +55,10 @@ public class SourceLookup implements Map {
return sourceContentType;
}
public int docId() {
return docId;
}
// Scripting requires this method to be public. Using source()
// is not possible because certain checks use source == null as
// a determination of whether source is enabled/disabled, but it should

View File

@ -172,7 +172,7 @@ public class BooleanFieldMapperTests extends MapperTestCase {
assertEquals(new BoostQuery(new TermQuery(new Term("field", "T")), 2.0f), ft.termQuery("true", null));
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

View File

@ -721,7 +721,7 @@ public class CompletionFieldMapperTests extends MapperTestCase {
"The maximum allowed number of completion contexts in a mapping will be limited to [10] starting in version [8.0].");
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
NamedAnalyzer defaultAnalyzer = new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer());

View File

@ -30,6 +30,7 @@ import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.DateFieldMapper.Resolution;
import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.search.DocValueFormat;
import java.io.IOException;
import java.time.ZoneId;
@ -284,7 +285,7 @@ public class DateFieldMapperTests extends MapperTestCase {
assertThat(e.getMessage(), containsString("Error parsing [format] on field [field]: Invalid"));
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
DateFieldMapper mapper = createMapper(Resolution.MILLISECONDS, null);
String date = "2020-05-15T21:33:02.000Z";
assertEquals(List.of(date), fetchSourceValue(mapper, date));
@ -305,7 +306,7 @@ public class DateFieldMapperTests extends MapperTestCase {
assertEquals(List.of(nullValueDate), fetchSourceValue(nullValueMapper, null));
}
public void testParseSourceValueWithFormat() {
public void testParseSourceValueWithFormat() throws IOException {
DateFieldMapper mapper = createMapper(Resolution.NANOSECONDS, "strict_date_time", "1970-12-29T00:00:00.000Z");
String date = "1990-12-29T00:00:00.000Z";
assertEquals(List.of("1990/12/29"), fetchSourceValue(mapper, date, "yyyy/MM/dd"));
@ -313,7 +314,7 @@ public class DateFieldMapperTests extends MapperTestCase {
assertEquals(List.of("1970/12/29"), fetchSourceValue(mapper, null, "yyyy/MM/dd"));
}
public void testParseSourceValueNanos() {
public void testParseSourceValueNanos() throws IOException {
DateFieldMapper mapper = createMapper(Resolution.NANOSECONDS, "strict_date_time||epoch_millis");
String date = "2020-05-15T21:33:02.123456789Z";
assertEquals(List.of("2020-05-15T21:33:02.123456789Z"), fetchSourceValue(mapper, date));
@ -324,6 +325,28 @@ public class DateFieldMapperTests extends MapperTestCase {
assertEquals(List.of(nullValueDate), fetchSourceValue(nullValueMapper, null));
}
public void testFetchDocValuesMillis() throws IOException {
MapperService mapperService = createMapperService(
fieldMapping(b -> b.field("type", "date").field("format", "strict_date_time||epoch_millis"))
);
MappedFieldType ft = mapperService.fieldType("field");
DocValueFormat format = ft.docValueFormat(null, null);
String date = "2020-05-15T21:33:02.123Z";
assertEquals(List.of(date), fetchFromDocValues(mapperService, ft, format, date));
assertEquals(List.of(date), fetchFromDocValues(mapperService, ft, format, 1589578382123L));
}
public void testFetchDocValuesNanos() throws IOException {
MapperService mapperService = createMapperService(
fieldMapping(b -> b.field("type", "date_nanos").field("format", "strict_date_time||epoch_millis"))
);
MappedFieldType ft = mapperService.fieldType("field");
DocValueFormat format = ft.docValueFormat(null, null);
String date = "2020-05-15T21:33:02.123456789Z";
assertEquals(List.of(date), fetchFromDocValues(mapperService, ft, format, date));
assertEquals(List.of("2020-05-15T21:33:02.123Z"), fetchFromDocValues(mapperService, ft, format, 1589578382123L));
}
private DateFieldMapper createMapper(Resolution resolution, String format) {
return createMapper(resolution, format, null);
}

View File

@ -32,6 +32,7 @@ import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.io.StringReader;
@ -103,7 +104,7 @@ public class DocumentFieldMapperTests extends LuceneTestCase {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}

View File

@ -31,6 +31,7 @@ import org.elasticsearch.common.geo.builders.PointBuilder;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.geometry.Point;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.nio.charset.Charset;
@ -205,7 +206,7 @@ public class ExternalMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected Object parseSourceValue(Object value) {

View File

@ -31,6 +31,7 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.Collections;
@ -134,7 +135,7 @@ public class FakeStringFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected String parseSourceValue(Object value) {

View File

@ -299,7 +299,7 @@ public class GeoPointFieldMapperTests extends FieldMapperTestCase2<GeoPointField
);
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

View File

@ -214,7 +214,7 @@ public class GeoShapeFieldMapperTests extends FieldMapperTestCase2<GeoShapeField
assertThat(document.docs().get(0).getFields("field").length, equalTo(2));
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

View File

@ -294,7 +294,7 @@ public class IpFieldMapperTests extends ESSingleNodeTestCase {
assertThat(e.getMessage(), containsString("name cannot be empty string"));
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

View File

@ -35,6 +35,7 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.junit.Before;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
@ -86,7 +87,7 @@ public class IpRangeFieldMapperTests extends ESSingleNodeTestCase {
}
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

View File

@ -401,7 +401,7 @@ public class KeywordFieldMapperTests extends MapperTestCase {
);
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

View File

@ -633,7 +633,7 @@ public class LegacyGeoShapeFieldMapperTests extends FieldMapperTestCase2<LegacyG
assertFieldWarnings("tree", "strategy");
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

View File

@ -204,7 +204,7 @@ public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase {
assertFalse(dvField.fieldType().stored());
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

View File

@ -36,6 +36,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.ParametrizedFieldMapper.Parameter;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.io.IOException;
@ -193,7 +194,7 @@ public class ParametrizedMapperTests extends ESSingleNodeTestCase {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
return null;
}

View File

@ -339,7 +339,7 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
assertThat(e.getMessage(), containsString("Invalid format: [[test_format]]: Unknown pattern letter: t"));
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
@ -356,7 +356,7 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
fetchSourceValue(dateMapper, dateRange));
}
public void testParseSourceValueWithFormat() {
public void testParseSourceValueWithFormat() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

View File

@ -923,7 +923,7 @@ public class TextFieldMapperTests extends FieldMapperTestCase2<TextFieldMapper.B
assertThat(mapperService.documentMapper().mappers().getMapper("other_field"), instanceOf(KeywordFieldMapper.class));
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

View File

@ -31,6 +31,7 @@ import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.TestSearchContext;
@ -159,11 +160,11 @@ public class FetchSourcePhaseTests extends ESTestCase {
// We don't need a real index, just a LeafReaderContext which cannot be mocked.
MemoryIndex index = new MemoryIndex();
LeafReaderContext leafReaderContext = index.createSearcher().getIndexReader().leaves().get(0);
HitContext hitContext = new HitContext(searchHit, leafReaderContext, 1, new HashMap<>());
HitContext hitContext = new HitContext(searchHit, leafReaderContext, 1, new SourceLookup(), new HashMap<>());
hitContext.sourceLookup().setSource(source == null ? null : BytesReference.bytes(source));
FetchSourcePhase phase = new FetchSourcePhase();
FetchSubPhaseProcessor processor = phase.getProcessor(searchContext);
FetchSubPhaseProcessor processor = phase.getProcessor(searchContext, null);
if (fetchSource == false) {
assertNull(processor);
} else {

View File

@ -359,16 +359,20 @@ public class FieldValueRetrieverTests extends ESSingleNodeTestCase {
assertFalse(fields.containsKey("object"));
}
private Map<String, DocumentField> retrieveFields(MapperService mapperService, XContentBuilder source, String fieldPattern) {
private Map<String, DocumentField> retrieveFields(MapperService mapperService, XContentBuilder source, String fieldPattern)
throws IOException {
List<FieldAndFormat> fields = org.elasticsearch.common.collect.List.of(new FieldAndFormat(fieldPattern, null));
return retrieveFields(mapperService, source, fields);
}
private Map<String, DocumentField> retrieveFields(MapperService mapperService, XContentBuilder source, List<FieldAndFormat> fields) {
private Map<String, DocumentField> retrieveFields(MapperService mapperService, XContentBuilder source, List<FieldAndFormat> fields)
throws IOException {
SourceLookup sourceLookup = new SourceLookup();
sourceLookup.setSource(BytesReference.bytes(source));
FieldValueRetriever fetchFieldsLookup = FieldValueRetriever.create(mapperService, fields);
FieldValueRetriever fetchFieldsLookup = FieldValueRetriever.create(mapperService, null, fields);
return fetchFieldsLookup.retrieve(sourceLookup, org.elasticsearch.common.collect.Set.of());
}

View File

@ -254,17 +254,17 @@ public abstract class FieldMapperTestCase<T extends FieldMapper.Builder<?>> exte
return Strings.toString(x);
}
public static List<?> fetchSourceValue(FieldMapper mapper, Object sourceValue) {
public static List<?> fetchSourceValue(FieldMapper mapper, Object sourceValue) throws IOException {
return fetchSourceValue(mapper, sourceValue, null);
}
public static List<?> fetchSourceValue(FieldMapper mapper, Object sourceValue, String format) {
public static List<?> fetchSourceValue(FieldMapper mapper, Object sourceValue, String format) throws IOException {
String field = mapper.name();
MapperService mapperService = mock(MapperService.class);
when(mapperService.sourcePath(field)).thenReturn(org.elasticsearch.common.collect.Set.of(field));
ValueFetcher fetcher = mapper.valueFetcher(mapperService, format);
ValueFetcher fetcher = mapper.valueFetcher(mapperService, null, format);
SourceLookup lookup = new SourceLookup();
lookup.setSource(Collections.singletonMap(field, sourceValue));
return fetcher.fetchValues(lookup);

View File

@ -19,17 +19,27 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.Supplier;
import static org.hamcrest.Matchers.containsString;
import static org.mockito.Mockito.mock;
@ -121,19 +131,43 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
);
}
public static List<?> fetchSourceValue(FieldMapper mapper, Object sourceValue) {
public static List<?> fetchSourceValue(FieldMapper mapper, Object sourceValue) throws IOException {
return fetchSourceValue(mapper, sourceValue, null);
}
public static List<?> fetchSourceValue(FieldMapper mapper, Object sourceValue, String format) {
public static List<?> fetchSourceValue(FieldMapper mapper, Object sourceValue, String format) throws IOException {
String field = mapper.name();
MapperService mapperService = mock(MapperService.class);
when(mapperService.sourcePath(field)).thenReturn(org.elasticsearch.common.collect.Set.of(field));
ValueFetcher fetcher = mapper.valueFetcher(mapperService, format);
ValueFetcher fetcher = mapper.valueFetcher(mapperService, null, format);
SourceLookup lookup = new SourceLookup();
lookup.setSource(Collections.singletonMap(field, sourceValue));
return fetcher.fetchValues(lookup);
}
/**
* Use a {@linkplain FieldMapper} to extract values from doc values.
*/
protected final List<?> fetchFromDocValues(MapperService mapperService, MappedFieldType ft, DocValueFormat format, Object sourceValue)
throws IOException {
BiFunction<MappedFieldType, Supplier<SearchLookup>, IndexFieldData<?>> fieldDataLookup = (mft, lookupSource) -> mft
.fielddataBuilder("test", () -> { throw new UnsupportedOperationException(); })
.build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService(), mapperService);
SetOnce<List<?>> result = new SetOnce<>();
withLuceneIndex(mapperService, iw -> {
iw.addDocument(mapperService.documentMapper().parse(source(b -> b.field(ft.name(), sourceValue))).rootDoc());
}, iw -> {
SearchLookup lookup = new SearchLookup(mapperService, fieldDataLookup, null);
ValueFetcher valueFetcher = new DocValueFetcher(format, lookup.doc().getForField(ft));
IndexSearcher searcher = newSearcher(iw);
LeafReaderContext context = searcher.getIndexReader().leaves().get(0);
lookup.source().setSegmentAndDocument(context, 0);
valueFetcher.setNextReader(context);
result.set(valueFetcher.fetchValues(lookup.source()));
});
return result.get();
}
}
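
Editor's note: this helper backs the testFetchDocValuesMillis and testFetchDocValuesNanos cases above; it indexes one document, wires a `SearchLookup` over the resulting segment, and reads the value back through a `DocValueFetcher`. A hedged usage sketch with placeholder values, for any `MappedFieldType ft`:

DocValueFormat format = ft.docValueFormat(null, null); // the field's default format
List<?> values = fetchFromDocValues(mapperService, ft, format, "some source value");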

View File

@ -28,6 +28,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.Collections;
@ -90,7 +91,7 @@ public class MockFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}

View File

@ -101,7 +101,6 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.indices.mapper.MapperRegistry;
import org.elasticsearch.mock.orig.Mockito;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchModule;
@ -319,12 +318,6 @@ public abstract class AggregatorTestCase extends ESTestCase {
IndexFieldDataService ifds = new IndexFieldDataService(indexSettings,
new IndicesFieldDataCache(Settings.EMPTY, new IndexFieldDataCache.Listener() {
}), circuitBreakerService, mapperService);
when(searchContext.getForField(Mockito.any(MappedFieldType.class)))
.thenAnswer(invocationOnMock -> ifds.getForField((MappedFieldType) invocationOnMock.getArguments()[0],
indexSettings.getIndex().getName(),
() -> {
throw new UnsupportedOperationException("search lookup not available");
}));
QueryShardContext queryShardContext =
queryShardContextMock(contextIndexSearcher, mapperService, indexSettings, circuitBreakerService, bigArrays);
when(searchContext.getQueryShardContext()).thenReturn(queryShardContext);

View File

@ -28,7 +28,6 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
@ -308,11 +307,6 @@ public class TestSearchContext extends SearchContext {
return fixedBitSetFilterCache;
}
@Override
public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType fieldType) {
return queryShardContext.getForField(fieldType);
}
@Override
public TimeValue timeout() {
return TimeValue.ZERO;

View File

@ -168,7 +168,7 @@ public class HistogramFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

View File

@ -118,7 +118,7 @@ public class AsyncSearchSingleNodeTests extends ESSingleNodeTestCase {
public static final class SubFetchPhasePlugin extends Plugin implements SearchPlugin {
@Override
public List<FetchSubPhase> getFetchSubPhases(FetchPhaseConstructionContext context) {
return Collections.singletonList(searchContext -> new FetchSubPhaseProcessor() {
return Collections.singletonList((searchContext, lookup) -> new FetchSubPhaseProcessor() {
@Override
public void setNextReader(LeafReaderContext readerContext) {}

View File

@ -115,7 +115,7 @@ public class ConstantKeywordFieldMapperTests extends FieldMapperTestCase2<Consta
public void testFetchValue() throws Exception {
MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "constant_keyword")));
FieldMapper fieldMapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field");
ValueFetcher fetcher = fieldMapper.valueFetcher(mapperService, null);
ValueFetcher fetcher = fieldMapper.valueFetcher(mapperService, null, null);
SourceLookup missingValueLookup = new SourceLookup();
SourceLookup nullValueLookup = new SourceLookup();
@ -126,7 +126,7 @@ public class ConstantKeywordFieldMapperTests extends FieldMapperTestCase2<Consta
merge(mapperService, fieldMapping(b -> b.field("type", "constant_keyword").field("value", "foo")));
fieldMapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field");
fetcher = fieldMapper.valueFetcher(mapperService, null);
fetcher = fieldMapper.valueFetcher(mapperService, null, null);
assertEquals(List.of("foo"), fetcher.fetchValues(missingValueLookup));
assertEquals(List.of("foo"), fetcher.fetchValues(nullValueLookup));

View File

@ -267,7 +267,7 @@ public class ConstantKeywordFieldMapper extends FieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

View File

@ -513,7 +513,7 @@ public class FlatObjectFieldMapperTests extends FieldMapperTestCase<FlatObjectFi
new String[] {"Hello", "World"});
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

View File

@ -564,7 +564,7 @@ public final class FlatObjectFieldMapper extends DynamicKeyFieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

View File

@ -32,8 +32,6 @@ yamlRestTest {
systemProperty 'tests.rest.blacklist',
[
/////// TO FIX ///////
'search/330_fetch_fields/*', // The 'fields' option is not yet supported
'search/110_field_collapsing/field collapsing, inner_hits, and fields', // Also fails because of the 'fields' option
'search.highlight/40_keyword_ignore/Plain Highligher should skip highlighting ignored keyword values', // The plain highlighter is incompatible with runtime fields. Worth fixing?
'search/115_multiple_field_collapsing/two levels fields collapsing', // Broken. Gotta fix.
'field_caps/30_filter/Field caps with index filter', // We don't support filtering field caps on runtime fields. What should we do?

View File

@ -10,6 +10,7 @@ import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.util.LocaleUtils;
import org.elasticsearch.index.mapper.BooleanFieldMapper;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.DocValueFetcher;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.IpFieldMapper;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
@ -22,6 +23,7 @@ import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.xpack.runtimefields.BooleanScriptFieldScript;
import org.elasticsearch.xpack.runtimefields.DateScriptFieldScript;
import org.elasticsearch.xpack.runtimefields.DoubleScriptFieldScript;
@ -75,8 +77,8 @@ public final class RuntimeFieldMapper extends ParametrizedFieldMapper {
}
@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
throw new UnsupportedOperationException();
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup lookup, String format) {
return new DocValueFetcher(fieldType().docValueFormat(format, null), lookup.doc().getForField(fieldType()));
}
@Override

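Editor's note: this is the heart of the change for runtime fields; instead of throwing UnsupportedOperationException, valueFetcher now returns a DocValueFetcher wired to the field's doc values. For orientation, a sketch of how such a fetcher consumes a DocValueFetcher.Leaf per hit (illustrative only; leafFieldData, format, and docId are assumed in scope):

DocValueFetcher.Leaf leaf = leafFieldData.getLeafValueFetcher(format);
List<Object> values = new ArrayList<>();
if (leaf.advanceExact(docId)) {
    for (int i = 0, count = leaf.docValueCount(); i < count; i++) {
        values.add(leaf.nextValue()); // values arrive pre-formatted by the DocValueFormat
    }
}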
View File

@ -304,7 +304,7 @@ public class PointFieldMapperTests extends CartesianFieldMapperTests {
assertThat(ignoreZValue, equalTo(false));
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

View File

@ -334,7 +334,7 @@ public class ShapeFieldMapperTests extends CartesianFieldMapperTests {
return toXContentString(mapper, true);
}
public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

View File

@ -99,6 +99,20 @@ setup:
lang: painless
meta: {}
---
"fetch fields":
- do:
search:
index: sensor
body:
sort: timestamp
fields: [day_of_week, day_of_week_from_source, day_of_week_letters, prefixed_node]
- match: {hits.total.value: 6}
- match: {hits.hits.0.fields.day_of_week: [Thursday] }
- match: {hits.hits.0.fields.day_of_week_from_source: [Thursday] }
- match: {hits.hits.0.fields.day_of_week_letters: [T, a, d, h, r, s, u, y] }
- match: {hits.hits.0.fields.prefixed_node: [node_c] }
---
"docvalue_fields":
- do:

Some files were not shown because too many files have changed in this diff.