Mirror of https://github.com/honeymoose/OpenSearch.git, synced 2025-03-25 09:28:27 +00:00
Unify the result interfaces from get and search in Java client (#25361)
As GetField and SearchHitField had the same members, they have been unified into DocumentField. Closes #16440
This commit is contained in:
parent da59c178e2 · commit 3518e313b8
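The change is mechanical but wide: every producer and consumer of GetField or SearchHitField now goes through the single org.elasticsearch.common.document.DocumentField type. A minimal sketch of what that buys client code — the index, type, id, and field names below are invented for illustration, not taken from the commit:

import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.search.SearchHit;

public class UnifiedFieldAccess {

    // One helper now serves both the get and the search path; before this
    // commit it had to be written twice, once per field type.
    static Object firstValue(DocumentField field) {
        return field == null ? null : field.getValue();
    }

    static void demo(Client client) {
        GetResponse get = client.prepareGet("index", "type", "1").setStoredFields("user").get();
        Object fromGet = firstValue(get.getField("user"));

        SearchResponse search = client.prepareSearch("index").addStoredField("user").get();
        for (SearchHit hit : search.getHits()) {
            Object fromSearch = firstValue(hit.field("user"));
        }
    }
}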
GetResponse.java

@@ -24,12 +24,12 @@ import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.get.GetField;
 import org.elasticsearch.index.get.GetResult;
 
 import java.io.IOException;
@@ -44,7 +44,7 @@ import java.util.Objects;
  * @see GetRequest
  * @see org.elasticsearch.client.Client#get(GetRequest)
  */
-public class GetResponse extends ActionResponse implements Iterable<GetField>, ToXContentObject {
+public class GetResponse extends ActionResponse implements Iterable<DocumentField>, ToXContentObject {
 
     GetResult getResult;
 
@@ -138,11 +138,11 @@ public class GetResponse extends ActionResponse implements Iterable<GetField>, T
         return getResult.getSource();
     }
 
-    public Map<String, GetField> getFields() {
+    public Map<String, DocumentField> getFields() {
         return getResult.getFields();
     }
 
-    public GetField getField(String name) {
+    public DocumentField getField(String name) {
         return getResult.field(name);
     }
 
@@ -151,7 +151,7 @@ public class GetResponse extends ActionResponse implements Iterable<GetField>, T
      */
     @Deprecated
     @Override
-    public Iterator<GetField> iterator() {
+    public Iterator<DocumentField> iterator() {
         return getResult.iterator();
     }
 
UpdateHelper.java

@@ -29,6 +29,7 @@ import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.component.AbstractComponent;
+import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
 import org.elasticsearch.common.settings.Settings;
@@ -38,7 +39,6 @@ import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.VersionType;
 import org.elasticsearch.index.engine.DocumentMissingException;
 import org.elasticsearch.index.engine.DocumentSourceMissingException;
-import org.elasticsearch.index.get.GetField;
 import org.elasticsearch.index.get.GetResult;
 import org.elasticsearch.index.mapper.ParentFieldMapper;
 import org.elasticsearch.index.mapper.RoutingFieldMapper;
@@ -324,7 +324,7 @@ public class UpdateHelper extends AbstractComponent {
         SourceLookup sourceLookup = new SourceLookup();
         sourceLookup.setSource(source);
         boolean sourceRequested = false;
-        Map<String, GetField> fields = null;
+        Map<String, DocumentField> fields = null;
         if (request.fields() != null && request.fields().length > 0) {
             for (String field : request.fields()) {
                 if (field.equals("_source")) {
@@ -336,12 +336,12 @@ public class UpdateHelper extends AbstractComponent {
                     if (fields == null) {
                         fields = new HashMap<>(2);
                     }
-                    GetField getField = fields.get(field);
-                    if (getField == null) {
-                        getField = new GetField(field, new ArrayList<>(2));
-                        fields.put(field, getField);
+                    DocumentField documentField = fields.get(field);
+                    if (documentField == null) {
+                        documentField = new DocumentField(field, new ArrayList<>(2));
+                        fields.put(field, documentField);
                     }
-                    getField.getValues().add(value);
+                    documentField.getValues().add(value);
                 }
             }
         }
GetField.java → DocumentField.java

@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.index.get;
+package org.elasticsearch.common.document;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -25,7 +25,9 @@ import org.elasticsearch.common.io.stream.Streamable;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.index.get.GetResult;
 import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.search.SearchHit;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -36,34 +38,52 @@ import java.util.Objects;
 import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
 import static org.elasticsearch.common.xcontent.XContentParserUtils.parseStoredFieldsValue;
 
-public class GetField implements Streamable, ToXContent, Iterable<Object> {
+/**
+ * A single field name and values part of {@link SearchHit} and {@link GetResult}.
+ *
+ * @see SearchHit
+ * @see GetResult
+ */
+public class DocumentField implements Streamable, ToXContent, Iterable<Object> {
 
     private String name;
     private List<Object> values;
 
-    private GetField() {
+    private DocumentField() {
     }
 
-    public GetField(String name, List<Object> values) {
+    public DocumentField(String name, List<Object> values) {
         this.name = Objects.requireNonNull(name, "name must not be null");
         this.values = Objects.requireNonNull(values, "values must not be null");
     }
 
     /**
      * The name of the field.
      */
    public String getName() {
         return name;
     }
 
-    public Object getValue() {
-        if (values != null && !values.isEmpty()) {
-            return values.get(0);
+    /**
+     * The first value of the hit.
+     */
+    public <V> V getValue() {
+        if (values == null || values.isEmpty()) {
+            return null;
         }
-        return null;
+        return (V)values.get(0);
     }
 
     /**
      * The field values.
      */
     public List<Object> getValues() {
         return values;
     }
 
     /**
      * @return The field is a metadata field
      */
     public boolean isMetadataField() {
         return MapperService.isMetadataField(name);
     }
@@ -73,8 +93,8 @@ public class GetField implements Streamable, ToXContent, Iterable<Object> {
         return values.iterator();
     }
 
-    public static GetField readGetField(StreamInput in) throws IOException {
-        GetField result = new GetField();
+    public static DocumentField readDocumentField(StreamInput in) throws IOException {
+        DocumentField result = new DocumentField();
         result.readFrom(in);
         return result;
     }
@@ -102,25 +122,26 @@ public class GetField implements Streamable, ToXContent, Iterable<Object> {
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startArray(name);
         for (Object value : values) {
-            //this call doesn't really need to support writing any kind of object.
-            //Stored fields values are converted using MappedFieldType#valueForDisplay.
-            //As a result they can either be Strings, Numbers, Booleans, or BytesReference, that's all.
+            // this call doesn't really need to support writing any kind of object.
+            // Stored fields values are converted using MappedFieldType#valueForDisplay.
+            // As a result they can either be Strings, Numbers, Booleans, or BytesReference, that's
+            // all.
             builder.value(value);
         }
         builder.endArray();
         return builder;
     }
 
-    public static GetField fromXContent(XContentParser parser) throws IOException {
+    public static DocumentField fromXContent(XContentParser parser) throws IOException {
         ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
         String fieldName = parser.currentName();
         XContentParser.Token token = parser.nextToken();
         ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser::getTokenLocation);
         List<Object> values = new ArrayList<>();
-        while((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
             values.add(parseStoredFieldsValue(parser));
         }
-        return new GetField(fieldName, values);
+        return new DocumentField(fieldName, values);
     }
 
     @Override
@@ -131,9 +152,8 @@ public class GetField implements Streamable, ToXContent, Iterable<Object> {
         if (o == null || getClass() != o.getClass()) {
             return false;
         }
-        GetField objects = (GetField) o;
-        return Objects.equals(name, objects.name) &&
-                Objects.equals(values, objects.values);
+        DocumentField objects = (DocumentField) o;
+        return Objects.equals(name, objects.name) && Objects.equals(values, objects.values);
     }
 
     @Override
@@ -143,9 +163,9 @@ public class GetField implements Streamable, ToXContent, Iterable<Object> {
 
     @Override
     public String toString() {
-        return "GetField{" +
+        return "DocumentField{" +
                 "name='" + name + '\'' +
                 ", values=" + values +
                 '}';
     }
 }
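One behavioral nuance of the merged class is visible in the getValue() hunk above: DocumentField adopts SearchHitField's generic signature rather than GetField's Object-returning one, so the first value comes back through an unchecked cast picked at the call site. A small usage sketch (the field name and values are made up):

import java.util.Arrays;

import org.elasticsearch.common.document.DocumentField;

public class GetValueSketch {
    public static void main(String[] args) {
        DocumentField field = new DocumentField("rating", Arrays.asList(4.5d, 3.0d));

        // The target type is inferred from the assignment context.
        Double first = field.getValue();
        System.out.println(first + " out of " + field.getValues());

        // Trade-off of the generic signature: a wrong target type still
        // compiles and only fails at runtime with a ClassCastException,
        // e.g. String s = field.getValue();
    }
}

This is presumably also why callers such as GetResult.toXContentEmbedded below now assign field.getValue() to a local Object before passing it on: with an inferred return type, handing the value straight to an overloaded method like XContentBuilder.field could otherwise resolve to a different overload.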
GetResult.java

@@ -20,9 +20,9 @@
 package org.elasticsearch.index.get;
 
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressorFactory;
+import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
@@ -44,9 +44,8 @@ import java.util.Objects;
 
 import static java.util.Collections.emptyMap;
 import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
-import static org.elasticsearch.index.get.GetField.readGetField;
 
-public class GetResult implements Streamable, Iterable<GetField>, ToXContentObject {
+public class GetResult implements Streamable, Iterable<DocumentField>, ToXContentObject {
 
     public static final String _INDEX = "_index";
     public static final String _TYPE = "_type";
@@ -60,7 +59,7 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContentObje
     private String id;
     private long version;
     private boolean exists;
-    private Map<String, GetField> fields;
+    private Map<String, DocumentField> fields;
     private Map<String, Object> sourceAsMap;
     private BytesReference source;
     private byte[] sourceAsBytes;
@@ -69,7 +68,7 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContentObje
     }
 
     public GetResult(String index, String type, String id, long version, boolean exists, BytesReference source,
-                     Map<String, GetField> fields) {
+                     Map<String, DocumentField> fields) {
         this.index = index;
         this.type = type;
         this.id = id;
@@ -196,16 +195,16 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContentObje
         return sourceAsMap();
     }
 
-    public Map<String, GetField> getFields() {
+    public Map<String, DocumentField> getFields() {
         return fields;
     }
 
-    public GetField field(String name) {
+    public DocumentField field(String name) {
         return fields.get(name);
     }
 
     @Override
-    public Iterator<GetField> iterator() {
+    public Iterator<DocumentField> iterator() {
         if (fields == null) {
             return Collections.emptyIterator();
         }
@@ -213,10 +212,10 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContentObje
     }
 
     public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params) throws IOException {
-        List<GetField> metaFields = new ArrayList<>();
-        List<GetField> otherFields = new ArrayList<>();
+        List<DocumentField> metaFields = new ArrayList<>();
+        List<DocumentField> otherFields = new ArrayList<>();
         if (fields != null && !fields.isEmpty()) {
-            for (GetField field : fields.values()) {
+            for (DocumentField field : fields.values()) {
                 if (field.getValues().isEmpty()) {
                     continue;
                 }
@@ -228,8 +227,9 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContentObje
             }
         }
 
-        for (GetField field : metaFields) {
-            builder.field(field.getName(), field.getValue());
+        for (DocumentField field : metaFields) {
+            Object value = field.getValue();
+            builder.field(field.getName(), value);
         }
 
         builder.field(FOUND, exists);
@@ -240,7 +240,7 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContentObje
 
         if (!otherFields.isEmpty()) {
             builder.startObject(FIELDS);
-            for (GetField field : otherFields) {
+            for (DocumentField field : otherFields) {
                 field.toXContent(builder, params);
             }
             builder.endObject();
@@ -275,7 +275,7 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContentObje
         long version = -1;
         Boolean found = null;
         BytesReference source = null;
-        Map<String, GetField> fields = new HashMap<>();
+        Map<String, DocumentField> fields = new HashMap<>();
         while((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
             if (token == XContentParser.Token.FIELD_NAME) {
                 currentFieldName = parser.currentName();
@@ -291,7 +291,7 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContentObje
                 } else if (FOUND.equals(currentFieldName)) {
                     found = parser.booleanValue();
                 } else {
-                    fields.put(currentFieldName, new GetField(currentFieldName, Collections.singletonList(parser.objectText())));
+                    fields.put(currentFieldName, new DocumentField(currentFieldName, Collections.singletonList(parser.objectText())));
                 }
             } else if (token == XContentParser.Token.START_OBJECT) {
                 if (SourceFieldMapper.NAME.equals(currentFieldName)) {
@@ -303,7 +303,7 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContentObje
                 }
             } else if (FIELDS.equals(currentFieldName)) {
                 while(parser.nextToken() != XContentParser.Token.END_OBJECT) {
-                    GetField getField = GetField.fromXContent(parser);
+                    DocumentField getField = DocumentField.fromXContent(parser);
                     fields.put(getField.getName(), getField);
                 }
             } else {
@@ -347,7 +347,7 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContentObje
         } else {
             fields = new HashMap<>(size);
             for (int i = 0; i < size; i++) {
-                GetField field = readGetField(in);
+                DocumentField field = DocumentField.readDocumentField(in);
                 fields.put(field.getName(), field);
             }
         }
@@ -367,7 +367,7 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContentObje
             out.writeVInt(0);
         } else {
             out.writeVInt(fields.size());
-            for (GetField field : fields.values()) {
+            for (DocumentField field : fields.values()) {
                 field.writeTo(out);
             }
         }
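The readFrom/writeTo hunks in GetResult above only swap type names: DocumentField keeps the Streamable layout the two old classes already shared — a name string followed by a generic-value list — so the rename should not affect the wire format. A round-trip sketch under that assumption:

import java.util.Arrays;

import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;

public class RoundTripSketch {
    public static void main(String[] args) throws Exception {
        DocumentField original = new DocumentField("title", Arrays.asList("Book title"));
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            original.writeTo(out); // writes the name, then the value list
            try (StreamInput in = out.bytes().streamInput()) {
                DocumentField copy = DocumentField.readDocumentField(in);
                System.out.println(copy.equals(original)); // true
            }
        }
    }
}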
ShardGetService.java

@@ -24,6 +24,7 @@ import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndVersion;
 import org.elasticsearch.common.metrics.CounterMetric;
 import org.elasticsearch.common.metrics.MeanMetric;
@@ -173,7 +174,7 @@ public final class ShardGetService extends AbstractIndexShardComponent {
     }
 
     private GetResult innerGetLoadFromStoredFields(String type, String id, String[] gFields, FetchSourceContext fetchSourceContext, Engine.GetResult get, MapperService mapperService) {
-        Map<String, GetField> fields = null;
+        Map<String, DocumentField> fields = null;
         BytesReference source = null;
         DocIdAndVersion docIdAndVersion = get.docIdAndVersion();
         FieldsVisitor fieldVisitor = buildFieldsVisitors(gFields, fetchSourceContext);
@@ -189,7 +190,7 @@ public final class ShardGetService extends AbstractIndexShardComponent {
                 fieldVisitor.postProcess(mapperService);
                 fields = new HashMap<>(fieldVisitor.fields().size());
                 for (Map.Entry<String, List<Object>> entry : fieldVisitor.fields().entrySet()) {
-                    fields.put(entry.getKey(), new GetField(entry.getKey(), entry.getValue()));
+                    fields.put(entry.getKey(), new DocumentField(entry.getKey(), entry.getValue()));
                 }
             }
         }
@@ -200,7 +201,7 @@ public final class ShardGetService extends AbstractIndexShardComponent {
             if (fields == null) {
                 fields = new HashMap<>(1);
             }
-            fields.put(ParentFieldMapper.NAME, new GetField(ParentFieldMapper.NAME, Collections.singletonList(parentId)));
+            fields.put(ParentFieldMapper.NAME, new DocumentField(ParentFieldMapper.NAME, Collections.singletonList(parentId)));
         }
 
         if (gFields != null && gFields.length > 0) {
ClientScrollableHitSource.java

@@ -34,6 +34,7 @@ import org.elasticsearch.action.search.ShardSearchFailure;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.client.ParentTaskAssigningClient;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.AbstractRunnable;
 import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
@@ -42,7 +43,6 @@ import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.mapper.ParentFieldMapper;
 import org.elasticsearch.index.mapper.RoutingFieldMapper;
 import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.SearchHitField;
 import org.elasticsearch.threadpool.ThreadPool;
 
 import java.util.ArrayList;
@@ -254,7 +254,7 @@ public class ClientScrollableHitSource extends ScrollableHitSource {
         }
 
         private <T> T fieldValue(String fieldName) {
-            SearchHitField field = delegate.field(fieldName);
+            DocumentField field = delegate.field(fieldName);
             return field == null ? null : field.getValue();
         }
     }
TermVectorsService.java

@@ -34,12 +34,12 @@ import org.elasticsearch.action.termvectors.TermVectorsResponse;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndVersion;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.index.engine.Engine;
-import org.elasticsearch.index.get.GetField;
 import org.elasticsearch.index.get.GetResult;
 import org.elasticsearch.index.mapper.DocumentMapperForType;
 import org.elasticsearch.index.mapper.KeywordFieldMapper;
@@ -235,9 +235,9 @@ public class TermVectorsService {
         return selectedFields;
     }
 
-    private static Fields generateTermVectors(IndexShard indexShard, Map<String, Object> source, Collection<GetField> getFields, boolean withOffsets, @Nullable Map<String, String> perFieldAnalyzer, Set<String> fields) throws IOException {
+    private static Fields generateTermVectors(IndexShard indexShard, Map<String, Object> source, Collection<DocumentField> getFields, boolean withOffsets, @Nullable Map<String, String> perFieldAnalyzer, Set<String> fields) throws IOException {
         Map<String, Collection<Object>> values = new HashMap<>();
-        for (GetField getField : getFields) {
+        for (DocumentField getField : getFields) {
             String field = getField.getName();
             if (fields.contains(field)) { // some fields are returned even when not asked for, eg. _timestamp
                 values.put(field, getField.getValues());
@@ -279,7 +279,7 @@ public class TermVectorsService {
         // select the right fields and generate term vectors
         ParseContext.Document doc = parsedDocument.rootDoc();
         Set<String> seenFields = new HashSet<>();
-        Collection<GetField> getFields = new HashSet<>();
+        Collection<DocumentField> documentFields = new HashSet<>();
         for (IndexableField field : doc.getFields()) {
             MappedFieldType fieldType = indexShard.mapperService().fullName(field.name());
             if (!isValidField(fieldType)) {
@@ -295,10 +295,10 @@ public class TermVectorsService {
                 seenFields.add(field.name());
             }
             String[] values = doc.getValues(field.name());
-            getFields.add(new GetField(field.name(), Arrays.asList((Object[]) values)));
+            documentFields.add(new DocumentField(field.name(), Arrays.asList((Object[]) values)));
         }
         return generateTermVectors(indexShard, XContentHelper.convertToMap(parsedDocument.source(), true, request.xContentType()).v2(),
-            getFields, request.offsets(), request.perFieldAnalyzer(), seenFields);
+            documentFields, request.offsets(), request.perFieldAnalyzer(), seenFields);
     }
 
     private static ParsedDocument parseDocument(IndexShard indexShard, String index, String type, BytesReference doc,
SearchHit.java

@@ -28,6 +28,7 @@ import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressorFactory;
+import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
@@ -75,7 +76,7 @@ import static org.elasticsearch.search.fetch.subphase.highlight.HighlightField.r
  *
  * @see SearchHits
  */
-public final class SearchHit implements Streamable, ToXContentObject, Iterable<SearchHitField> {
+public final class SearchHit implements Streamable, ToXContentObject, Iterable<DocumentField> {
 
     private transient int docId;
 
@@ -91,7 +92,7 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<S
 
     private BytesReference source;
 
-    private Map<String, SearchHitField> fields = emptyMap();
+    private Map<String, DocumentField> fields = emptyMap();
 
     private Map<String, HighlightField> highlightFields = null;
 
@@ -118,11 +119,11 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<S
         this(docId, null, null, null);
     }
 
-    public SearchHit(int docId, String id, Text type, Map<String, SearchHitField> fields) {
+    public SearchHit(int docId, String id, Text type, Map<String, DocumentField> fields) {
         this(docId, id, type, null, fields);
     }
 
-    public SearchHit(int nestedTopDocId, String id, Text type, NestedIdentity nestedIdentity, Map<String, SearchHitField> fields) {
+    public SearchHit(int nestedTopDocId, String id, Text type, NestedIdentity nestedIdentity, Map<String, DocumentField> fields) {
         this.docId = nestedTopDocId;
         if (id != null) {
             this.id = new Text(id);
@@ -252,14 +253,14 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<S
     }
 
     @Override
-    public Iterator<SearchHitField> iterator() {
+    public Iterator<DocumentField> iterator() {
         return fields.values().iterator();
     }
 
     /**
      * The hit field matching the given field name.
      */
-    public SearchHitField field(String fieldName) {
+    public DocumentField field(String fieldName) {
         return getFields().get(fieldName);
     }
 
@@ -267,16 +268,16 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<S
      * A map of hit fields (from field name to hit fields) if additional fields
      * were required to be loaded.
      */
-    public Map<String, SearchHitField> getFields() {
+    public Map<String, DocumentField> getFields() {
         return fields == null ? emptyMap() : fields;
     }
 
     // returns the fields without handling null cases
-    public Map<String, SearchHitField> fieldsOrNull() {
+    public Map<String, DocumentField> fieldsOrNull() {
         return fields;
     }
 
-    public void fields(Map<String, SearchHitField> fields) {
+    public void fields(Map<String, DocumentField> fields) {
         this.fields = fields;
     }
 
@@ -382,10 +383,10 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<S
 
     // public because we render hit as part of completion suggestion option
     public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException {
-        List<SearchHitField> metaFields = new ArrayList<>();
-        List<SearchHitField> otherFields = new ArrayList<>();
+        List<DocumentField> metaFields = new ArrayList<>();
+        List<DocumentField> otherFields = new ArrayList<>();
         if (fields != null && !fields.isEmpty()) {
-            for (SearchHitField field : fields.values()) {
+            for (DocumentField field : fields.values()) {
                 if (field.getValues().isEmpty()) {
                     continue;
                 }
@@ -424,7 +425,7 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<S
         } else {
             builder.field(Fields._SCORE, score);
         }
-        for (SearchHitField field : metaFields) {
+        for (DocumentField field : metaFields) {
             Object value = field.getValue();
             builder.field(field.getName(), value);
         }
@@ -433,7 +434,7 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<S
         }
         if (!otherFields.isEmpty()) {
             builder.startObject(Fields.FIELDS);
-            for (SearchHitField field : otherFields) {
+            for (DocumentField field : otherFields) {
                 builder.startArray(field.getName());
                 for (Object value : field.getValues()) {
                     builder.value(value);
@@ -509,7 +510,7 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<S
         parser.declareObject((map, value) -> map.put(Fields.HIGHLIGHT, value), (p, c) -> parseHighlightFields(p),
                 new ParseField(Fields.HIGHLIGHT));
         parser.declareObject((map, value) -> {
-            Map<String, SearchHitField> fieldMap = get(Fields.FIELDS, map, new HashMap<String, SearchHitField>());
+            Map<String, DocumentField> fieldMap = get(Fields.FIELDS, map, new HashMap<String, DocumentField>());
             fieldMap.putAll(value);
             map.put(Fields.FIELDS, fieldMap);
         }, (p, c) -> parseFields(p), new ParseField(Fields.FIELDS));
@@ -528,7 +529,7 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<S
         String id = get(Fields._ID, values, null);
         Text type = get(Fields._TYPE, values, null);
         NestedIdentity nestedIdentity = get(NestedIdentity._NESTED, values, null);
-        Map<String, SearchHitField> fields = get(Fields.FIELDS, values, null);
+        Map<String, DocumentField> fields = get(Fields.FIELDS, values, null);
 
         SearchHit searchHit = new SearchHit(-1, id, type, nestedIdentity, fields);
         searchHit.index = get(Fields._INDEX, values, null);
@@ -585,20 +586,20 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<S
                     && metadatafield.equals(Fields._TYPE) == false) {
                 parser.declareField((map, field) -> {
                     @SuppressWarnings("unchecked")
-                    Map<String, SearchHitField> fieldMap = (Map<String, SearchHitField>) map.computeIfAbsent(Fields.FIELDS,
-                            v -> new HashMap<String, SearchHitField>());
+                    Map<String, DocumentField> fieldMap = (Map<String, DocumentField>) map.computeIfAbsent(Fields.FIELDS,
+                            v -> new HashMap<String, DocumentField>());
                     fieldMap.put(field.getName(), field);
                 }, (p, c) -> {
                     List<Object> values = new ArrayList<>();
                     values.add(parseStoredFieldsValue(p));
-                    return new SearchHitField(metadatafield, values);
+                    return new DocumentField(metadatafield, values);
                 }, new ParseField(metadatafield), ValueType.VALUE);
             }
         }
     }
 
-    private static Map<String, SearchHitField> parseFields(XContentParser parser) throws IOException {
-        Map<String, SearchHitField> fields = new HashMap<>();
+    private static Map<String, DocumentField> parseFields(XContentParser parser) throws IOException {
+        Map<String, DocumentField> fields = new HashMap<>();
         while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
             String fieldName = parser.currentName();
             ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser::getTokenLocation);
@@ -606,7 +607,7 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<S
             while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                 values.add(parseStoredFieldsValue(parser));
             }
-            fields.put(fieldName, new SearchHitField(fieldName, values));
+            fields.put(fieldName, new DocumentField(fieldName, values));
         }
         return fields;
     }
@@ -704,12 +705,12 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<S
         if (size == 0) {
             fields = emptyMap();
         } else if (size == 1) {
-            SearchHitField hitField = SearchHitField.readSearchHitField(in);
+            DocumentField hitField = DocumentField.readDocumentField(in);
             fields = singletonMap(hitField.getName(), hitField);
         } else {
-            Map<String, SearchHitField> fields = new HashMap<>();
+            Map<String, DocumentField> fields = new HashMap<>();
             for (int i = 0; i < size; i++) {
-                SearchHitField hitField = SearchHitField.readSearchHitField(in);
+                DocumentField hitField = DocumentField.readDocumentField(in);
                 fields.put(hitField.getName(), hitField);
             }
             this.fields = unmodifiableMap(fields);
@@ -770,7 +771,7 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<S
             out.writeVInt(0);
         } else {
             out.writeVInt(fields.size());
-            for (SearchHitField hitField : getFields().values()) {
+            for (DocumentField hitField : getFields().values()) {
                 hitField.writeTo(out);
             }
         }
SearchHitField.java (deleted)

@@ -1,126 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search;
-
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
-import org.elasticsearch.index.mapper.MapperService;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Objects;
-
-/**
- * A single field name and values part of a {@link SearchHit}.
- *
- * @see SearchHit
- */
-public final class SearchHitField implements Streamable, Iterable<Object> {
-
-    private String name;
-    private List<Object> values;
-
-    private SearchHitField() {
-    }
-
-    public SearchHitField(String name, List<Object> values) {
-        this.name = name;
-        this.values = values;
-    }
-
-    /**
-     * The name of the field.
-     */
-    public String getName() {
-        return name;
-    }
-
-    /**
-     * The first value of the hit.
-     */
-    public <V> V getValue() {
-        if (values == null || values.isEmpty()) {
-            return null;
-        }
-        return (V)values.get(0);
-    }
-
-    /**
-     * The field values.
-     */
-    public List<Object> getValues() {
-        return values;
-    }
-
-    /**
-     * @return The field is a metadata field
-     */
-    public boolean isMetadataField() {
-        return MapperService.isMetadataField(name);
-    }
-
-    @Override
-    public Iterator<Object> iterator() {
-        return values.iterator();
-    }
-
-    public static SearchHitField readSearchHitField(StreamInput in) throws IOException {
-        SearchHitField result = new SearchHitField();
-        result.readFrom(in);
-        return result;
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        name = in.readString();
-        int size = in.readVInt();
-        values = new ArrayList<>(size);
-        for (int i = 0; i < size; i++) {
-            values.add(in.readGenericValue());
-        }
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeString(name);
-        out.writeVInt(values.size());
-        for (Object value : values) {
-            out.writeGenericValue(value);
-        }
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-        SearchHitField other = (SearchHitField) obj;
-        return Objects.equals(name, other.name)
-                && Objects.equals(values, other.values);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(name, values);
-    }
-}
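The file deletion above is the breaking edge of this commit for external code: anything that imports org.elasticsearch.search.SearchHitField has to move to DocumentField. Because the accessors (getName, getValue, getValues, isMetadataField) carried over unchanged, migration is a plain type swap, sketched below with a hypothetical hit and field name:

import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.search.SearchHit;

public class MigrationSketch {
    // Before: SearchHitField field = hit.field("distance");
    static Double distance(SearchHit hit) {
        // After: only the import and the declared type change.
        DocumentField field = hit.field("distance");
        return field == null ? null : field.getValue();
    }
}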
FetchPhase.java

@@ -29,6 +29,7 @@ import org.apache.lucene.util.BitSet;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.text.Text;
@@ -42,11 +43,10 @@ import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.ObjectMapper;
 import org.elasticsearch.index.mapper.SourceFieldMapper;
 import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.SearchHitField;
+import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.SearchPhase;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.InnerHitsFetchSubPhase;
-import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.lookup.SourceLookup;
 import org.elasticsearch.tasks.TaskCancelledException;
@@ -186,11 +186,11 @@ public class FetchPhase implements SearchPhase {
                 loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId);
                 fieldsVisitor.postProcess(context.mapperService());
 
-                Map<String, SearchHitField> searchFields = null;
+                Map<String, DocumentField> searchFields = null;
                 if (!fieldsVisitor.fields().isEmpty()) {
                     searchFields = new HashMap<>(fieldsVisitor.fields().size());
                     for (Map.Entry<String, List<Object>> entry : fieldsVisitor.fields().entrySet()) {
-                        searchFields.put(entry.getKey(), new SearchHitField(entry.getKey(), entry.getValue()));
+                        searchFields.put(entry.getKey(), new DocumentField(entry.getKey(), entry.getValue()));
                     }
                 }
 
@@ -219,7 +219,7 @@ public class FetchPhase implements SearchPhase {
             loadStoredFields(context, subReaderContext, rootFieldsVisitor, rootSubDocId);
             rootFieldsVisitor.postProcess(context.mapperService());
 
-            Map<String, SearchHitField> searchFields = getSearchFields(context, nestedSubDocId, fieldNames, fieldNamePatterns, subReaderContext);
+            Map<String, DocumentField> searchFields = getSearchFields(context, nestedSubDocId, fieldNames, fieldNamePatterns, subReaderContext);
             DocumentMapper documentMapper = context.mapperService().documentMapper(rootFieldsVisitor.uid().type());
             SourceLookup sourceLookup = context.lookup().source();
             sourceLookup.setSegmentAndDocument(subReaderContext, nestedSubDocId);
@@ -272,8 +272,8 @@ public class FetchPhase implements SearchPhase {
         return new SearchHit(nestedTopDocId, rootFieldsVisitor.uid().id(), documentMapper.typeText(), nestedIdentity, searchFields);
     }
 
-    private Map<String, SearchHitField> getSearchFields(SearchContext context, int nestedSubDocId, Set<String> fieldNames, List<String> fieldNamePatterns, LeafReaderContext subReaderContext) {
-        Map<String, SearchHitField> searchFields = null;
+    private Map<String, DocumentField> getSearchFields(SearchContext context, int nestedSubDocId, Set<String> fieldNames, List<String> fieldNamePatterns, LeafReaderContext subReaderContext) {
+        Map<String, DocumentField> searchFields = null;
         if (context.hasStoredFields() && !context.storedFieldsContext().fieldNames().isEmpty()) {
             FieldsVisitor nestedFieldsVisitor = new CustomFieldsVisitor(fieldNames == null ? Collections.emptySet() : fieldNames,
                     fieldNamePatterns == null ? Collections.emptyList() : fieldNamePatterns, false);
@@ -283,7 +283,7 @@ public class FetchPhase implements SearchPhase {
             if (!nestedFieldsVisitor.fields().isEmpty()) {
                 searchFields = new HashMap<>(nestedFieldsVisitor.fields().size());
                 for (Map.Entry<String, List<Object>> entry : nestedFieldsVisitor.fields().entrySet()) {
-                    searchFields.put(entry.getKey(), new SearchHitField(entry.getKey(), entry.getValue()));
+                    searchFields.put(entry.getKey(), new DocumentField(entry.getKey(), entry.getValue()));
                }
             }
         }
DocValueFieldsFetchSubPhase.java

@@ -18,10 +18,10 @@
  */
 package org.elasticsearch.search.fetch.subphase;
 
+import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.index.fielddata.AtomicFieldData;
 import org.elasticsearch.index.fielddata.ScriptDocValues;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.search.SearchHitField;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.internal.SearchContext;
 
@@ -55,9 +55,9 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
             if (hitContext.hit().fieldsOrNull() == null) {
                 hitContext.hit().fields(new HashMap<>(2));
             }
-            SearchHitField hitField = hitContext.hit().getFields().get(field);
+            DocumentField hitField = hitContext.hit().getFields().get(field);
             if (hitField == null) {
-                hitField = new SearchHitField(field, new ArrayList<>(2));
+                hitField = new DocumentField(field, new ArrayList<>(2));
                 hitContext.hit().getFields().put(field, hitField);
             }
             MappedFieldType fieldType = context.mapperService().fullName(field);
|
||||
import org.apache.lucene.index.SortedDocValues;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.common.document.DocumentField;
|
||||
import org.elasticsearch.index.mapper.ParentFieldMapper;
|
||||
import org.elasticsearch.search.SearchHitField;
|
||||
import org.elasticsearch.search.fetch.FetchSubPhase;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
@ -51,12 +51,12 @@ public final class ParentFieldSubFetchPhase implements FetchSubPhase {
|
||||
return;
|
||||
}
|
||||
|
||||
Map<String, SearchHitField> fields = hitContext.hit().fieldsOrNull();
|
||||
Map<String, DocumentField> fields = hitContext.hit().fieldsOrNull();
|
||||
if (fields == null) {
|
||||
fields = new HashMap<>();
|
||||
hitContext.hit().fields(fields);
|
||||
}
|
||||
fields.put(ParentFieldMapper.NAME, new SearchHitField(ParentFieldMapper.NAME, Collections.singletonList(parentId)));
|
||||
fields.put(ParentFieldMapper.NAME, new DocumentField(ParentFieldMapper.NAME, Collections.singletonList(parentId)));
|
||||
}
|
||||
|
||||
public static String getParentId(ParentFieldMapper fieldMapper, LeafReader reader, int docId) {
|
||||
|
@ -18,8 +18,8 @@
|
||||
*/
|
||||
package org.elasticsearch.search.fetch.subphase;
|
||||
|
||||
import org.elasticsearch.common.document.DocumentField;
|
||||
import org.elasticsearch.script.SearchScript;
|
||||
import org.elasticsearch.search.SearchHitField;
|
||||
import org.elasticsearch.search.fetch.FetchSubPhase;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
@ -62,7 +62,7 @@ public final class ScriptFieldsFetchSubPhase implements FetchSubPhase {
|
||||
hitContext.hit().fields(new HashMap<>(2));
|
||||
}
|
||||
|
||||
SearchHitField hitField = hitContext.hit().getFields().get(scriptField.name());
|
||||
DocumentField hitField = hitContext.hit().getFields().get(scriptField.name());
|
||||
if (hitField == null) {
|
||||
final List<Object> values;
|
||||
if (value instanceof Collection) {
|
||||
@ -71,7 +71,7 @@ public final class ScriptFieldsFetchSubPhase implements FetchSubPhase {
|
||||
} else {
|
||||
values = Collections.singletonList(value);
|
||||
}
|
||||
hitField = new SearchHitField(scriptField.name(), values);
|
||||
hitField = new DocumentField(scriptField.name(), values);
|
||||
hitContext.hit().getFields().put(scriptField.name(), hitField);
|
||||
}
|
||||
}
|
||||
|
@ -24,10 +24,10 @@ import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.document.DocumentField;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.get.GetField;
|
||||
import org.elasticsearch.index.get.GetResult;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
@ -92,7 +92,7 @@ public class GetResponseTests extends ESTestCase {
|
||||
public void testToXContent() {
|
||||
{
|
||||
GetResponse getResponse = new GetResponse(new GetResult("index", "type", "id", 1, true, new BytesArray("{ \"field1\" : " +
|
||||
"\"value1\", \"field2\":\"value2\"}"), Collections.singletonMap("field1", new GetField("field1",
|
||||
"\"value1\", \"field2\":\"value2\"}"), Collections.singletonMap("field1", new DocumentField("field1",
|
||||
Collections.singletonList("value1")))));
|
||||
String output = Strings.toString(getResponse);
|
||||
assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"found\":true,\"_source\":{ \"field1\" " +
|
||||
@ -108,7 +108,7 @@ public class GetResponseTests extends ESTestCase {
|
||||
public void testToString() {
|
||||
GetResponse getResponse = new GetResponse(
|
||||
new GetResult("index", "type", "id", 1, true, new BytesArray("{ \"field1\" : " + "\"value1\", \"field2\":\"value2\"}"),
|
||||
Collections.singletonMap("field1", new GetField("field1", Collections.singletonList("value1")))));
|
||||
Collections.singletonMap("field1", new DocumentField("field1", Collections.singletonList("value1")))));
|
||||
assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"found\":true,\"_source\":{ \"field1\" "
|
||||
+ ": \"value1\", \"field2\":\"value2\"},\"fields\":{\"field1\":[\"value1\"]}}", getResponse.toString());
|
||||
}
|
||||
|
@ -20,6 +20,7 @@
|
||||
package org.elasticsearch.action.search;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.common.document.DocumentField;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.index.query.BoolQueryBuilder;
|
||||
@ -27,7 +28,6 @@ import org.elasticsearch.index.query.InnerHitBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
import org.elasticsearch.search.SearchHitField;
|
||||
import org.elasticsearch.search.SearchHits;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.collapse.CollapseBuilder;
|
||||
@ -105,7 +105,7 @@ public class ExpandSearchPhaseTests extends ESTestCase {
|
||||
};
|
||||
|
||||
SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(1, "ID", new Text("type"),
|
||||
Collections.singletonMap("someField", new SearchHitField("someField", Collections.singletonList(collapseValue))))},
|
||||
Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))))},
|
||||
1, 1.0F);
|
||||
InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1);
|
||||
AtomicReference<SearchResponse> reference = new AtomicReference<>();
|
||||
@ -160,9 +160,9 @@ public class ExpandSearchPhaseTests extends ESTestCase {
|
||||
};
|
||||
|
||||
SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(1, "ID", new Text("type"),
|
||||
Collections.singletonMap("someField", new SearchHitField("someField", Collections.singletonList(collapseValue)))),
|
||||
Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue)))),
|
||||
new SearchHit(2, "ID2", new Text("type"),
|
||||
Collections.singletonMap("someField", new SearchHitField("someField", Collections.singletonList(collapseValue))))}, 1,
|
||||
Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))))}, 1,
|
||||
1.0F);
|
||||
InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1);
|
||||
AtomicReference<SearchResponse> reference = new AtomicReference<>();
|
||||
@ -194,9 +194,9 @@ public class ExpandSearchPhaseTests extends ESTestCase {
|
||||
};
|
||||
|
||||
SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(1, "ID", new Text("type"),
|
||||
Collections.singletonMap("someField", new SearchHitField("someField", Collections.singletonList(null)))),
|
||||
Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(null)))),
|
||||
new SearchHit(2, "ID2", new Text("type"),
|
||||
Collections.singletonMap("someField", new SearchHitField("someField", Collections.singletonList(null))))}, 1, 1.0F);
|
||||
Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(null))))}, 1, 1.0F);
|
||||
InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1);
|
||||
AtomicReference<SearchResponse> reference = new AtomicReference<>();
|
||||
ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, r ->
|
||||
|
@ -26,6 +26,7 @@ import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.support.replication.ReplicationRequest;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.document.DocumentField;
|
||||
import org.elasticsearch.common.io.stream.Streamable;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
@ -37,7 +38,6 @@ import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.VersionType;
|
||||
import org.elasticsearch.index.get.GetField;
|
||||
import org.elasticsearch.index.get.GetResult;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
import org.elasticsearch.script.MockScriptEngine;
|
||||
@ -532,9 +532,9 @@ public class UpdateRequestTests extends ESTestCase {
|
||||
assertNull(UpdateHelper.calculateRouting(getResult, indexRequest));
|
||||
assertNull(UpdateHelper.calculateParent(getResult, indexRequest));
|
||||
|
||||
Map<String, GetField> fields = new HashMap<>();
|
||||
fields.put("_parent", new GetField("_parent", Collections.singletonList("parent1")));
|
||||
fields.put("_routing", new GetField("_routing", Collections.singletonList("routing1")));
|
||||
Map<String, DocumentField> fields = new HashMap<>();
|
||||
fields.put("_parent", new DocumentField("_parent", Collections.singletonList("parent1")));
|
||||
fields.put("_routing", new DocumentField("_routing", Collections.singletonList("routing1")));
|
||||
|
||||
// Doc exists and has the parent and routing fields
|
||||
getResult = new GetResult("test", "type", "1", 0, true, null, fields);
|
||||
|
@ -26,10 +26,10 @@ import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.document.DocumentField;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.get.GetField;
|
||||
import org.elasticsearch.index.get.GetResult;
|
||||
import org.elasticsearch.index.get.GetResultTests;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
@ -68,9 +68,9 @@ public class UpdateResponseTests extends ESTestCase {
|
||||
}
|
||||
{
|
||||
BytesReference source = new BytesArray("{\"title\":\"Book title\",\"isbn\":\"ABC-123\"}");
|
||||
Map<String, GetField> fields = new HashMap<>();
|
||||
fields.put("title", new GetField("title", Collections.singletonList("Book title")));
|
||||
fields.put("isbn", new GetField("isbn", Collections.singletonList("ABC-123")));
|
||||
Map<String, DocumentField> fields = new HashMap<>();
|
||||
fields.put("title", new DocumentField("title", Collections.singletonList("Book title")));
|
||||
fields.put("isbn", new DocumentField("isbn", Collections.singletonList("ABC-123")));
|
||||
|
||||
UpdateResponse updateResponse = new UpdateResponse(new ReplicationResponse.ShardInfo(3, 2),
|
||||
new ShardId("books", "books_uuid", 2), "book", "1", 7, 17, 2, UPDATED);
|
||||
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.get;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.document.DocumentField;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
@ -41,62 +42,63 @@ import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;

public class GetFieldTests extends ESTestCase {
public class DocumentFieldTests extends ESTestCase {

public void testToXContent() {
GetField getField = new GetField("field", Arrays.asList("value1", "value2"));
String output = Strings.toString(getField);
DocumentField documentField = new DocumentField("field", Arrays.asList("value1", "value2"));
String output = Strings.toString(documentField);
assertEquals("{\"field\":[\"value1\",\"value2\"]}", output);
}

public void testEqualsAndHashcode() {
checkEqualsAndHashCode(randomGetField(XContentType.JSON).v1(), GetFieldTests::copyGetField, GetFieldTests::mutateGetField);
checkEqualsAndHashCode(randomDocumentField(XContentType.JSON).v1(), DocumentFieldTests::copyDocumentField,
DocumentFieldTests::mutateDocumentField);
}

public void testToAndFromXContent() throws Exception {
XContentType xContentType = randomFrom(XContentType.values());
Tuple<GetField, GetField> tuple = randomGetField(xContentType);
GetField getField = tuple.v1();
GetField expectedGetField = tuple.v2();
Tuple<DocumentField, DocumentField> tuple = randomDocumentField(xContentType);
DocumentField documentField = tuple.v1();
DocumentField expectedDocumentField = tuple.v2();
boolean humanReadable = randomBoolean();
BytesReference originalBytes = toShuffledXContent(getField, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
BytesReference originalBytes = toShuffledXContent(documentField, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
//test that we can parse what we print out
GetField parsedGetField;
DocumentField parsedDocumentField;
try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
//we need to move to the next token, the start object one that we manually added is not expected
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
parsedGetField = GetField.fromXContent(parser);
parsedDocumentField = DocumentField.fromXContent(parser);
assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken());
assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
assertNull(parser.nextToken());
}
assertEquals(expectedGetField, parsedGetField);
BytesReference finalBytes = toXContent(parsedGetField, xContentType, humanReadable);
assertEquals(expectedDocumentField, parsedDocumentField);
BytesReference finalBytes = toXContent(parsedDocumentField, xContentType, humanReadable);
assertToXContentEquivalent(originalBytes, finalBytes, xContentType);
}

private static GetField copyGetField(GetField getField) {
return new GetField(getField.getName(), getField.getValues());
private static DocumentField copyDocumentField(DocumentField documentField) {
return new DocumentField(documentField.getName(), documentField.getValues());
}

private static GetField mutateGetField(GetField getField) {
List<Supplier<GetField>> mutations = new ArrayList<>();
mutations.add(() -> new GetField(randomUnicodeOfCodepointLength(15), getField.getValues()));
mutations.add(() -> new GetField(getField.getName(), randomGetField(XContentType.JSON).v1().getValues()));
private static DocumentField mutateDocumentField(DocumentField documentField) {
List<Supplier<DocumentField>> mutations = new ArrayList<>();
mutations.add(() -> new DocumentField(randomUnicodeOfCodepointLength(15), documentField.getValues()));
mutations.add(() -> new DocumentField(documentField.getName(), randomDocumentField(XContentType.JSON).v1().getValues()));
return randomFrom(mutations).get();
}

public static Tuple<GetField, GetField> randomGetField(XContentType xContentType) {
public static Tuple<DocumentField, DocumentField> randomDocumentField(XContentType xContentType) {
if (randomBoolean()) {
String fieldName = randomFrom(ParentFieldMapper.NAME, RoutingFieldMapper.NAME, UidFieldMapper.NAME);
GetField getField = new GetField(fieldName, Collections.singletonList(randomAlphaOfLengthBetween(3, 10)));
return Tuple.tuple(getField, getField);
DocumentField documentField = new DocumentField(fieldName, Collections.singletonList(randomAlphaOfLengthBetween(3, 10)));
return Tuple.tuple(documentField, documentField);
}
String fieldName = randomAlphaOfLengthBetween(3, 10);
Tuple<List<Object>, List<Object>> tuple = RandomObjects.randomStoredFieldValues(random(), xContentType);
GetField input = new GetField(fieldName, tuple.v1());
GetField expected = new GetField(fieldName, tuple.v2());
DocumentField input = new DocumentField(fieldName, tuple.v1());
DocumentField expected = new DocumentField(fieldName, tuple.v2());
return Tuple.tuple(input, expected);
}
}

@ -23,6 +23,7 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
@ -42,7 +43,7 @@ import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.index.get.GetFieldTests.randomGetField;
import static org.elasticsearch.index.get.DocumentFieldTests.randomDocumentField;
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;

@ -72,7 +73,7 @@ public class GetResultTests extends ESTestCase {
public void testToXContent() throws IOException {
{
GetResult getResult = new GetResult("index", "type", "id", 1, true, new BytesArray("{ \"field1\" : " +
"\"value1\", \"field2\":\"value2\"}"), singletonMap("field1", new GetField("field1",
"\"value1\", \"field2\":\"value2\"}"), singletonMap("field1", new DocumentField("field1",
singletonList("value1"))));
String output = Strings.toString(getResult);
assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"found\":true,\"_source\":{ \"field1\" " +
@ -115,9 +116,9 @@ public class GetResultTests extends ESTestCase {
}

public void testToXContentEmbedded() throws IOException {
Map<String, GetField> fields = new HashMap<>();
fields.put("foo", new GetField("foo", singletonList("bar")));
fields.put("baz", new GetField("baz", Arrays.asList("baz_0", "baz_1")));
Map<String, DocumentField> fields = new HashMap<>();
fields.put("foo", new DocumentField("foo", singletonList("bar")));
fields.put("baz", new DocumentField("baz", Arrays.asList("baz_0", "baz_1")));

GetResult getResult = new GetResult("index", "type", "id", 2, true,
new BytesArray("{\"foo\":\"bar\",\"baz\":[\"baz_0\",\"baz_1\"]}"), fields);
@ -169,7 +170,7 @@ public class GetResultTests extends ESTestCase {
mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(),
getResult.isExists(), RandomObjects.randomSource(random()), getResult.getFields()));
mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(),
getResult.isExists(), getResult.internalSourceRef(), randomGetFields(XContentType.JSON).v1()));
getResult.isExists(), getResult.internalSourceRef(), randomDocumentFields(XContentType.JSON).v1()));
return randomFrom(mutations).get();
}

@ -180,8 +181,8 @@
final long version;
final boolean exists;
BytesReference source = null;
Map<String, GetField> fields = null;
Map<String, GetField> expectedFields = null;
Map<String, DocumentField> fields = null;
Map<String, DocumentField> expectedFields = null;
if (frequently()) {
version = randomNonNegativeLong();
exists = true;
@ -189,7 +190,7 @@
source = RandomObjects.randomSource(random());
}
if (randomBoolean()) {
Tuple<Map<String, GetField>, Map<String, GetField>> tuple = randomGetFields(xContentType);
Tuple<Map<String, DocumentField>, Map<String, DocumentField>> tuple = randomDocumentFields(xContentType);
fields = tuple.v1();
expectedFields = tuple.v2();
}
@ -202,14 +203,14 @@
return Tuple.tuple(getResult, expectedGetResult);
}

private static Tuple<Map<String, GetField>,Map<String, GetField>> randomGetFields(XContentType xContentType) {
private static Tuple<Map<String, DocumentField>,Map<String, DocumentField>> randomDocumentFields(XContentType xContentType) {
int numFields = randomIntBetween(2, 10);
Map<String, GetField> fields = new HashMap<>(numFields);
Map<String, GetField> expectedFields = new HashMap<>(numFields);
Map<String, DocumentField> fields = new HashMap<>(numFields);
Map<String, DocumentField> expectedFields = new HashMap<>(numFields);
for (int i = 0; i < numFields; i++) {
Tuple<GetField, GetField> tuple = randomGetField(xContentType);
GetField getField = tuple.v1();
GetField expectedGetField = tuple.v2();
Tuple<DocumentField, DocumentField> tuple = randomDocumentField(xContentType);
DocumentField getField = tuple.v1();
DocumentField expectedGetField = tuple.v2();
fields.put(getField.getName(), getField);
expectedFields.put(expectedGetField.getName(), expectedGetField);
}

@ -24,6 +24,7 @@ import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
import org.elasticsearch.common.text.Text;
@ -70,7 +71,7 @@ public class SearchHitTests extends ESTestCase {
if (randomBoolean()) {
nestedIdentity = NestedIdentityTests.createTestItem(randomIntBetween(0, 2));
}
Map<String, SearchHitField> fields = new HashMap<>();
Map<String, DocumentField> fields = new HashMap<>();
if (randomBoolean()) {
int size = randomIntBetween(0, 10);
for (int i = 0; i < size; i++) {
@ -78,10 +79,10 @@
XContentType.JSON);
if (randomBoolean()) {
String metaField = randomFrom(META_FIELDS);
fields.put(metaField, new SearchHitField(metaField, values.v1()));
fields.put(metaField, new DocumentField(metaField, values.v1()));
} else {
String fieldName = randomAlphaOfLengthBetween(5, 10);
fields.put(fieldName, new SearchHitField(fieldName, values.v1()));
fields.put(fieldName, new DocumentField(fieldName, values.v1()));
}
}
}

@ -23,8 +23,10 @@ import com.carrotsearch.hppc.ObjectIntHashMap;
import com.carrotsearch.hppc.ObjectIntMap;
import com.carrotsearch.hppc.ObjectObjectHashMap;
import com.carrotsearch.hppc.ObjectObjectMap;

import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
@ -32,7 +34,6 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
@ -189,7 +190,7 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase {
SearchHit searchHit = response.getHits().getAt(i);
assertThat("Hit " + i + " with id: " + searchHit.getId(), searchHit.getIndex(), equalTo("high_card_idx"));
assertThat("Hit " + i + " with id: " + searchHit.getId(), searchHit.getType(), equalTo("type"));
SearchHitField hitField = searchHit.field(NUMBER_FIELD_NAME);
DocumentField hitField = searchHit.field(NUMBER_FIELD_NAME);

assertThat("Hit " + i + " has wrong number of values", hitField.getValues().size(), equalTo(1));
Long value = hitField.getValue();

@ -25,6 +25,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
@ -36,9 +37,9 @@ import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
@ -47,7 +48,6 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode;
import org.elasticsearch.search.aggregations.metrics.max.Max;
import org.elasticsearch.search.aggregations.metrics.tophits.TopHits;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.rescore.RescoreBuilder;
@ -615,7 +615,7 @@ public class TopHitsIT extends ESIntegTestCase {

assertThat(hit.getMatchedQueries()[0], equalTo("test"));

SearchHitField field = hit.field("field1");
DocumentField field = hit.field("field1");
assertThat(field.getValue().toString(), equalTo("5"));

assertThat(hit.getSourceAsMap().get("text").toString(), equalTo("some text to entertain"));
@ -893,7 +893,7 @@

assertThat(searchHit.getMatchedQueries(), arrayContaining("test"));

SearchHitField field = searchHit.field("comments.user");
DocumentField field = searchHit.field("comments.user");
assertThat(field.getValue().toString(), equalTo("a"));

field = searchHit.field("script");

@ -28,10 +28,10 @@ import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
@ -85,7 +85,7 @@ public class InternalTopHitsTests extends InternalAggregationTestCase<InternalTo
int docId = randomValueOtherThanMany(usedDocIds::contains, () -> between(0, IndexWriter.MAX_DOCS));
usedDocIds.add(docId);

Map<String, SearchHitField> searchHitFields = new HashMap<>();
Map<String, DocumentField> searchHitFields = new HashMap<>();
if (testInstancesLookSortedByField) {
Object[] fields = new Object[testInstancesSortFields.length];
for (int f = 0; f < testInstancesSortFields.length; f++) {

@ -27,6 +27,7 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.ESLoggerFactory;
@ -36,7 +37,6 @@ import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESIntegTestCase;
@ -129,9 +129,9 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase {
if (hitContext.hit().fieldsOrNull() == null) {
hitContext.hit().fields(new HashMap<>());
}
SearchHitField hitField = hitContext.hit().getFields().get(NAME);
DocumentField hitField = hitContext.hit().getFields().get(NAME);
if (hitField == null) {
hitField = new SearchHitField(NAME, new ArrayList<>(1));
hitField = new DocumentField(NAME, new ArrayList<>(1));
hitContext.hit().getFields().put(NAME, hitField);
}
TermVectorsRequest termVectorsRequest = new TermVectorsRequest(context.indexShard().shardId().getIndex().getName(),

@ -26,8 +26,8 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
@ -39,7 +39,6 @@ import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.lookup.FieldLookup;
import org.elasticsearch.search.sort.SortOrder;
@ -487,7 +486,7 @@ public class SearchFieldsIT extends ESIntegTestCase {

assertNoFailures(response);

SearchHitField fieldObj = response.getHits().getAt(0).field("test_script_1");
DocumentField fieldObj = response.getHits().getAt(0).field("test_script_1");
assertThat(fieldObj, notNullValue());
List<?> fieldValues = fieldObj.getValues();
assertThat(fieldValues, hasSize(1));
@ -715,7 +714,7 @@
SearchResponse searchResponse = client().prepareSearch("test").setTypes("type").setSource(
new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).fieldDataField("test_field")).get();
assertHitCount(searchResponse, 1);
Map<String,SearchHitField> fields = searchResponse.getHits().getHits()[0].getFields();
Map<String, DocumentField> fields = searchResponse.getHits().getHits()[0].getFields();
assertThat(fields.get("test_field").getValue(), equalTo("foobar"));
}

@ -854,7 +853,7 @@
assertSearchResponse(resp);
for (SearchHit hit : resp.getHits().getHits()) {
final int id = Integer.parseInt(hit.getId());
Map<String, SearchHitField> fields = hit.getFields();
Map<String, DocumentField> fields = hit.getFields();
assertThat(fields.get("s").getValues(), equalTo(Collections.<Object> singletonList(Integer.toString(id))));
assertThat(fields.get("l").getValues(), equalTo(Collections.<Object> singletonList((long) id)));
assertThat(fields.get("d").getValues(), equalTo(Collections.<Object> singletonList((double) id)));
@ -876,7 +875,7 @@
assertSearchResponse(response);
assertHitCount(response, 1);

Map<String, SearchHitField> fields = response.getHits().getAt(0).getFields();
Map<String, DocumentField> fields = response.getHits().getAt(0).getFields();

assertThat(fields.get("field1"), nullValue());
assertThat(fields.get("_routing").isMetadataField(), equalTo(true));

@ -41,3 +41,8 @@ Use `BucketOrder.key(boolean)` to order the `terms` aggregation buckets by `_term`.
In `BulkResponse`, `SearchResponse` and `TermVectorsResponse` the `getTookInMillis()` method
has been removed in favor of the `getTook()` method. `getTookInMillis()` is easily replaced by
`getTook().getMillis()`.

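As a minimal before-and-after sketch, assuming a `SearchResponse` named `response`
obtained from any search call (the same pattern applies to `BulkResponse` and
`TermVectorsResponse`):

[source,java]
----
// before: long took = response.getTookInMillis();  // method no longer exists
long took = response.getTook().getMillis();         // getTook() returns a TimeValue
----
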
=== `GetField` and `SearchHitField` replaced by `DocumentField`

As `GetField` and `SearchHitField` have the same members, they have been unified into
`DocumentField`.

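A hedged migration sketch follows; `getResponse` and `searchHit` are assumed to be a
`GetResponse` and a `SearchHit` obtained from earlier get and search calls:

[source,java]
----
import java.util.List;
import org.elasticsearch.common.document.DocumentField;

// before, two parallel types carried the same data:
// GetField field = getResponse.getField("user");      // get API
// SearchHitField field = searchHit.field("user");     // search API

// after, both APIs return the unified type:
DocumentField getField = getResponse.getField("user");
DocumentField hitField = searchHit.field("user");

// the accessors are unchanged:
String name = hitField.getName();
Object first = hitField.getValue();
List<Object> all = hitField.getValues();
----

Note that `getValue()` is generic, so call sites that previously relied on a concrete
return type may need an explicit cast to select the right overload, as in the
`SizeMappingIT` change below.
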
@ -23,10 +23,9 @@ import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.join.mapper.ParentIdFieldMapper;
import org.elasticsearch.join.mapper.ParentJoinFieldMapper;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.SearchContext;

@ -62,14 +61,14 @@ public final class ParentJoinFieldSubFetchPhase implements FetchSubPhase {
parentId = getSortedDocValue(parentMapper.name(), hitContext.reader(), hitContext.docId());
}

Map<String, SearchHitField> fields = hitContext.hit().fieldsOrNull();
Map<String, DocumentField> fields = hitContext.hit().fieldsOrNull();
if (fields == null) {
fields = new HashMap<>();
hitContext.hit().fields(fields);
}
fields.put(mapper.name(), new SearchHitField(mapper.name(), Collections.singletonList(joinName)));
fields.put(mapper.name(), new DocumentField(mapper.name(), Collections.singletonList(joinName)));
if (parentId != null) {
fields.put(parentMapper.name(), new SearchHitField(parentMapper.name(), Collections.singletonList(parentId)));
fields.put(parentMapper.name(), new DocumentField(parentMapper.name(), Collections.singletonList(parentId)));
}
}

@ -32,6 +32,7 @@ import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.search.Weight;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.IdFieldMapper;
@ -44,7 +45,6 @@ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.join.mapper.ParentIdFieldMapper;
import org.elasticsearch.join.mapper.ParentJoinFieldMapper;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;

@ -126,7 +126,7 @@ class ParentChildInnerHitContextBuilder extends InnerHitContextBuilder {
TopDocs[] result = new TopDocs[hits.length];
for (int i = 0; i < hits.length; i++) {
SearchHit hit = hits[i];
SearchHitField joinField = hit.getFields().get(joinFieldMapper.name());
DocumentField joinField = hit.getFields().get(joinFieldMapper.name());
if (joinField == null) {
result[i] = Lucene.EMPTY_TOP_DOCS;
continue;
@ -150,7 +150,7 @@ class ParentChildInnerHitContextBuilder extends InnerHitContextBuilder {
.add(joinFieldMapper.fieldType().termQuery(typeName, qsc), BooleanClause.Occur.FILTER)
.build();
} else {
SearchHitField parentIdField = hit.getFields().get(parentIdFieldMapper.name());
DocumentField parentIdField = hit.getFields().get(parentIdFieldMapper.name());
q = context.mapperService().fullName(IdFieldMapper.NAME).termQuery(parentIdField.getValue(), qsc);
}

@ -206,7 +206,7 @@ class ParentChildInnerHitContextBuilder extends InnerHitContextBuilder {
} else if (isChildHit(hit)) {
DocumentMapper hitDocumentMapper = mapperService.documentMapper(hit.getType());
final String parentType = hitDocumentMapper.parentFieldMapper().type();
SearchHitField parentField = hit.field(ParentFieldMapper.NAME);
DocumentField parentField = hit.field(ParentFieldMapper.NAME);
if (parentField == null) {
throw new IllegalStateException("All children must have a _parent");
}

@ -111,6 +111,6 @@ public class SizeMappingIT extends ESIntegTestCase {
client().prepareIndex("test", "type", "1").setSource(source, XContentType.JSON));
GetResponse getResponse = client().prepareGet("test", "type", "1").setStoredFields("_size").get();
assertNotNull(getResponse.getField("_size"));
assertEquals(source.length(), getResponse.getField("_size").getValue());
assertEquals(source.length(), (int) getResponse.getField("_size").getValue());
}
}