Add parsing from xContent to InternalSearchHit and InternalSearchHits (#22429)

This adds methods to parse InternalSearchHit and InternalSearchHits from their
xContent representation. Most of the information in the original object is
preserved when rendering the object to xContent and then parsing it back.
However, some pieces of information are lost which we currently cannot parse
back from the rest response, most notably:

* the "match" property of the lucene explanation is not rendered in the
  "_explain" section and cannot be reconstructed on the client side
* the original "shard" information (SearchShardTarget) is only rendered if the
  "explanation" is also set, and we lose the indexUUID of the contained
  ShardId because we don't write it out. As a replacement we can use
  ClusterState.UNKNOWN_UUID on the receiving side
This commit is contained in:
Christoph Büscher 2017-01-10 14:00:04 +01:00 committed by GitHub
parent cb7952e71d
commit 5f9dfe3186
14 changed files with 581 additions and 57 deletions

View File

@ -20,6 +20,7 @@
package org.elasticsearch.common.xcontent; package org.elasticsearch.common.xcontent;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.common.xcontent.XContentParser.Token;
import java.io.IOException; import java.io.IOException;
@ -56,6 +57,14 @@ public final class XContentParserUtils {
throw new ParsingException(location, String.format(Locale.ROOT, message, field)); throw new ParsingException(location, String.format(Locale.ROOT, message, field));
} }
/**
 * Throws a {@link ParsingException} reporting that an unexpected token type was encountered.
 *
 * @param token    the token that was not expected at this position
 * @param location the location in the content where the token was found
 * @throws ParsingException always, with an "unexpected token found" reason
 */
public static void throwUnknownToken(XContentParser.Token token, XContentLocation location) {
    throw new ParsingException(location,
            String.format(Locale.ROOT, "Failed to parse object: unexpected token [%s] found", token));
}
/** /**
* Makes sure that provided token is of the expected type * Makes sure that provided token is of the expected type
* *
@ -67,4 +76,35 @@ public final class XContentParserUtils {
throw new ParsingException(location.get(), String.format(Locale.ROOT, message, expected, actual)); throw new ParsingException(location.get(), String.format(Locale.ROOT, message, expected, actual));
} }
} }
/**
 * Parses the value the parser is currently positioned on, dispatching on the token type:
 * <ul>
 * <li>{@link XContentParser.Token#VALUE_STRING}: {@code parser.text()}</li>
 * <li>{@link XContentParser.Token#VALUE_NUMBER}: {@code parser.numberValue()}</li>
 * <li>{@link XContentParser.Token#VALUE_BOOLEAN}: {@code parser.booleanValue()}</li>
 * <li>{@link XContentParser.Token#VALUE_EMBEDDED_OBJECT}: {@code parser.binaryValue()} wrapped in a {@link BytesArray}</li>
 * </ul>
 *
 * @throws ParsingException if the current token is none of the allowed value types
 */
public static Object parseStoredFieldsValue(XContentParser parser) throws IOException {
    final XContentParser.Token token = parser.currentToken();
    switch (token) {
        case VALUE_STRING:
            // binary values are rendered as base64 strings by json and yaml,
            // so they come back as plain strings here
            return parser.text();
        case VALUE_NUMBER:
            return parser.numberValue();
        case VALUE_BOOLEAN:
            return parser.booleanValue();
        case VALUE_EMBEDDED_OBJECT:
            // cbor and smile preserve real binary values, returned as BytesArray
            return new BytesArray(parser.binaryValue());
        default:
            throwUnknownToken(token, parser.getTokenLocation());
            return null; // unreachable: throwUnknownToken always throws
    }
}
} }

View File

@ -19,8 +19,6 @@
package org.elasticsearch.index.get; package org.elasticsearch.index.get;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.io.stream.Streamable;
@ -36,6 +34,7 @@ import java.util.List;
import java.util.Objects; import java.util.Objects;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.parseStoredFieldsValue;
public class GetField implements Streamable, ToXContent, Iterable<Object> { public class GetField implements Streamable, ToXContent, Iterable<Object> {
@ -119,21 +118,7 @@ public class GetField implements Streamable, ToXContent, Iterable<Object> {
ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser::getTokenLocation); ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser::getTokenLocation);
List<Object> values = new ArrayList<>(); List<Object> values = new ArrayList<>();
while((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { while((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
Object value; values.add(parseStoredFieldsValue(parser));
if (token == XContentParser.Token.VALUE_STRING) {
//binary values will be parsed back and returned as base64 strings when reading from json and yaml
value = parser.text();
} else if (token == XContentParser.Token.VALUE_NUMBER) {
value = parser.numberValue();
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
value = parser.booleanValue();
} else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
//binary values will be parsed back and returned as BytesArray when reading from cbor and smile
value = new BytesArray(parser.binaryValue());
} else {
throw new ParsingException(parser.getTokenLocation(), "Failed to parse object: unsupported token found [" + token + "]");
}
values.add(value);
} }
return new GetField(fieldName, values); return new GetField(fieldName, values);
} }

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.shard; package org.elasticsearch.index.shard;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.io.stream.Streamable;
@ -71,6 +72,22 @@ public class ShardId implements Streamable, Comparable<ShardId> {
return "[" + index.getName() + "][" + shardId + "]"; return "[" + index.getName() + "][" + shardId + "]";
} }
/**
 * Parse the string representation of this shardId ("[indexName][shardId]") back to an object.
 * We lose index uuid information here, but since we use toString in
 * rest responses, this is the best we can do to reconstruct the object
 * on the client side; the uuid is replaced by {@code IndexMetaData.INDEX_UUID_NA_VALUE}.
 *
 * @throws IllegalArgumentException if the string does not match the expected
 *         "[indexName][shardId]" format (a non-numeric shard id surfaces as a
 *         {@link NumberFormatException}, a subclass of IllegalArgumentException)
 */
public static ShardId fromString(String shardIdString) {
    // use the LAST "][" as the separator: the shard id part is purely numeric and can
    // never contain brackets, while an index name containing "][" would otherwise make
    // indexOf() split too early and mis-parse both components
    int splitPosition = shardIdString.lastIndexOf("][");
    if (splitPosition <= 0 || shardIdString.charAt(0) != '[' || shardIdString.charAt(shardIdString.length() - 1) != ']') {
        throw new IllegalArgumentException("Unexpected shardId string format, expected [indexName][shardId] but got " + shardIdString);
    }
    String indexName = shardIdString.substring(1, splitPosition);
    int shardId = Integer.parseInt(shardIdString.substring(splitPosition + 2, shardIdString.length() - 1));
    return new ShardId(new Index(indexName, IndexMetaData.INDEX_UUID_NA_VALUE), shardId);
}
@Override @Override
public boolean equals(Object o) { public boolean equals(Object o) {
if (this == o) return true; if (this == o) return true;

View File

@ -24,7 +24,7 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.text.Text; import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import java.util.Map; import java.util.Map;
@ -34,7 +34,7 @@ import java.util.Map;
* *
* @see SearchHits * @see SearchHits
*/ */
public interface SearchHit extends Streamable, ToXContent, Iterable<SearchHitField> { public interface SearchHit extends Streamable, ToXContentObject, Iterable<SearchHitField> {
/** /**
* The score. * The score.

View File

@ -34,6 +34,8 @@ import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
/** /**
* A field highlighted with its highlighted fragments. * A field highlighted with its highlighted fragments.
*/ */
@ -121,13 +123,16 @@ public class HighlightField implements ToXContent, Streamable {
} }
public static HighlightField fromXContent(XContentParser parser) throws IOException { public static HighlightField fromXContent(XContentParser parser) throws IOException {
XContentParser.Token token = parser.nextToken(); ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
assert token == XContentParser.Token.FIELD_NAME;
String fieldName = parser.currentName(); String fieldName = parser.currentName();
Text[] fragments = null; Text[] fragments = null;
token = parser.nextToken(); XContentParser.Token token = parser.nextToken();
if (token == XContentParser.Token.START_ARRAY) { if (token == XContentParser.Token.START_ARRAY) {
fragments = parseValues(parser); List<Text> values = new ArrayList<>();
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
values.add(new Text(parser.text()));
}
fragments = values.toArray(new Text[values.size()]);
} else if (token == XContentParser.Token.VALUE_NULL) { } else if (token == XContentParser.Token.VALUE_NULL) {
fragments = null; fragments = null;
} else { } else {
@ -137,14 +142,6 @@ public class HighlightField implements ToXContent, Streamable {
return new HighlightField(fieldName, fragments); return new HighlightField(fieldName, fragments);
} }
private static Text[] parseValues(XContentParser parser) throws IOException {
List<Text> values = new ArrayList<>();
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
values.add(new Text(parser.text()));
}
return values.toArray(new Text[values.size()]);
}
@Override @Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(name); builder.field(name);

View File

@ -23,6 +23,7 @@ import org.apache.lucene.search.Explanation;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.compress.CompressorFactory;
@ -35,6 +36,9 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.SearchHitField;
@ -45,6 +49,7 @@ import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
@ -58,6 +63,10 @@ import static org.elasticsearch.common.lucene.Lucene.readExplanation;
import static org.elasticsearch.common.lucene.Lucene.writeExplanation; import static org.elasticsearch.common.lucene.Lucene.writeExplanation;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.parseStoredFieldsValue;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken;
import static org.elasticsearch.search.fetch.subphase.highlight.HighlightField.readHighlightField; import static org.elasticsearch.search.fetch.subphase.highlight.HighlightField.readHighlightField;
import static org.elasticsearch.search.internal.InternalSearchHitField.readSearchHitField; import static org.elasticsearch.search.internal.InternalSearchHitField.readSearchHitField;
@ -65,7 +74,8 @@ public class InternalSearchHit implements SearchHit {
private transient int docId; private transient int docId;
private float score = Float.NEGATIVE_INFINITY; private static final float DEFAULT_SCORE = Float.NEGATIVE_INFINITY;
private float score = DEFAULT_SCORE;
private Text id; private Text id;
private Text type; private Text type;
@ -89,6 +99,8 @@ public class InternalSearchHit implements SearchHit {
@Nullable @Nullable
private SearchShardTarget shard; private SearchShardTarget shard;
private transient Text index;
private Map<String, Object> sourceAsMap; private Map<String, Object> sourceAsMap;
private byte[] sourceAsBytes; private byte[] sourceAsBytes;
@ -103,20 +115,17 @@ public class InternalSearchHit implements SearchHit {
} }
public InternalSearchHit(int docId, String id, Text type, Map<String, SearchHitField> fields) { public InternalSearchHit(int docId, String id, Text type, Map<String, SearchHitField> fields) {
this.docId = docId; this(docId, id, type, null, fields);
}
public InternalSearchHit(int nestedTopDocId, String id, Text type, InternalNestedIdentity nestedIdentity, Map<String, SearchHitField> fields) {
this.docId = nestedTopDocId;
if (id != null) { if (id != null) {
this.id = new Text(id); this.id = new Text(id);
} else { } else {
this.id = null; this.id = null;
} }
this.type = type; this.type = type;
this.fields = fields;
}
public InternalSearchHit(int nestedTopDocId, String id, Text type, InternalNestedIdentity nestedIdentity, Map<String, SearchHitField> fields) {
this.docId = nestedTopDocId;
this.id = new Text(id);
this.type = type;
this.nestedIdentity = nestedIdentity; this.nestedIdentity = nestedIdentity;
this.fields = fields; this.fields = fields;
} }
@ -126,7 +135,7 @@ public class InternalSearchHit implements SearchHit {
} }
public void shardTarget(SearchShardTarget shardTarget) { public void shardTarget(SearchShardTarget shardTarget) {
this.shard = shardTarget; shard(shardTarget);
if (innerHits != null) { if (innerHits != null) {
for (InternalSearchHits searchHits : innerHits.values()) { for (InternalSearchHits searchHits : innerHits.values()) {
searchHits.shardTarget(shardTarget); searchHits.shardTarget(shardTarget);
@ -164,7 +173,7 @@ public class InternalSearchHit implements SearchHit {
@Override @Override
public String index() { public String index() {
return shard.index(); return this.index == null ? null : this.index.string();
} }
@Override @Override
@ -337,7 +346,11 @@ public class InternalSearchHit implements SearchHit {
} }
public void sortValues(Object[] sortValues, DocValueFormat[] sortValueFormats) { public void sortValues(Object[] sortValues, DocValueFormat[] sortValueFormats) {
this.sortValues = new SearchSortValues(sortValues, sortValueFormats); sortValues(new SearchSortValues(sortValues, sortValueFormats));
}
public void sortValues(SearchSortValues sortValues) {
this.sortValues = sortValues;
} }
@Override @Override
@ -376,6 +389,9 @@ public class InternalSearchHit implements SearchHit {
public void shard(SearchShardTarget target) { public void shard(SearchShardTarget target) {
this.shard = target; this.shard = target;
if (target != null) {
this.index = target.indexText();
}
} }
public void matchedQueries(String[] matchedQueries) { public void matchedQueries(String[] matchedQueries) {
@ -417,6 +433,8 @@ public class InternalSearchHit implements SearchHit {
static final String DESCRIPTION = "description"; static final String DESCRIPTION = "description";
static final String DETAILS = "details"; static final String DETAILS = "details";
static final String INNER_HITS = "inner_hits"; static final String INNER_HITS = "inner_hits";
static final String _SHARD = "_shard";
static final String _NODE = "_node";
} }
// public because we render hit as part of completion suggestion option // public because we render hit as part of completion suggestion option
@ -439,14 +457,14 @@ public class InternalSearchHit implements SearchHit {
// For inner_hit hits shard is null and that is ok, because the parent search hit has all this information. // For inner_hit hits shard is null and that is ok, because the parent search hit has all this information.
// Even if this was included in the inner_hit hits this would be the same, so better leave it out. // Even if this was included in the inner_hit hits this would be the same, so better leave it out.
if (explanation() != null && shard != null) { if (explanation() != null && shard != null) {
builder.field("_shard", shard.shardId()); builder.field(Fields._SHARD, shard.shardId());
builder.field("_node", shard.nodeIdText()); builder.field(Fields._NODE, shard.nodeIdText());
} }
if (nestedIdentity != null) { if (nestedIdentity != null) {
nestedIdentity.toXContent(builder, params); nestedIdentity.toXContent(builder, params);
} else { } else {
if (shard != null) { if (index != null) {
builder.field(Fields._INDEX, shard.indexText()); builder.field(Fields._INDEX, index);
} }
if (type != null) { if (type != null) {
builder.field(Fields._TYPE, type); builder.field(Fields._TYPE, type);
@ -468,7 +486,7 @@ public class InternalSearchHit implements SearchHit {
builder.field(field.name(), value); builder.field(field.name(), value);
} }
if (source != null) { if (source != null) {
XContentHelper.writeRawField("_source", source, builder, params); XContentHelper.writeRawField(SourceFieldMapper.NAME, source, builder, params);
} }
if (!otherFields.isEmpty()) { if (!otherFields.isEmpty()) {
builder.startObject(Fields.FIELDS); builder.startObject(Fields.FIELDS);
@ -512,6 +530,160 @@ public class InternalSearchHit implements SearchHit {
return builder; return builder;
} }
/**
 * Parses a single search hit back from its xContent (REST) representation.
 * <p>
 * The result is a best-effort reconstruction: the lucene docId is not part of the
 * rest response (a placeholder of -1 is used), and the shard target can only be
 * rebuilt when both "_shard" and "_node" were rendered, which happens only
 * together with an explanation.
 */
public static InternalSearchHit fromXContent(XContentParser parser) throws IOException {
    XContentParser.Token token;
    // caller positions us on the START_OBJECT of the hit
    ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
    String currentFieldName = null;
    String type = null, id = null;
    Text index = null;
    float score = DEFAULT_SCORE;
    long version = -1;
    SearchSortValues sortValues = SearchSortValues.EMPTY;
    InternalNestedIdentity nestedIdentity = null;
    Map<String, HighlightField> highlightFields = new HashMap<>();
    BytesReference parsedSource = null;
    List<String> matchedQueries = new ArrayList<>();
    Map<String, SearchHitField> fields = new HashMap<>();
    Explanation explanation = null;
    ShardId shardId = null;
    String nodeId = null;
    Map<String, InternalSearchHits> innerHits = null;
    while((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token.isValue()) {
            if (Fields._TYPE.equals(currentFieldName)) {
                type = parser.text();
            } else if (Fields._INDEX.equals(currentFieldName)) {
                index = new Text(parser.text());
            } else if (Fields._ID.equals(currentFieldName)) {
                id = parser.text();
            } else if (Fields._SCORE.equals(currentFieldName)) {
                score = parser.floatValue();
            } else if (Fields._VERSION.equals(currentFieldName)) {
                version = parser.longValue();
            } else if (Fields._SHARD.equals(currentFieldName)) {
                // parsed back from its "[indexName][shardId]" string form;
                // the index uuid is lost in that round-trip
                shardId = ShardId.fromString(parser.text());
            } else if (Fields._NODE.equals(currentFieldName)) {
                nodeId = parser.text();
            } else if (MapperService.isMetadataField(currentFieldName)) {
                // any other metadata field becomes a single-valued stored field
                List<Object> values = new ArrayList<>();
                values.add(parseStoredFieldsValue(parser));
                fields.put(currentFieldName, new InternalSearchHitField(currentFieldName, values));
            } else {
                throwUnknownField(currentFieldName, parser.getTokenLocation());
            }
        } else if (token == XContentParser.Token.VALUE_NULL) {
            if (Fields._SCORE.equals(currentFieldName)) {
                // a NaN score is rendered as a null field, map it back
                score = Float.NaN;
            } else {
                throwUnknownField(currentFieldName, parser.getTokenLocation());
            }
        } else if (token == XContentParser.Token.START_OBJECT) {
            if (SourceFieldMapper.NAME.equals(currentFieldName)) {
                try (XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent())) {
                    //the original document gets slightly modified: whitespaces or pretty printing are not preserved,
                    //it all depends on the current builder settings
                    builder.copyCurrentStructure(parser);
                    parsedSource = builder.bytes();
                }
            } else if (Fields.HIGHLIGHT.equals(currentFieldName)) {
                // one HighlightField per entry of the "highlight" object
                while((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    HighlightField highlightField = HighlightField.fromXContent(parser);
                    highlightFields.put(highlightField.getName(), highlightField);
                }
            } else if (Fields.FIELDS.equals(currentFieldName)) {
                // stored fields: each entry is "name": [ value, ... ]
                while((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    String fieldName = parser.currentName();
                    List<Object> values = new ArrayList<>();
                    ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser::getTokenLocation);
                    while((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        values.add(parseStoredFieldsValue(parser));
                    }
                    fields.put(fieldName, new InternalSearchHitField(fieldName, values));
                }
            } else if (Fields._EXPLANATION.equals(currentFieldName)) {
                explanation = parseExplanation(parser);
            } else if (Fields.INNER_HITS.equals(currentFieldName)) {
                innerHits = new HashMap<>();
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    // parse the key
                    ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
                    String name = parser.currentName();
                    innerHits.put(name, InternalSearchHits.fromXContent(parser));
                    // consume the END_OBJECT that closes this inner-hits entry
                    parser.nextToken();
                    ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.currentToken(), parser::getTokenLocation);
                }
            } else if (InternalNestedIdentity.Fields._NESTED.equals(currentFieldName)) {
                nestedIdentity = InternalNestedIdentity.fromXContent(parser);
            } else {
                throwUnknownField(currentFieldName, parser.getTokenLocation());
            }
        } else if (token == XContentParser.Token.START_ARRAY) {
            if (Fields.SORT.equals(currentFieldName)) {
                sortValues = SearchSortValues.fromXContent(parser);
            } else if (Fields.MATCHED_QUERIES.equals(currentFieldName)) {
                while((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                    matchedQueries.add(parser.text());
                }
            } else {
                throwUnknownField(currentFieldName, parser.getTokenLocation());
            }
        } else {
            throwUnknownToken(token, parser.getTokenLocation());
        }
    }
    // docId is not part of the rest response, -1 marks it as unknown
    InternalSearchHit internalSearchHit = new InternalSearchHit(-1, id, new Text(type), nestedIdentity, Collections.emptyMap());
    internalSearchHit.index = index;
    internalSearchHit.score(score);
    internalSearchHit.version(version);
    internalSearchHit.sortValues(sortValues);
    internalSearchHit.highlightFields(highlightFields);
    internalSearchHit.sourceRef(parsedSource);
    internalSearchHit.explanation(explanation);
    internalSearchHit.setInnerHits(innerHits);
    if (matchedQueries.size() > 0) {
        internalSearchHit.matchedQueries(matchedQueries.toArray(new String[matchedQueries.size()]));
    }
    if (shardId != null && nodeId != null) {
        // both "_shard" and "_node" are required to rebuild the shard target
        internalSearchHit.shard(new SearchShardTarget(nodeId, shardId));
    }
    internalSearchHit.fields(fields);
    return internalSearchHit;
}
/**
 * Recursively parses a lucene {@link Explanation} from its xContent representation.
 * <p>
 * Note: the "match" flag of the original explanation is not rendered to xContent,
 * so this always reconstructs a matching explanation via {@code Explanation.match}.
 *
 * @throws ParsingException if the mandatory "value" or "description" fields are missing
 */
private static Explanation parseExplanation(XContentParser parser) throws IOException {
    ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
    XContentParser.Token token;
    Float value = null;
    String description = null;
    List<Explanation> details = new ArrayList<>();
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, () -> parser.getTokenLocation());
        String currentFieldName = parser.currentName();
        token = parser.nextToken();
        if (Fields.VALUE.equals(currentFieldName)) {
            value = parser.floatValue();
        } else if (Fields.DESCRIPTION.equals(currentFieldName)) {
            description = parser.textOrNull();
        } else if (Fields.DETAILS.equals(currentFieldName)) {
            // nested explanations: recurse for every element of the "details" array
            ensureExpectedToken(XContentParser.Token.START_ARRAY, token, () -> parser.getTokenLocation());
            while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                details.add(parseExplanation(parser));
            }
        } else {
            throwUnknownField(currentFieldName, parser.getTokenLocation());
        }
    }
    if (value == null) {
        throw new ParsingException(parser.getTokenLocation(), "missing explanation value");
    }
    if (description == null) {
        throw new ParsingException(parser.getTokenLocation(), "missing explanation description");
    }
    // the "match" property is lost during rendering; assume a matching explanation
    return Explanation.match(value, description, details);
}
private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException { private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException {
builder.startObject(); builder.startObject();
builder.field(Fields.VALUE, explanation.getValue()); builder.field(Fields.VALUE, explanation.getValue());
@ -595,7 +767,8 @@ public class InternalSearchHit implements SearchHit {
matchedQueries[i] = in.readString(); matchedQueries[i] = in.readString();
} }
} }
shard = in.readOptionalWriteable(SearchShardTarget::new); // we call the setter here because that also sets the local index parameter
shard(in.readOptionalWriteable(SearchShardTarget::new));
size = in.readVInt(); size = in.readVInt();
if (size > 0) { if (size > 0) {
innerHits = new HashMap<>(size); innerHits = new HashMap<>(size);
@ -762,5 +935,4 @@ public class InternalSearchHit implements SearchHit {
static final String _NESTED_OFFSET = "offset"; static final String _NESTED_OFFSET = "offset";
} }
} }
} }

View File

@ -22,14 +22,19 @@ package org.elasticsearch.search.internal;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.SearchShardTarget;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
import java.util.List;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
import static org.elasticsearch.search.internal.InternalSearchHit.readSearchHit; import static org.elasticsearch.search.internal.InternalSearchHit.readSearchHit;
public class InternalSearchHits implements SearchHits { public class InternalSearchHits implements SearchHits {
@ -132,6 +137,44 @@ public class InternalSearchHits implements SearchHits {
return builder; return builder;
} }
/**
 * Parses the "hits" section of a search response back from its xContent representation.
 * <p>
 * The parser may be positioned either on the START_OBJECT of the section or just
 * before it; in the latter case one token is consumed first.
 *
 * @throws ParsingException on unknown fields within the section
 */
public static InternalSearchHits fromXContent(XContentParser parser) throws IOException {
    if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
        parser.nextToken();
        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
    }
    XContentParser.Token token;
    String currentFieldName = null;
    List<InternalSearchHit> hits = new ArrayList<>();
    long totalHits = 0;
    float maxScore = 0f;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token.isValue()) {
            if (Fields.TOTAL.equals(currentFieldName)) {
                totalHits = parser.longValue();
            } else if (Fields.MAX_SCORE.equals(currentFieldName)) {
                maxScore = parser.floatValue();
            } else {
                throwUnknownField(currentFieldName, parser.getTokenLocation());
            }
        } else if (token == XContentParser.Token.VALUE_NULL) {
            if (Fields.MAX_SCORE.equals(currentFieldName)) {
                maxScore = Float.NaN; // NaN gets rendered as null-field
            } else {
                throwUnknownField(currentFieldName, parser.getTokenLocation());
            }
        } else if (token == XContentParser.Token.START_ARRAY) {
            // only the "hits" array may appear here; previously ANY array was consumed
            // as hits, silently mis-parsing unknown array fields
            if (Fields.HITS.equals(currentFieldName)) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                    hits.add(InternalSearchHit.fromXContent(parser));
                }
            } else {
                throwUnknownField(currentFieldName, parser.getTokenLocation());
            }
        }
    }
    return new InternalSearchHits(hits.toArray(new InternalSearchHit[hits.size()]), totalHits,
            maxScore);
}
public static InternalSearchHits readSearchHits(StreamInput in) throws IOException { public static InternalSearchHits readSearchHits(StreamInput in) throws IOException {
InternalSearchHits hits = new InternalSearchHits(); InternalSearchHits hits = new InternalSearchHits();

View File

@ -139,9 +139,7 @@ public class SearchSortValues implements ToXContent, Writeable {
} }
public static SearchSortValues fromXContent(XContentParser parser) throws IOException { public static SearchSortValues fromXContent(XContentParser parser) throws IOException {
XContentParserUtils.ensureFieldName(parser, parser.currentToken(), Fields.SORT); XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser::getTokenLocation);
XContentParser.Token token = parser.nextToken();
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser::getTokenLocation);
return new SearchSortValues(parser.list().toArray()); return new SearchSortValues(parser.list().toArray());
} }

View File

@ -30,7 +30,6 @@ import org.elasticsearch.index.mapper.UidFieldMapper;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.RandomObjects; import org.elasticsearch.test.RandomObjects;
import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
@ -43,7 +42,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXC
public class GetFieldTests extends ESTestCase { public class GetFieldTests extends ESTestCase {
public void testToXContent() throws IOException { public void testToXContent() {
GetField getField = new GetField("field", Arrays.asList("value1", "value2")); GetField getField = new GetField("field", Arrays.asList("value1", "value2"));
String output = Strings.toString(getField); String output = Strings.toString(getField);
assertEquals("{\"field\":[\"value1\",\"value2\"]}", output); assertEquals("{\"field\":[\"value1\",\"value2\"]}", output);

View File

@ -0,0 +1,54 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.shard;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.test.ESTestCase;
/**
 * Tests for {@link ShardId#fromString(String)}, which parses the
 * {@code [indexName][shardId]} rendering produced by {@code ShardId#toString()}.
 */
public class ShardIdTests extends ESTestCase {

    public void testShardIdFromString() {
        String indexName = randomAsciiOfLengthBetween(3, 50);
        int shardId = randomInt();

        // Round trip of the canonical "[index][id]" format.
        ShardId id = ShardId.fromString("[" + indexName + "][" + shardId + "]");
        assertEquals(indexName, id.getIndexName());
        assertEquals(shardId, id.getId());
        assertEquals(indexName, id.getIndex().getName());
        // fromString cannot recover the index UUID, so the NA placeholder is used.
        assertEquals(IndexMetaData.INDEX_UUID_NA_VALUE, id.getIndex().getUUID());

        // Brackets embedded in the index name must not confuse the parser:
        // only the trailing "[shardId]" section is split off.
        id = ShardId.fromString("[some]weird[0]Name][-125]");
        assertEquals("some]weird[0]Name", id.getIndexName());
        assertEquals(-125, id.getId());
        assertEquals("some]weird[0]Name", id.getIndex().getName());
        assertEquals(IndexMetaData.INDEX_UUID_NA_VALUE, id.getIndex().getUUID());

        String badId = indexName + "," + shardId; // missing separator
        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> ShardId.fromString(badId));
        assertEquals("Unexpected shardId string format, expected [indexName][shardId] but got " + badId, ex.getMessage());

        // The results of the following expectThrows calls were previously stored
        // in an unused local; asserting the throw alone is sufficient here.
        String badId2 = indexName + "][" + shardId + "]"; // missing opening bracket
        expectThrows(IllegalArgumentException.class, () -> ShardId.fromString(badId2));

        String badId3 = "[" + indexName + "][" + shardId; // missing closing bracket
        expectThrows(IllegalArgumentException.class, () -> ShardId.fromString(badId3));
    }
}

View File

@ -63,6 +63,7 @@ public class HighlightFieldTests extends ESTestCase {
builder.endObject(); builder.endObject();
XContentParser parser = createParser(builder); XContentParser parser = createParser(builder);
parser.nextToken(); // skip to the opening object token, fromXContent advances from here and starts with the field name parser.nextToken(); // skip to the opening object token, fromXContent advances from here and starts with the field name
parser.nextToken();
HighlightField parsedField = HighlightField.fromXContent(parser); HighlightField parsedField = HighlightField.fromXContent(parser);
assertEquals(highlightField, parsedField); assertEquals(highlightField, parsedField);
if (highlightField.fragments() != null) { if (highlightField.fragments() != null) {

View File

@ -19,24 +19,144 @@
package org.elasticsearch.search.internal; package org.elasticsearch.search.internal;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.InputStreamStreamInput;
import org.elasticsearch.common.text.Text; import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.Index; import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightFieldTests;
import org.elasticsearch.search.internal.InternalSearchHit.InternalNestedIdentity;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.RandomObjects;
import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.nullValue;
public class InternalSearchHitTests extends ESTestCase { public class InternalSearchHitTests extends ESTestCase {
// Names rendered as top-level metadata fields in the hit's xContent, as opposed
// to regular stored fields which appear under the "fields" object.
private static Set<String> META_FIELDS = Sets.newHashSet("_uid", "_all", "_parent", "_routing", "_size", "_timestamp", "_ttl");

/**
 * Creates a random {@link InternalSearchHit} for xContent round-trip testing.
 * Each optional part of the hit (nested identity, fields, score, source,
 * version, sort values, highlighting, matched queries, explanation, inner
 * hits, shard target) is included or omitted at random.
 *
 * @param withOptionalInnerHits whether inner hits may be added; callers that
 *        are themselves invoked from {@link InternalSearchHitsTests} pass
 *        {@code false} to avoid unbounded recursion between the two factories
 */
public static InternalSearchHit createTestItem(boolean withOptionalInnerHits) {
    int internalId = randomInt();
    String uid = randomAsciiOfLength(10);
    Text type = new Text(randomAsciiOfLengthBetween(5, 10));
    InternalNestedIdentity nestedIdentity = null;
    if (randomBoolean()) {
        nestedIdentity = InternalNestedIdentityTests.createTestItem(randomIntBetween(0, 2));
    }
    Map<String, SearchHitField> fields = new HashMap<>();
    if (randomBoolean()) {
        int size = randomIntBetween(0, 10);
        for (int i = 0; i < size; i++) {
            // v1() holds the original values; the expected-after-parsing
            // variant (v2()) is not needed here.
            Tuple<List<Object>, List<Object>> values = RandomObjects.randomStoredFieldValues(random(),
                    XContentType.JSON);
            if (randomBoolean()) {
                // sometimes use a well-known meta field name instead of a random one
                String metaField = randomFrom(META_FIELDS);
                fields.put(metaField, new InternalSearchHitField(metaField, values.v1()));
            } else {
                String fieldName = randomAsciiOfLengthBetween(5, 10);
                fields.put(fieldName, new InternalSearchHitField(fieldName, values.v1()));
            }
        }
    }
    InternalSearchHit hit = new InternalSearchHit(internalId, uid, type, nestedIdentity, fields);
    if (frequently()) {
        if (rarely()) {
            // NaN is rendered as a null _score field
            hit.score(Float.NaN);
        } else {
            hit.score(randomFloat());
        }
    }
    if (frequently()) {
        hit.sourceRef(RandomObjects.randomSource(random()));
    }
    if (randomBoolean()) {
        hit.version(randomLong());
    }
    if (randomBoolean()) {
        hit.sortValues(SearchSortValuesTests.createTestItem());
    }
    if (randomBoolean()) {
        int size = randomIntBetween(0, 5);
        Map<String, HighlightField> highlightFields = new HashMap<>(size);
        for (int i = 0; i < size; i++) {
            highlightFields.put(randomAsciiOfLength(5), HighlightFieldTests.createTestItem());
        }
        hit.highlightFields(highlightFields);
    }
    if (randomBoolean()) {
        int size = randomIntBetween(0, 5);
        String[] matchedQueries = new String[size];
        for (int i = 0; i < size; i++) {
            matchedQueries[i] = randomAsciiOfLength(5);
        }
        hit.matchedQueries(matchedQueries);
    }
    if (randomBoolean()) {
        hit.explanation(createExplanation(randomIntBetween(0, 5)));
    }
    if (withOptionalInnerHits) {
        int innerHitsSize = randomIntBetween(0, 3);
        Map<String, InternalSearchHits> innerHits = new HashMap<>(innerHitsSize);
        for (int i = 0; i < innerHitsSize; i++) {
            innerHits.put(randomAsciiOfLength(5), InternalSearchHitsTests.createTestItem());
        }
        hit.setInnerHits(innerHits);
    }
    if (randomBoolean()) {
        hit.shard(new SearchShardTarget(randomAsciiOfLengthBetween(5, 10),
                new ShardId(new Index(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)), randomInt())));
    }
    return hit;
}
/**
 * Renders a random hit to xContent, parses it back and checks that both
 * renderings are equivalent and the parser consumed exactly the hit object.
 */
public void testFromXContent() throws IOException {
    InternalSearchHit original = createTestItem(true);
    XContentType contentType = randomFrom(XContentType.values());
    XContentBuilder originalBuilder = XContentFactory.contentBuilder(contentType);
    originalBuilder = original.toXContent(originalBuilder, ToXContent.EMPTY_PARAMS);

    XContentParser parser = createParser(originalBuilder);
    parser.nextToken(); // jump to first START_OBJECT
    InternalSearchHit reparsed = InternalSearchHit.fromXContent(parser);
    assertToXContentEquivalent(originalBuilder.bytes(), toXContent(reparsed, contentType), contentType);

    // fromXContent must stop on the hit's END_OBJECT and consume nothing beyond it
    assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
    assertNull(parser.nextToken());
}
/**
 * Checks the exact JSON produced for a minimal hit with only a score set.
 */
public void testToXContent() throws IOException {
    InternalSearchHit hit = new InternalSearchHit(1, "id1", new Text("type"), Collections.emptyMap());
    hit.score(1.5f);

    XContentBuilder jsonBuilder = JsonXContent.contentBuilder();
    hit.toXContent(jsonBuilder, ToXContent.EMPTY_PARAMS);

    assertEquals("{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":1.5}", jsonBuilder.string());
}
public void testSerializeShardTarget() throws Exception { public void testSerializeShardTarget() throws Exception {
SearchShardTarget target = new SearchShardTarget("_node_id", new Index("_index", "_na_"), 0); SearchShardTarget target = new SearchShardTarget("_node_id", new Index("_index", "_na_"), 0);
@ -94,4 +214,17 @@ public class InternalSearchHitTests extends ESTestCase {
searchHit.sourceRef(new BytesArray("{}")); searchHit.sourceRef(new BytesArray("{}"));
assertTrue(searchHit.hasSource()); assertTrue(searchHit.hasSource());
} }
/**
 * Builds a random "match" {@link Explanation} tree.
 *
 * @param depth maximum nesting depth; at 0 the explanation has no details
 */
private static Explanation createExplanation(int depth) {
    // Draw description before value to keep the random sequence stable.
    String reason = randomAsciiOfLengthBetween(5, 20);
    float score = randomFloat();
    List<Explanation> children = new ArrayList<>();
    if (depth > 0) {
        // recurse with reduced depth so the tree is guaranteed to terminate
        int childCount = randomIntBetween(1, 3);
        while (children.size() < childCount) {
            children.add(createExplanation(depth - 1));
        }
    }
    return Explanation.match(score, reason, children);
}
} }

View File

@ -0,0 +1,84 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.internal;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Collections;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
/**
 * Tests xContent rendering and parsing of {@link InternalSearchHits}.
 */
public class InternalSearchHitsTests extends ESTestCase {

    /**
     * Creates a random {@link InternalSearchHits} instance with 0 to 5 hits.
     * Inner hits are deliberately excluded to avoid recursion loops between
     * this factory and {@link InternalSearchHitTests#createTestItem(boolean)}.
     */
    public static InternalSearchHits createTestItem() {
        int searchHits = randomIntBetween(0, 5);
        InternalSearchHit[] hits = new InternalSearchHit[searchHits];
        for (int i = 0; i < searchHits; i++) {
            hits[i] = InternalSearchHitTests.createTestItem(false); // creating random innerHits could create loops
        }
        long totalHits = randomLong();
        // NaN stands in for "no max score"; it is rendered as a null field
        float maxScore = frequently() ? randomFloat() : Float.NaN;
        return new InternalSearchHits(hits, totalHits, maxScore);
    }

    /**
     * Renders random hits wrapped in an enclosing object, parses them back and
     * checks the two renderings are equivalent.
     */
    public void testFromXContent() throws IOException {
        InternalSearchHits searchHits = createTestItem();
        // NOTE(review): content type is pinned to JSON with the randomization
        // commented out — presumably other types fail the round trip; confirm
        // and either randomize or document the reason.
        XContentType xcontentType = XContentType.JSON; //randomFrom(XContentType.values());
        XContentBuilder builder = XContentFactory.contentBuilder(xcontentType);
        // toXContent writes a "hits" field, so it needs an enclosing object
        builder.startObject();
        builder = searchHits.toXContent(builder, ToXContent.EMPTY_PARAMS);
        builder.endObject();
        XContentParser parser = createParser(builder);
        InternalSearchHits parsed = InternalSearchHits.fromXContent(parser);
        assertToXContentEquivalent(builder.bytes(), toXContent(parsed, xcontentType), xcontentType);
        // fromXContent leaves the parser on the inner END_OBJECT; the next token
        // is the wrapper's END_OBJECT, after which the input is exhausted
        assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
        parser.nextToken();
        assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
        assertNull(parser.nextToken());
    }

    /**
     * Checks the exact JSON rendering; hits without a score render
     * {@code "_score":"-Infinity"}.
     */
    public void testToXContent() throws IOException {
        InternalSearchHit[] hits = new InternalSearchHit[] {
                new InternalSearchHit(1, "id1", new Text("type"), Collections.emptyMap()),
                new InternalSearchHit(2, "id2", new Text("type"), Collections.emptyMap()) };

        long totalHits = 1000;
        float maxScore = 1.5f;
        InternalSearchHits searchHits = new InternalSearchHits(hits, totalHits, maxScore);
        XContentBuilder builder = JsonXContent.contentBuilder();
        builder.startObject();
        searchHits.toXContent(builder, ToXContent.EMPTY_PARAMS);
        builder.endObject();
        assertEquals("{\"hits\":{\"total\":1000,\"max_score\":1.5," +
                "\"hits\":[{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":\"-Infinity\"},"+
                "{\"_type\":\"type\",\"_id\":\"id2\",\"_score\":\"-Infinity\"}]}}", builder.string());
    }
}

View File

@ -54,7 +54,7 @@ public class SearchSortValuesTests extends ESTestCase {
valueSuppliers.add(() -> randomBoolean()); valueSuppliers.add(() -> randomBoolean());
valueSuppliers.add(() -> frequently() ? randomAsciiOfLengthBetween(1, 30) : randomRealisticUnicodeOfCodepointLength(30)); valueSuppliers.add(() -> frequently() ? randomAsciiOfLengthBetween(1, 30) : randomRealisticUnicodeOfCodepointLength(30));
int size = randomInt(20); int size = randomIntBetween(1, 20);
Object[] values = new Object[size]; Object[] values = new Object[size];
for (int i = 0; i < size; i++) { for (int i = 0; i < size; i++) {
Supplier<Object> supplier = randomFrom(valueSuppliers); Supplier<Object> supplier = randomFrom(valueSuppliers);
@ -75,7 +75,8 @@ public class SearchSortValuesTests extends ESTestCase {
builder.endObject(); builder.endObject();
XContentParser parser = createParser(builder); XContentParser parser = createParser(builder);
parser.nextToken(); // skip to the elements field name token, fromXContent advances from there if called from ourside parser.nextToken(); // skip to the elements start array token, fromXContent advances from there if called
parser.nextToken();
parser.nextToken(); parser.nextToken();
if (sortValues.sortValues().length > 0) { if (sortValues.sortValues().length > 0) {
SearchSortValues parsed = SearchSortValues.fromXContent(parser); SearchSortValues parsed = SearchSortValues.fromXContent(parser);