Add fromXContent to HighlightField

This adds a fromXContent method and unit test to the HighlightField class so we
can parse it as part of a search response. This is part of the preparation for
parsing search responses on the client side.
This commit is contained in:
Christoph Büscher 2016-12-06 15:39:30 +01:00
parent 8923b36780
commit 7454a9647b
4 changed files with 212 additions and 23 deletions

View File

@ -19,18 +19,25 @@
package org.elasticsearch.search.fetch.subphase.highlight;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
/**
* A field highlighted with its highlighted fragments.
*/
public class HighlightField implements Streamable {
public class HighlightField implements ToXContent, Streamable {
private String name;
@ -40,7 +47,7 @@ public class HighlightField implements Streamable {
}
public HighlightField(String name, Text[] fragments) {
this.name = name;
this.name = Objects.requireNonNull(name, "missing highlight field name");
this.fragments = fragments;
}
@ -112,4 +119,62 @@ public class HighlightField implements Streamable {
}
}
}
/**
 * Parses a single highlight field entry from xContent.
 * <p>
 * The parser is expected to be positioned just before the field-name token; the method
 * advances it over the name and either an array of string fragments or an explicit null.
 *
 * @param parser the xContent parser, positioned before the highlight field name
 * @return a new {@link HighlightField} with the parsed name and fragments (fragments may be null)
 * @throws IOException if the underlying parser fails
 * @throws ParsingException if the next token is not a field name, or the value is neither
 *         an array nor null
 */
public static HighlightField fromXContent(XContentParser parser) throws IOException {
    XContentParser.Token token = parser.nextToken();
    // validate explicitly rather than with `assert`: assertions are disabled at runtime,
    // so malformed input would otherwise reach currentName() with a confusing failure
    if (token != XContentParser.Token.FIELD_NAME) {
        throw new ParsingException(parser.getTokenLocation(),
                "expected a field name but found [" + token + "]");
    }
    String fieldName = parser.currentName();
    Text[] fragments = null;
    token = parser.nextToken();
    if (token == XContentParser.Token.START_ARRAY) {
        fragments = parseValues(parser);
    } else if (token != XContentParser.Token.VALUE_NULL) {
        // VALUE_NULL leaves fragments as null; anything else is malformed
        throw new ParsingException(parser.getTokenLocation(),
                "unexpected token type [" + token + "]");
    }
    return new HighlightField(fieldName, fragments);
}
/**
 * Collects the string fragments of a highlight value array.
 * The parser must be positioned on the START_ARRAY token; on return it is on END_ARRAY.
 */
private static Text[] parseValues(XContentParser parser) throws IOException {
    List<Text> fragments = new ArrayList<>();
    for (XContentParser.Token token = parser.nextToken();
            token != XContentParser.Token.END_ARRAY;
            token = parser.nextToken()) {
        fragments.add(new Text(parser.text()));
    }
    return fragments.toArray(new Text[fragments.size()]);
}
/**
 * Renders this highlight field as {@code "name" : [fragment, ...]}, or
 * {@code "name" : null} when there are no fragments.
 */
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    builder.field(name);
    if (fragments != null) {
        builder.startArray();
        for (Text fragment : fragments) {
            builder.value(fragment);
        }
        builder.endArray();
    } else {
        // a null fragments array is rendered as an explicit json null
        builder.nullValue();
    }
    return builder;
}
/**
 * Two highlight fields are equal when both the field name and the fragment
 * arrays (element-wise) match. Exact-class comparison, so subclasses never
 * compare equal to this class.
 */
@Override
public final boolean equals(Object obj) {
    if (obj == this) {
        return true;
    }
    if (obj == null || obj.getClass() != getClass()) {
        return false;
    }
    HighlightField that = (HighlightField) obj;
    return Objects.equals(name, that.name)
            && Arrays.equals(fragments, that.fragments);
}
/**
 * Hash code consistent with {@link #equals(Object)}: combines the name with a
 * content-based hash of the fragments array.
 */
@Override
public final int hashCode() {
    // Arrays.hashCode gives a content-based hash; a bare Objects.hash(fragments)
    // would use the array's identity hash
    int fragmentsHash = Arrays.hashCode(fragments);
    return Objects.hash(name, fragmentsHash);
}
}

View File

@ -468,7 +468,8 @@ public class InternalSearchHit implements SearchHit {
builder.field(Fields._SCORE, score);
}
for (SearchHitField field : metaFields) {
builder.field(field.name(), (Object) field.value());
Object value = (Object) field.value();
builder.field(field.name(), value);
}
if (source != null) {
XContentHelper.writeRawField("_source", source, builder, params);
@ -487,16 +488,7 @@ public class InternalSearchHit implements SearchHit {
if (highlightFields != null && !highlightFields.isEmpty()) {
builder.startObject(Fields.HIGHLIGHT);
for (HighlightField field : highlightFields.values()) {
builder.field(field.name());
if (field.fragments() == null) {
builder.nullValue();
} else {
builder.startArray();
for (Text fragment : field.fragments()) {
builder.value(fragment);
}
builder.endArray();
}
field.toXContent(builder, params);
}
builder.endObject();
}

View File

@ -0,0 +1,141 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch.subphase.highlight;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Arrays;
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
/**
 * Unit tests for {@link HighlightField}: xContent round-trip, rendering,
 * equals/hashCode contract, and stream serialization.
 */
public class HighlightFieldTests extends ESTestCase {
/**
 * Creates a random HighlightField: a random (sometimes unicode) name and either
 * a null fragments array or 0-5 random fragments.
 */
public static HighlightField createTestItem() {
String name = frequently() ? randomAsciiOfLengthBetween(1, 20) : randomRealisticUnicodeOfCodepointLengthBetween(1, 20);
// fragments stay null some of the time to also cover the "no fragments" case
Text[] fragments = null;
if (frequently()) {
int size = randomIntBetween(0, 5);
fragments = new Text[size];
for (int i = 0; i < size; i++) {
fragments[i] = new Text(
frequently() ? randomAsciiOfLengthBetween(10, 30) : randomRealisticUnicodeOfCodepointLengthBetween(10, 30));
}
}
return new HighlightField(name, fragments);
}
/**
 * Round-trips a random HighlightField through toXContent and fromXContent
 * (random content type, optionally pretty-printed) and checks equality plus
 * the parser's final position.
 */
public void testFromXContent() throws IOException {
HighlightField highlightField = createTestItem();
XContentType xcontentType = randomFrom(XContentType.values());
XContentBuilder builder = XContentFactory.contentBuilder(xcontentType);
if (randomBoolean()) {
builder.prettyPrint();
}
builder.startObject(); // we need to wrap xContent output in proper object to create a parser for it
builder = highlightField.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
XContentParser parser = xcontentType.xContent().createParser(builder.bytes());
parser.nextToken(); // skip to the opening object token, fromXContent advances from here and starts with the field name
HighlightField parsedField = HighlightField.fromXContent(parser);
assertEquals(highlightField, parsedField);
// after parsing an array value the parser should sit on END_ARRAY
// (for null fragments it sits on VALUE_NULL, so nothing to check there)
if (highlightField.fragments() != null) {
assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken());
}
assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
assertNull(parser.nextToken());
}
/**
 * Checks the exact pretty-printed JSON rendering for both a field with
 * fragments and one with a null fragments array.
 */
public void testToXContent() throws IOException {
HighlightField field = new HighlightField("foo", new Text[] { new Text("bar"), new Text("baz") });
XContentBuilder builder = JsonXContent.contentBuilder();
builder.prettyPrint();
builder.startObject();
field.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
assertEquals(
"{\n" +
"  \"foo\" : [\n" +
"    \"bar\",\n" +
"    \"baz\"\n" +
"  ]\n" +
"}", builder.string());
// null fragments must render as an explicit json null
field = new HighlightField("foo", null);
builder = JsonXContent.contentBuilder();
builder.prettyPrint();
builder.startObject();
field.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
assertEquals(
"{\n" +
"  \"foo\" : null\n" +
"}", builder.string());
}
/**
 * Test equality and hashCode properties
 */
public void testEqualsAndHashcode() {
checkEqualsAndHashCode(createTestItem(), HighlightFieldTests::copy, HighlightFieldTests::mutate);
}
/**
 * Round-trips a random HighlightField through Streamable serialization and
 * checks equality, hashCode stability, and that a fresh instance is returned.
 */
public void testSerialization() throws IOException {
HighlightField testField = createTestItem();
try (BytesStreamOutput output = new BytesStreamOutput()) {
testField.writeTo(output);
try (StreamInput in = output.bytes().streamInput()) {
HighlightField deserializedCopy = HighlightField.readHighlightField(in);
assertEquals(testField, deserializedCopy);
assertEquals(testField.hashCode(), deserializedCopy.hashCode());
assertNotSame(testField, deserializedCopy);
}
}
}
// Produces an unequal copy by changing either the name or the fragments,
// used by checkEqualsAndHashCode above.
private static HighlightField mutate(HighlightField original) {
Text[] fragments = original.getFragments();
if (randomBoolean()) {
return new HighlightField(original.getName()+"_suffix", fragments);
} else {
if (fragments == null) {
fragments = new Text[]{new Text("field")};
} else {
fragments = Arrays.copyOf(fragments, fragments.length + 1);
fragments[fragments.length - 1] = new Text("something new");
}
return new HighlightField(original.getName(), fragments);
}
}
// Equal copy with the same name and fragments (fragments array is shared,
// which is fine since Text is immutable).
private static HighlightField copy(HighlightField original) {
return new HighlightField(original.getName(), original.getFragments());
}
}

View File

@ -136,16 +136,7 @@ public class PercolateResponse extends BroadcastResponse implements Iterable<Per
if (match.getHighlightFields().isEmpty() == false) {
builder.startObject(Fields.HIGHLIGHT);
for (HighlightField field : match.getHighlightFields().values()) {
builder.field(field.name());
if (field.fragments() == null) {
builder.nullValue();
} else {
builder.startArray();
for (Text fragment : field.fragments()) {
builder.value(fragment);
}
builder.endArray();
}
field.toXContent(builder, params);
}
builder.endObject();
}