Add the ability to retrieve fields from field data

Adds a new FetchSubPhase, FieldDataFieldsFetchSubPhase, which loads the
field data cache for a field and returns an array of values for the
field.

Also removes the `doc['<field>']` and `_source.<field>` workarounds from field
name resolving, which are no longer needed.

Closes #4492
Lee Hinman 2014-01-09 15:20:06 -07:00
parent 53192919c6
commit 2c289fb538
18 changed files with 397 additions and 106 deletions

View File

@ -87,6 +87,8 @@ include::request/fields.asciidoc[]
include::request/script-fields.asciidoc[]
include::request/fielddata-fields.asciidoc[]
include::request/post-filter.asciidoc[]
include::request/highlighting.asciidoc[]
@ -108,4 +110,3 @@ include::request/index-boost.asciidoc[]
include::request/min-score.asciidoc[]
include::request/named-queries-and-filters.asciidoc[]

View File

@ -0,0 +1,21 @@
[[search-request-fielddata-fields]]
=== Field Data Fields
Allows returning the field data representation of a field for each hit, for
example:
[source,js]
--------------------------------------------------
{
"query" : {
...
},
"fielddata_fields" : ["test1", "test2"]
}
--------------------------------------------------
Field data fields work on fields that are not stored.
It's important to understand that using the `fielddata_fields` parameter will
cause the terms for those fields to be loaded into memory (cached), which
results in higher memory consumption.
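For Java API users, a minimal sketch of the same request through the `addFieldDataField` builder method added in this commit; the index name `test`, the field names, and the `client` instance are placeholders:
[source,java]
--------------------------------------------------
// Assumes an existing org.elasticsearch.client.Client named `client`.
// Request the field data representation of two fields for every hit.
SearchResponse response = client.prepareSearch("test")
        .setQuery(QueryBuilders.matchAllQuery())
        .addFieldDataField("test1")
        .addFieldDataField("test2")
        .execute().actionGet();

// Values come back as ordinary hit fields, one list of values per field.
// Note: they reflect the indexed (field data) representation, e.g. dates
// as epoch milliseconds rather than the original source string.
for (SearchHit hit : response.getHits()) {
    List<Object> test1Values = hit.field("test1").getValues();
}
--------------------------------------------------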

View File

@ -417,6 +417,17 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
return this;
}
/**
* Adds a field data based field to load and return. The field does not have to be stored,
* but it's recommended to use non-analyzed or numeric fields.
*
* @param name The field to get from the field data cache
*/
public SearchRequestBuilder addFieldDataField(String name) {
sourceBuilder().fieldDataField(name);
return this;
}
/**
* Adds a script based field to load and return. The field does not have to be stored,
* but its recommended to use non analyzed or numeric fields.

View File

@ -238,30 +238,6 @@ public class ShardGetService extends AbstractIndexShardComponent {
}
} else if (field.equals(SizeFieldMapper.NAME) && docMapper.rootMapper(SizeFieldMapper.class).fieldType().stored()) {
value = source.source.length();
} else {
if (field.contains("_source.")) {
if (searchLookup == null) {
searchLookup = new SearchLookup(mapperService, fieldDataService, new String[]{type});
}
if (sourceAsMap == null) {
sourceAsMap = SourceLookup.sourceAsMap(source.source);
}
SearchScript searchScript = scriptService.search(searchLookup, "mvel", field, null);
// we can't do this, only allow to run scripts against the source
//searchScript.setNextReader(docIdAndVersion.reader);
//searchScript.setNextDocId(docIdAndVersion.docId);
// but, we need to inject the parsed source into the script, so it will be used...
searchScript.setNextSource(sourceAsMap);
try {
value = searchScript.run();
} catch (RuntimeException e) {
if (logger.isTraceEnabled()) {
logger.trace("failed to execute get request script field [{}]", e, field);
}
// ignore
}
} else {
if (searchLookup == null) {
searchLookup = new SearchLookup(mapperService, fieldDataService, new String[]{type});
@ -284,7 +260,6 @@ public class ShardGetService extends AbstractIndexShardComponent {
}
}
}
}
if (value != null) {
if (fields == null) {
fields = newHashMapWithExpectedSize(2);
@ -368,25 +343,6 @@ public class ShardGetService extends AbstractIndexShardComponent {
SearchLookup searchLookup = null;
for (String field : gFields) {
Object value = null;
if (field.contains("_source.") || field.contains("doc[")) {
if (searchLookup == null) {
searchLookup = new SearchLookup(mapperService, fieldDataService, new String[]{type});
searchLookup.source().setNextSource(source);
searchLookup.setNextReader(docIdAndVersion.context);
searchLookup.setNextDocId(docIdAndVersion.docId);
}
SearchScript searchScript = scriptService.search(searchLookup, "mvel", field, null);
searchScript.setNextReader(docIdAndVersion.context);
searchScript.setNextDocId(docIdAndVersion.docId);
try {
value = searchScript.run();
} catch (RuntimeException e) {
if (logger.isTraceEnabled()) {
logger.trace("failed to execute get request script field [{}]", e, field);
}
// ignore
}
} else {
FieldMappers x = docMapper.mappers().smartName(field);
if (x == null) {
if (docMapper.objectMappers().get(field) != null) {
@ -409,7 +365,6 @@ public class ShardGetService extends AbstractIndexShardComponent {
value = values;
}
}
}
if (value != null) {
if (fields == null) {

View File

@ -61,6 +61,7 @@ import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.facet.SearchContextFacets;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
import org.elasticsearch.search.fetch.partial.PartialFieldsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
@ -407,6 +408,16 @@ public class PercolateContext extends SearchContext {
throw new UnsupportedOperationException();
}
@Override
public boolean hasFieldDataFields() {
throw new UnsupportedOperationException();
}
@Override
public FieldDataFieldsContext fieldDataFields() {
throw new UnsupportedOperationException();
}
@Override
public boolean hasScriptFields() {
throw new UnsupportedOperationException();

View File

@ -24,8 +24,6 @@ import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.SpawnModules;
import org.elasticsearch.index.query.functionscore.FunctionScoreModule;
import org.elasticsearch.indices.fielddata.breaker.CircuitBreakerService;
import org.elasticsearch.indices.fielddata.breaker.InternalCircuitBreakerService;
import org.elasticsearch.search.action.SearchServiceTransportAction;
import org.elasticsearch.search.aggregations.AggregationModule;
import org.elasticsearch.search.controller.SearchPhaseController;
@ -33,6 +31,7 @@ import org.elasticsearch.search.dfs.DfsPhase;
import org.elasticsearch.search.facet.FacetModule;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.explain.ExplainFetchSubPhase;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.matchedqueries.MatchedQueriesFetchSubPhase;
import org.elasticsearch.search.fetch.partial.PartialFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.script.ScriptFieldsFetchSubPhase;
@ -62,6 +61,7 @@ public class SearchModule extends AbstractModule implements SpawnModules {
bind(FetchPhase.class).asEagerSingleton();
bind(ExplainFetchSubPhase.class).asEagerSingleton();
bind(FieldDataFieldsFetchSubPhase.class).asEagerSingleton();
bind(ScriptFieldsFetchSubPhase.class).asEagerSingleton();
bind(PartialFieldsFetchSubPhase.class).asEagerSingleton();
bind(FetchSourceSubPhase.class).asEagerSingleton();

View File

@ -98,6 +98,7 @@ public class SearchSourceBuilder implements ToXContent {
private long timeoutInMillis = -1;
private List<String> fieldNames;
private List<String> fieldDataFields;
private List<ScriptField> scriptFields;
private List<PartialField> partialFields;
private FetchSourceContext fetchSourceContext;
@ -568,6 +569,17 @@ public class SearchSourceBuilder implements ToXContent {
return this;
}
/**
* Adds a field to load from the field data cache and return as part of each search hit.
*/
public SearchSourceBuilder fieldDataField(String name) {
if (fieldDataFields == null) {
fieldDataFields = new ArrayList<String>();
}
fieldDataFields.add(name);
return this;
}
/**
* Adds a script field under the given name with the provided script.
*
@ -769,6 +781,14 @@ public class SearchSourceBuilder implements ToXContent {
}
}
if (fieldDataFields != null) {
builder.startArray("fielddata_fields");
for (String fieldName : fieldDataFields) {
builder.value(fieldName);
}
builder.endArray();
}
if (partialFields != null) {
builder.startObject("partial_fields");
for (PartialField partialField : partialFields) {
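For completeness, a hedged sketch of the builder side that feeds this serialization; the field names are placeholders, and the rendered body carries them under the `fielddata_fields` key emitted above:
[source,java]
--------------------------------------------------
// Sketch only: composing a search source by hand.
SearchSourceBuilder source = new SearchSourceBuilder()
        .query(QueryBuilders.matchAllQuery())
        .fieldDataField("test1")
        .fieldDataField("test2");
// When serialized via toXContent, the body contains:
//   "fielddata_fields" : ["test1", "test2"]
--------------------------------------------------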

View File

@ -34,6 +34,7 @@ import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.fetch.explain.ExplainFetchSubPhase;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.matchedqueries.MatchedQueriesFetchSubPhase;
import org.elasticsearch.search.fetch.partial.PartialFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.script.ScriptFieldsFetchSubPhase;
@ -61,9 +62,9 @@ public class FetchPhase implements SearchPhase {
@Inject
public FetchPhase(HighlightPhase highlightPhase, ScriptFieldsFetchSubPhase scriptFieldsPhase, PartialFieldsFetchSubPhase partialFieldsPhase,
MatchedQueriesFetchSubPhase matchedQueriesPhase, ExplainFetchSubPhase explainPhase, VersionFetchSubPhase versionPhase,
FetchSourceSubPhase fetchSourceSubPhase) {
FetchSourceSubPhase fetchSourceSubPhase, FieldDataFieldsFetchSubPhase fieldDataFieldsFetchSubPhase) {
this.fetchSubPhases = new FetchSubPhase[]{scriptFieldsPhase, partialFieldsPhase, matchedQueriesPhase, explainPhase, highlightPhase,
fetchSourceSubPhase, versionPhase};
fetchSourceSubPhase, versionPhase, fieldDataFieldsFetchSubPhase};
}
@Override

View File

@ -37,27 +37,16 @@ public class FieldsParseElement implements SearchParseElement {
boolean added = false;
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String name = parser.text();
if (name.contains("_source.") || name.contains("doc[")) {
// script field to load from source
SearchScript searchScript = context.scriptService().search(context.lookup(), "mvel", name, null);
context.scriptFields().add(new ScriptFieldsContext.ScriptField(name, searchScript, true));
} else {
added = true;
context.fieldNames().add(name);
}
}
if (!added) {
context.emptyFieldNames();
}
} else if (token == XContentParser.Token.VALUE_STRING) {
String name = parser.text();
if (name.contains("_source.") || name.contains("doc[")) {
// script field to load from source
SearchScript searchScript = context.scriptService().search(context.lookup(), "mvel", name, null);
context.scriptFields().add(new ScriptFieldsContext.ScriptField(name, searchScript, true));
} else {
context.fieldNames().add(name);
}
}
}
}
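With the `_source.`/`doc[` auto-detection gone from the `fields` parse element, callers that relied on it have to request the same data explicitly; a hedged sketch of the two explicit routes, where the `client` instance, index and field names are placeholders:
[source,java]
--------------------------------------------------
// Explicit script field (what the removed workaround used to register implicitly).
SearchResponse viaScript = client.prepareSearch("test")
        .setQuery(QueryBuilders.matchAllQuery())
        .addScriptField("s_obj1", "_source.obj1")
        .execute().actionGet();

// New field data based retrieval, recommended for non-analyzed or numeric fields.
SearchResponse viaFieldData = client.prepareSearch("test")
        .setQuery(QueryBuilders.matchAllQuery())
        .addFieldDataField("price")
        .execute().actionGet();
--------------------------------------------------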

View File

@ -0,0 +1,55 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch.fielddata;
import com.google.common.collect.Lists;
import java.util.List;
/**
* All the required context to pull a field from the field data cache.
*/
public class FieldDataFieldsContext {
public static class FieldDataField {
private final String name;
public FieldDataField(String name) {
this.name = name;
}
public String name() {
return name;
}
}
private List<FieldDataField> fields = Lists.newArrayList();
public FieldDataFieldsContext() {
}
public void add(FieldDataField field) {
this.fields.add(field);
}
public List<FieldDataField> fields() {
return this.fields;
}
}
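A minimal sketch of how the parse element and fetch sub phase introduced below are expected to use this holder; the field name is a placeholder:
[source,java]
--------------------------------------------------
// The parse element registers one entry per requested field name ...
FieldDataFieldsContext fieldDataFields = new FieldDataFieldsContext();
fieldDataFields.add(new FieldDataFieldsContext.FieldDataField("test1"));

// ... and the fetch sub phase later iterates the collected entries,
// resolving each name against the mapper and the field data cache per hit.
for (FieldDataFieldsContext.FieldDataField field : fieldDataFields.fields()) {
    String name = field.name();
}
--------------------------------------------------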

View File

@ -0,0 +1,93 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch.fielddata;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHitField;
import org.elasticsearch.search.internal.SearchContext;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
/**
* Fetch sub phase which pulls data from field data (using the cache if
* available, building it if not) for the fields requested via
* {@code "fielddata_fields": ["field1", "field2"]}.
*/
public class FieldDataFieldsFetchSubPhase implements FetchSubPhase {
@Inject
public FieldDataFieldsFetchSubPhase() {
}
@Override
public Map<String, ? extends SearchParseElement> parseElements() {
ImmutableMap.Builder<String, SearchParseElement> parseElements = ImmutableMap.builder();
parseElements.put("fielddata_fields", new FieldDataFieldsParseElement())
.put("fielddataFields", new FieldDataFieldsParseElement());
return parseElements.build();
}
@Override
public boolean hitsExecutionNeeded(SearchContext context) {
return false;
}
@Override
public void hitsExecute(SearchContext context, InternalSearchHit[] hits) throws ElasticsearchException {
}
@Override
public boolean hitExecutionNeeded(SearchContext context) {
return context.hasFieldDataFields();
}
@Override
public void hitExecute(SearchContext context, HitContext hitContext) throws ElasticsearchException {
for (FieldDataFieldsContext.FieldDataField field : context.fieldDataFields().fields()) {
if (hitContext.hit().fieldsOrNull() == null) {
hitContext.hit().fields(new HashMap<String, SearchHitField>(2));
}
SearchHitField hitField = hitContext.hit().fields().get(field.name());
if (hitField == null) {
hitField = new InternalSearchHitField(field.name(), new ArrayList<Object>(2));
hitContext.hit().fields().put(field.name(), hitField);
}
FieldMapper mapper = context.mapperService().smartNameFieldMapper(field.name());
if (mapper != null) {
AtomicFieldData data = context.fieldData().getForField(mapper).load(hitContext.readerContext());
ScriptDocValues values = data.getScriptValues();
values.setNextDocId(hitContext.docId());
hitField.values().addAll(values.getValues());
}
}
}
}

View File

@ -0,0 +1,46 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch.fielddata;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.internal.SearchContext;
/**
* Parses field name values from the {@code fielddata_fields} parameter in a
* search request.
*
* <pre>
* {
* "query": {...},
* "fielddata_fields" : ["field1", "field2"]
* }
* </pre>
*/
public class FieldDataFieldsParseElement implements SearchParseElement {
@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String fieldName = parser.text();
context.fieldDataFields().add(new FieldDataFieldsContext.FieldDataField(fieldName));
}
}
}

View File

@ -58,6 +58,7 @@ import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.facet.SearchContextFacets;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
import org.elasticsearch.search.fetch.partial.PartialFieldsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
@ -122,6 +123,7 @@ public class DefaultSearchContext extends SearchContext {
private boolean version = false; // by default, we don't return versions
private List<String> fieldNames;
private FieldDataFieldsContext fieldDataFields;
private ScriptFieldsContext scriptFields;
private PartialFieldsContext partialFields;
private FetchSourceContext fetchSourceContext;
@ -348,6 +350,17 @@ public class DefaultSearchContext extends SearchContext {
this.rescore = rescore;
}
public boolean hasFieldDataFields() {
return fieldDataFields != null;
}
public FieldDataFieldsContext fieldDataFields() {
if (fieldDataFields == null) {
fieldDataFields = new FieldDataFieldsContext();
}
return this.fieldDataFields;
}
public boolean hasScriptFields() {
return scriptFields != null;
}

View File

@ -47,6 +47,7 @@ import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.facet.SearchContextFacets;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
import org.elasticsearch.search.fetch.partial.PartialFieldsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
@ -139,6 +140,10 @@ public abstract class SearchContext implements Releasable {
public abstract void rescore(RescoreSearchContext rescore);
public abstract boolean hasFieldDataFields();
public abstract FieldDataFieldsContext fieldDataFields();
public abstract boolean hasScriptFields();
public abstract ScriptFieldsContext scriptFields();

View File

@ -107,11 +107,11 @@ public class DocumentActionsTests extends ElasticsearchIntegrationTest {
logger.info("Get [type1/1] with script");
for (int i = 0; i < 5; i++) {
getResult = client().prepareGet("test", "type1", "1").setFields("_source.type1.name").execute().actionGet();
getResult = client().prepareGet("test", "type1", "1").setFields("type1.name").execute().actionGet();
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
assertThat(getResult.isExists(), equalTo(true));
assertThat(getResult.getSourceAsBytes(), nullValue());
assertThat(getResult.getField("_source.type1.name").getValues().get(0).toString(), equalTo("test"));
assertThat(getResult.getField("type1.name").getValues().get(0).toString(), equalTo("test"));
}
logger.info("Get [type1/2] (should be empty)");

View File

@ -151,15 +151,14 @@ public class ExplainActionTests extends ElasticsearchIntegrationTest {
response = client().prepareExplain("test", "test", "1")
.setQuery(QueryBuilders.matchAllQuery())
.setFields("_source.obj1")
.setFields("obj1.field1", "obj1.field2")
.execute().actionGet();
assertNotNull(response);
assertTrue(response.isMatch());
assertThat(response.getGetResult().getFields().size(), equalTo(1));
Map<String, String> fields = (Map<String, String>) response.getGetResult().field("_source.obj1").getValue();
assertThat(fields.size(), equalTo(2));
assertThat(fields.get("field1"), equalTo("value1"));
assertThat(fields.get("field2"), equalTo("value2"));
String v1 = (String) response.getGetResult().field("obj1.field1").getValue();
String v2 = (String) response.getGetResult().field("obj1.field2").getValue();
assertThat(v1, equalTo("value1"));
assertThat(v2, equalTo("value2"));
}
@SuppressWarnings("unchecked")

View File

@ -47,6 +47,7 @@ import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.facet.SearchContextFacets;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
import org.elasticsearch.search.fetch.partial.PartialFieldsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
@ -212,6 +213,16 @@ class TestSearchContext extends SearchContext {
public void rescore(RescoreSearchContext rescore) {
}
@Override
public boolean hasFieldDataFields() {
return false;
}
@Override
public FieldDataFieldsContext fieldDataFields() {
return null;
}
@Override
public boolean hasScriptFields() {
return false;

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.fields;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.Base64;
import org.elasticsearch.common.Priority;
@ -199,7 +200,6 @@ public class SearchFieldsTests extends ElasticsearchIntegrationTest {
SearchResponse response = client().prepareSearch()
.setQuery(matchAllQuery())
.addField("_source.obj1") // we also automatically detect _source in fields
.addScriptField("s_obj1", "_source.obj1")
.addScriptField("s_obj1_test", "_source.obj1.test")
.addScriptField("s_obj2", "_source.obj2")
@ -209,11 +209,9 @@ public class SearchFieldsTests extends ElasticsearchIntegrationTest {
assertThat("Failures " + Arrays.toString(response.getShardFailures()), response.getShardFailures().length, equalTo(0));
Map<String, Object> sObj1 = response.getHits().getAt(0).field("_source.obj1").value();
assertThat(sObj1.get("test").toString(), equalTo("something"));
assertThat(response.getHits().getAt(0).field("s_obj1_test").value().toString(), equalTo("something"));
sObj1 = response.getHits().getAt(0).field("s_obj1").value();
Map<String, Object> sObj1 = response.getHits().getAt(0).field("s_obj1").value();
assertThat(sObj1.get("test").toString(), equalTo("something"));
assertThat(response.getHits().getAt(0).field("s_obj1_test").value().toString(), equalTo("something"));
@ -423,4 +421,66 @@ public class SearchFieldsTests extends ElasticsearchIntegrationTest {
assertThat(searchResponse.getHits().getAt(0).field(field).getValues().get(0).toString(), equalTo("value1"));
assertThat(searchResponse.getHits().getAt(0).field(field).getValues().get(1).toString(), equalTo("value2"));
}
@Test
public void testFieldsPulledFromFieldData() throws Exception {
createIndex("test");
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("_source").field("enabled", false).endObject()
.startObject("string_field").field("type", "string").endObject()
.startObject("byte_field").field("type", "byte").endObject()
.startObject("short_field").field("type", "short").endObject()
.startObject("integer_field").field("type", "integer").endObject()
.startObject("long_field").field("type", "long").endObject()
.startObject("float_field").field("type", "float").endObject()
.startObject("double_field").field("type", "double").endObject()
.startObject("date_field").field("type", "date").endObject()
.startObject("boolean_field").field("type", "boolean").endObject()
.startObject("binary_field").field("type", "binary").endObject()
.endObject().endObject().endObject().string();
client().admin().indices().preparePutMapping().setType("type1").setSource(mapping).execute().actionGet();
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
.field("string_field", "foo")
.field("byte_field", (byte) 1)
.field("short_field", (short) 2)
.field("integer_field", 3)
.field("long_field", 4l)
.field("float_field", 5.0f)
.field("double_field", 6.0d)
.field("date_field", Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC)))
.field("boolean_field", true)
.endObject()).execute().actionGet();
client().admin().indices().prepareRefresh().execute().actionGet();
SearchRequestBuilder builder = client().prepareSearch().setQuery(matchAllQuery())
.addFieldDataField("string_field")
.addFieldDataField("byte_field")
.addFieldDataField("short_field")
.addFieldDataField("integer_field")
.addFieldDataField("long_field")
.addFieldDataField("float_field")
.addFieldDataField("double_field")
.addFieldDataField("date_field")
.addFieldDataField("boolean_field");
SearchResponse searchResponse = builder.execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(9));
assertThat(searchResponse.getHits().getAt(0).fields().get("byte_field").value().toString(), equalTo("1"));
assertThat(searchResponse.getHits().getAt(0).fields().get("short_field").value().toString(), equalTo("2"));
assertThat(searchResponse.getHits().getAt(0).fields().get("integer_field").value(), equalTo((Object) 3l));
assertThat(searchResponse.getHits().getAt(0).fields().get("long_field").value(), equalTo((Object) 4l));
assertThat(searchResponse.getHits().getAt(0).fields().get("float_field").value(), equalTo((Object) 5.0));
assertThat(searchResponse.getHits().getAt(0).fields().get("double_field").value(), equalTo((Object) 6.0d));
assertThat(searchResponse.getHits().getAt(0).fields().get("date_field").value(), equalTo((Object) 1332374400000L));
assertThat(searchResponse.getHits().getAt(0).fields().get("boolean_field").value().toString(), equalTo("T"));
}
}